Merge pull request #4867 from projectdiscovery/maint-runner-cache

Internal Templates Loader/Parser caches refactoring
Mzack9999 2024-03-15 14:05:12 +01:00 committed by GitHub
commit 7d06c97a0d
46 changed files with 558 additions and 521 deletions
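
The refactoring replaces the package-level parsing state in pkg/parsers (global caches, ShouldValidate, NoStrictSyntax) with a templates.Parser instance that the runner creates and hands to every consumer through protocols.ExecutorOptions. A minimal sketch of the new wiring, condensed from the runner changes below (field names are taken from this diff; all other ExecutorOptions fields are omitted):

package sketch

import (
	"github.com/projectdiscovery/nuclei/v3/pkg/protocols"
	"github.com/projectdiscovery/nuclei/v3/pkg/templates"
)

// buildExecutorOptions shows how the per-instance parser replaces the old
// globals in pkg/parsers and travels with the executor options.
func buildExecutorOptions(validate, noStrictSyntax bool) protocols.ExecutorOptions {
	parser := templates.NewParser()
	parser.ShouldValidate = validate       // was parsers.ShouldValidate
	parser.NoStrictSyntax = noStrictSyntax // was parsers.NoStrictSyntax

	return protocols.ExecutorOptions{
		// Output, Options, Catalog, ... as set up in internal/runner
		Parser: parser, // loaders and workflow loaders read the parser from here
	}
}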

View File

@ -20,14 +20,15 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/loader"
"github.com/projectdiscovery/nuclei/v3/pkg/core"
"github.com/projectdiscovery/nuclei/v3/pkg/input/provider"
parsers "github.com/projectdiscovery/nuclei/v3/pkg/loader/workflow"
"github.com/projectdiscovery/nuclei/v3/pkg/output"
"github.com/projectdiscovery/nuclei/v3/pkg/parsers"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/hosterrorscache"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/interactsh"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/protocolinit"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/protocolstate"
"github.com/projectdiscovery/nuclei/v3/pkg/reporting"
"github.com/projectdiscovery/nuclei/v3/pkg/templates"
"github.com/projectdiscovery/nuclei/v3/pkg/testutils"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
"github.com/projectdiscovery/ratelimit"
@ -98,6 +99,7 @@ func executeNucleiAsLibrary(templatePath, templateURL string) ([]string, error)
catalog := disk.NewCatalog(path.Join(home, "nuclei-templates"))
ratelimiter := ratelimit.New(context.Background(), 150, time.Second)
defer ratelimiter.Stop()
executerOpts := protocols.ExecutorOptions{
Output: outputWriter,
Options: defaultOpts,
@ -109,6 +111,7 @@ func executeNucleiAsLibrary(templatePath, templateURL string) ([]string, error)
HostErrorsCache: cache,
Colorizer: aurora.NewAurora(true),
ResumeCfg: types.NewResumeCfg(),
Parser: templates.NewParser(),
}
engine := core.New(defaultOpts)
engine.SetExecuterOptions(executerOpts)

3
go.sum
View File

@ -880,8 +880,6 @@ github.com/projectdiscovery/uncover v1.0.7 h1:ut+2lTuvmftmveqF5RTjMWAgyLj8ltPQC7
github.com/projectdiscovery/uncover v1.0.7/go.mod h1:HFXgm1sRPuoN0D4oATljPIdmbo/EEh1wVuxQqo/dwFE=
github.com/projectdiscovery/useragent v0.0.40 h1:1LUhReSGPkhqsM5n40OOC9dIoNqMGs1dyGFJcOmg2Fo=
github.com/projectdiscovery/useragent v0.0.40/go.mod h1:EvK1x3s948Gtqb/XOahXcauyejCL/rSgy5d1IAvsKT4=
github.com/projectdiscovery/utils v0.0.84-0.20240311212130-16ce15974a4a h1:njYY24OsTQJ80L8O+QxcYFljiVl83xp/BWz3dsIJF30=
github.com/projectdiscovery/utils v0.0.84-0.20240311212130-16ce15974a4a/go.mod h1:bvcudEteeZ5MIZeBCXEfpcgj9h3tyB9qtnmc7zQR92w=
github.com/projectdiscovery/utils v0.0.84-0.20240313184656-e3ec80f4dd42 h1:l22rSOP8i6HXu1QfAtIot8NvmJgUmBHEn6Mih7s8Gak=
github.com/projectdiscovery/utils v0.0.84-0.20240313184656-e3ec80f4dd42/go.mod h1:VsoXXTuNAAziuodKWakLyurVXaV4tNTJU4Eo8umyr3Q=
github.com/projectdiscovery/wappalyzergo v0.0.112 h1:QPpp5jmj1lqLd5mFdFKQ9VvcYhQNqyU9Mr+IB0US2zA=
@ -1366,7 +1364,6 @@ golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=

View File

@ -17,6 +17,7 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/authprovider"
"github.com/projectdiscovery/nuclei/v3/pkg/input/provider"
"github.com/projectdiscovery/nuclei/v3/pkg/installer"
"github.com/projectdiscovery/nuclei/v3/pkg/loader/parser"
uncoverlib "github.com/projectdiscovery/uncover"
pdcpauth "github.com/projectdiscovery/utils/auth/pdcp"
"github.com/projectdiscovery/utils/env"
@ -37,8 +38,8 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/core"
"github.com/projectdiscovery/nuclei/v3/pkg/external/customtemplates"
"github.com/projectdiscovery/nuclei/v3/pkg/input"
parsers "github.com/projectdiscovery/nuclei/v3/pkg/loader/workflow"
"github.com/projectdiscovery/nuclei/v3/pkg/output"
"github.com/projectdiscovery/nuclei/v3/pkg/parsers"
"github.com/projectdiscovery/nuclei/v3/pkg/progress"
"github.com/projectdiscovery/nuclei/v3/pkg/projectfile"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols"
@ -87,6 +88,7 @@ type Runner struct {
inputProvider provider.InputProvider
//general purpose temporary directory
tmpDir string
parser parser.Parser
}
const pprofServerAddress = "127.0.0.1:8086"
@ -148,12 +150,15 @@ func New(options *types.Options) (*Runner, error) {
}
}
if options.Validate {
parsers.ShouldValidate = true
}
parser := templates.NewParser()
if options.Validate {
parser.ShouldValidate = true
}
// TODO: refactor to pass options reference globally without cycles
parsers.NoStrictSyntax = options.NoStrictSyntax
parser.NoStrictSyntax = options.NoStrictSyntax
runner.parser = parser
yaml.StrictSyntax = !options.NoStrictSyntax
if options.Headless {
@ -431,6 +436,7 @@ func (r *Runner) RunEnumeration() error {
ExcludeMatchers: excludematchers.New(r.options.ExcludeMatchers),
InputHelper: input.NewHelper(),
TemporaryDirectory: r.tmpDir,
Parser: r.parser,
}
if len(r.options.SecretsFile) > 0 && !r.options.Validate {
@ -483,7 +489,7 @@ func (r *Runner) RunEnumeration() error {
if err := store.ValidateTemplates(); err != nil {
return err
}
if stats.GetValue(parsers.SyntaxErrorStats) == 0 && stats.GetValue(parsers.SyntaxWarningStats) == 0 && stats.GetValue(parsers.RuntimeWarningsStats) == 0 {
if stats.GetValue(templates.SyntaxErrorStats) == 0 && stats.GetValue(templates.SyntaxWarningStats) == 0 && stats.GetValue(templates.RuntimeWarningsStats) == 0 {
gologger.Info().Msgf("All templates validated successfully\n")
} else {
return errors.New("encountered errors while performing template validation")
@ -495,9 +501,6 @@ func (r *Runner) RunEnumeration() error {
disk.PrintDeprecatedPathsMsgIfApplicable(r.options.Silent)
templates.PrintDeprecatedProtocolNameMsgIfApplicable(r.options.Silent, r.options.Verbose)
// purge global caches primarily used for loading templates
config.DefaultConfig.PurgeGlobalCache()
// add the hosts from the metadata queries of loaded templates into input provider
if r.options.Uncover && len(r.options.UncoverQuery) == 0 {
uncoverOpts := &uncoverlib.Options{
@ -634,19 +637,22 @@ func (r *Runner) executeTemplatesInput(store *loader.Store, engine *core.Engine)
// displayExecutionInfo displays misc info about the nuclei engine execution
func (r *Runner) displayExecutionInfo(store *loader.Store) {
// Display stats for any loaded templates' syntax warnings or errors
stats.Display(parsers.SyntaxWarningStats)
stats.Display(parsers.SyntaxErrorStats)
stats.Display(parsers.RuntimeWarningsStats)
stats.Display(templates.SyntaxWarningStats)
stats.Display(templates.SyntaxErrorStats)
stats.Display(templates.RuntimeWarningsStats)
if r.options.Verbose {
// only print these stats in verbose mode
stats.DisplayAsWarning(parsers.HeadlessFlagWarningStats)
stats.DisplayAsWarning(parsers.CodeFlagWarningStats)
stats.DisplayAsWarning(parsers.FuzzFlagWarningStats)
stats.DisplayAsWarning(parsers.TemplatesExecutedStats)
stats.DisplayAsWarning(templates.HeadlessFlagWarningStats)
stats.DisplayAsWarning(templates.CodeFlagWarningStats)
stats.DisplayAsWarning(templates.TemplatesExecutedStats)
stats.DisplayAsWarning(templates.HeadlessFlagWarningStats)
stats.DisplayAsWarning(templates.CodeFlagWarningStats)
stats.DisplayAsWarning(templates.FuzzFlagWarningStats)
stats.DisplayAsWarning(templates.TemplatesExecutedStats)
}
stats.DisplayAsWarning(parsers.UnsignedCodeWarning)
stats.ForceDisplayWarning(parsers.SkippedUnsignedStats)
stats.DisplayAsWarning(templates.UnsignedCodeWarning)
stats.ForceDisplayWarning(templates.SkippedUnsignedStats)
cfg := config.DefaultConfig
@ -671,8 +677,8 @@ func (r *Runner) displayExecutionInfo(store *loader.Store) {
value := v.Load()
if k == templates.Unsigned && value > 0 {
// adjust skipped unsigned templates via code or -dut flag
value = value - uint64(stats.GetValue(parsers.SkippedUnsignedStats))
value = value - uint64(stats.GetValue(parsers.CodeFlagWarningStats))
value = value - uint64(stats.GetValue(templates.SkippedUnsignedStats))
value = value - uint64(stats.GetValue(templates.CodeFlagWarningStats))
}
if value > 0 {
if k != templates.Unsigned {

View File

@ -4,7 +4,7 @@ import (
"os"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/projectdiscovery/nuclei/v3/pkg/model/types/severity"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
@ -15,15 +15,15 @@ func TestCreateReportingOptions(t *testing.T) {
options.ReportingConfig = "../../integration_tests/test-issue-tracker-config1.yaml"
resultOptions, err := createReportingOptions(&options)
assert.Nil(t, err)
assert.Equal(t, resultOptions.AllowList.Severities, severity.Severities{severity.High, severity.Critical})
assert.Equal(t, resultOptions.DenyList.Severities, severity.Severities{severity.Low})
require.Nil(t, err)
require.Equal(t, resultOptions.AllowList.Severities, severity.Severities{severity.High, severity.Critical})
require.Equal(t, resultOptions.DenyList.Severities, severity.Severities{severity.Low})
options.ReportingConfig = "../../integration_tests/test-issue-tracker-config2.yaml"
resultOptions2, err := createReportingOptions(&options)
assert.Nil(t, err)
assert.Equal(t, resultOptions2.AllowList.Severities, resultOptions.AllowList.Severities)
assert.Equal(t, resultOptions2.DenyList.Severities, resultOptions.DenyList.Severities)
require.Nil(t, err)
require.Equal(t, resultOptions2.AllowList.Severities, resultOptions.AllowList.Severities)
require.Equal(t, resultOptions2.DenyList.Severities, resultOptions.DenyList.Severities)
}
type TestStruct1 struct {
@ -69,8 +69,8 @@ func TestWalkReflectStructAssignsEnvVars(t *testing.T) {
Walk(testStruct, expandEndVars)
assert.Equal(t, "value", testStruct.A)
assert.Equal(t, "value2", testStruct.Struct.B)
require.Equal(t, "value", testStruct.A)
require.Equal(t, "value2", testStruct.Struct.B)
}
func TestWalkReflectStructHandlesDifferentTypes(t *testing.T) {
@ -85,9 +85,9 @@ func TestWalkReflectStructHandlesDifferentTypes(t *testing.T) {
Walk(testStruct, expandEndVars)
assert.Equal(t, "value", testStruct.A)
assert.Equal(t, "2", testStruct.B)
assert.Equal(t, "true", testStruct.C)
require.Equal(t, "value", testStruct.A)
require.Equal(t, "2", testStruct.B)
require.Equal(t, "true", testStruct.C)
}
func TestWalkReflectStructEmpty(t *testing.T) {
@ -102,9 +102,9 @@ func TestWalkReflectStructEmpty(t *testing.T) {
Walk(testStruct, expandEndVars)
assert.Equal(t, "value", testStruct.A)
assert.Equal(t, "", testStruct.B)
assert.Equal(t, "true", testStruct.C)
require.Equal(t, "value", testStruct.A)
require.Equal(t, "", testStruct.B)
require.Equal(t, "true", testStruct.C)
}
func TestWalkReflectStructWithNoYamlTag(t *testing.T) {
@ -119,9 +119,9 @@ func TestWalkReflectStructWithNoYamlTag(t *testing.T) {
os.Setenv("GITHUB_USER", "testuser")
Walk(test, expandEndVars)
assert.Equal(t, "testuser", test.A)
assert.Equal(t, "testuser", test.B.B, test.B)
assert.Equal(t, "$GITHUB_USER", test.C)
require.Equal(t, "testuser", test.A)
require.Equal(t, "testuser", test.B.B, test.B)
require.Equal(t, "$GITHUB_USER", test.C)
}
func TestWalkReflectStructHandlesNestedStructs(t *testing.T) {
@ -138,7 +138,7 @@ func TestWalkReflectStructHandlesNestedStructs(t *testing.T) {
Walk(testStruct, expandEndVars)
assert.Equal(t, "value", testStruct.A)
assert.Equal(t, "2", testStruct.Struct.B)
assert.Equal(t, "true", testStruct.Struct.C)
require.Equal(t, "value", testStruct.A)
require.Equal(t, "2", testStruct.Struct.B)
require.Equal(t, "true", testStruct.Struct.C)
}
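
Most of the test changes in this PR are a mechanical swap of testify's assert package for require. The practical difference, shown with a hypothetical helper, is that require stops the test on the first failed check (t.FailNow) while assert records the failure and keeps going:

package sketch

import (
	"errors"
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

// doSomething is a hypothetical helper used only for illustration.
func doSomething() (string, error) { return "", errors.New("boom") }

func TestRequireStopsEarly(t *testing.T) {
	val, err := doSomething()
	require.NoError(t, err) // calls t.FailNow(): nothing below runs on failure
	require.Equal(t, "ok", val)
}

func TestAssertKeepsGoing(t *testing.T) {
	val, err := doSomething()
	assert.NoError(t, err)     // records the failure but continues
	assert.Equal(t, "ok", val) // still executed, possibly against invalid state
}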

View File

@ -11,18 +11,21 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/loader"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/parsers"
"github.com/projectdiscovery/nuclei/v3/pkg/templates"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
)
// log available templates for verbose (-vv)
func (r *Runner) logAvailableTemplate(tplPath string) {
t, err := parsers.ParseTemplate(tplPath, r.catalog)
t, err := r.parser.ParseTemplate(tplPath, r.catalog)
tpl, ok := t.(*templates.Template)
if !ok {
panic("not a template")
}
if err != nil {
gologger.Error().Msgf("Could not parse file '%s': %s\n", tplPath, err)
} else {
r.verboseTemplate(t)
r.verboseTemplate(tpl)
}
}

View File

@ -5,12 +5,11 @@ import (
"time"
"github.com/logrusorgru/aurora"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/config"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/loader"
"github.com/projectdiscovery/nuclei/v3/pkg/core"
"github.com/projectdiscovery/nuclei/v3/pkg/input/provider"
"github.com/projectdiscovery/nuclei/v3/pkg/loader/workflow"
"github.com/projectdiscovery/nuclei/v3/pkg/output"
"github.com/projectdiscovery/nuclei/v3/pkg/parsers"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
"github.com/projectdiscovery/ratelimit"
@ -40,6 +39,7 @@ func createEphemeralObjects(base *NucleiEngine, opts *types.Options) (*unsafeOpt
HostErrorsCache: base.hostErrCache,
Colorizer: aurora.NewAurora(true),
ResumeCfg: types.NewResumeCfg(),
Parser: base.parser,
}
if opts.RateLimitMinute > 0 {
u.executerOpts.RateLimiter = ratelimit.New(context.Background(), uint(opts.RateLimitMinute), time.Minute)
@ -88,7 +88,6 @@ func (e *ThreadSafeNucleiEngine) GlobalLoadAllTemplates() error {
// GlobalResultCallback sets a callback function which will be called for each result
func (e *ThreadSafeNucleiEngine) GlobalResultCallback(callback func(event *output.ResultEvent)) {
e.eng.resultCallbacks = []func(*output.ResultEvent){callback}
config.DefaultConfig.PurgeGlobalCache()
}
// ExecuteWithCallback executes templates on targets and calls callback on each result(only if results are found)
@ -110,7 +109,7 @@ func (e *ThreadSafeNucleiEngine) ExecuteNucleiWithOpts(targets []string, opts ..
}
// load templates
workflowLoader, err := parsers.NewLoader(&unsafeOpts.executerOpts)
workflowLoader, err := workflow.NewLoader(&unsafeOpts.executerOpts)
if err != nil {
return errorutil.New("Could not create workflow loader: %s\n", err)
}

View File

@ -11,8 +11,8 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/core"
"github.com/projectdiscovery/nuclei/v3/pkg/input/provider"
providerTypes "github.com/projectdiscovery/nuclei/v3/pkg/input/types"
"github.com/projectdiscovery/nuclei/v3/pkg/loader/workflow"
"github.com/projectdiscovery/nuclei/v3/pkg/output"
"github.com/projectdiscovery/nuclei/v3/pkg/parsers"
"github.com/projectdiscovery/nuclei/v3/pkg/progress"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/hosterrorscache"
@ -72,6 +72,7 @@ type NucleiEngine struct {
mode engineMode
browserInstance *engine.Browser
httpClient *retryablehttp.Client
parser *templates.Parser
authprovider authprovider.AuthProvider
// unexported meta options
@ -86,7 +87,7 @@ type NucleiEngine struct {
// LoadAllTemplates loads all nuclei template based on given options
func (e *NucleiEngine) LoadAllTemplates() error {
workflowLoader, err := parsers.NewLoader(&e.executerOpts)
workflowLoader, err := workflow.NewLoader(&e.executerOpts)
if err != nil {
return errorutil.New("Could not create workflow loader: %s\n", err)
}

View File

@ -28,6 +28,7 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/protocolstate"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/http/httpclientpool"
"github.com/projectdiscovery/nuclei/v3/pkg/reporting"
"github.com/projectdiscovery/nuclei/v3/pkg/templates"
"github.com/projectdiscovery/nuclei/v3/pkg/testutils"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
nucleiUtils "github.com/projectdiscovery/nuclei/v3/pkg/utils"
@ -113,6 +114,8 @@ func (e *NucleiEngine) init() error {
e.httpClient = httpclient
}
e.parser = templates.NewParser()
_ = protocolstate.Init(e.opts)
_ = protocolinit.Init(e.opts)
e.applyRequiredDefaults()
@ -157,6 +160,7 @@ func (e *NucleiEngine) init() error {
Colorizer: aurora.NewAurora(true),
ResumeCfg: types.NewResumeCfg(),
Browser: e.browserInstance,
Parser: e.parser,
}
if len(e.opts.SecretsFile) > 0 {
authTmplStore, err := runner.GetAuthTmplStore(*e.opts, e.catalog, e.executerOpts)

BIN
memogen

Binary file not shown.

View File

@ -45,9 +45,6 @@ type Config struct {
LatestNucleiTemplatesVersion string `json:"nuclei-templates-latest-version"`
LatestNucleiIgnoreHash string `json:"nuclei-latest-ignore-hash,omitempty"`
// Other AppLevel/Global Settings
registerdCaches []GlobalCache `json:"-"` // registered global caches
// internal / unexported fields
disableUpdates bool `json:"-"` // disable updates both version check and template updates
homeDir string `json:"-"` // User Home Directory
@ -301,19 +298,6 @@ func (c *Config) WriteTemplatesIndex(index map[string]string) error {
return os.WriteFile(indexFile, buff.Bytes(), 0600)
}
// RegisterGlobalCache registers a global cache at app level
// and is available to be purged on demand
func (c *Config) RegisterGlobalCache(cache GlobalCache) {
c.registerdCaches = append(c.registerdCaches, cache)
}
// PurgeGlobalCache purges all registered global caches
func (c *Config) PurgeGlobalCache() {
for _, cache := range c.registerdCaches {
cache.Purge()
}
}
// getTemplatesConfigFilePath returns configDir/.templates-config.json file path
func (c *Config) getTemplatesConfigFilePath() string {
return filepath.Join(c.configDir, TemplateConfigFileName)

View File

@ -13,13 +13,6 @@ import (
stringsutil "github.com/projectdiscovery/utils/strings"
)
// GlobalCache are global cache that have global
// scope and are not purged but can be purged
// via config.DefaultConfig
type GlobalCache interface {
Purge()
}
var knownConfigFiles = []string{"cves.json", "contributors.json", "TEMPLATES-STATS.json"}
// TemplateFormat

View File

@ -13,9 +13,9 @@ import (
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/config"
cfg "github.com/projectdiscovery/nuclei/v3/pkg/catalog/config"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/loader/filter"
"github.com/projectdiscovery/nuclei/v3/pkg/model/types/severity"
"github.com/projectdiscovery/nuclei/v3/pkg/parsers"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols"
"github.com/projectdiscovery/nuclei/v3/pkg/templates"
templateTypes "github.com/projectdiscovery/nuclei/v3/pkg/templates/types"
@ -67,7 +67,7 @@ type Config struct {
// Store is a storage for loaded nuclei templates
type Store struct {
tagFilter *filter.TagFilter
tagFilter *templates.TagFilter
pathFilter *filter.PathFilter
config *Config
finalTemplates []string
@ -113,7 +113,7 @@ func NewConfig(options *types.Options, catalog catalog.Catalog, executerOpts pro
// New creates a new template store based on provided configuration
func New(cfg *Config) (*Store, error) {
tagFilter, err := filter.New(&filter.Config{
tagFilter, err := templates.NewTagFilter(&templates.TagFilterConfig{
Tags: cfg.Tags,
ExcludeTags: cfg.ExcludeTags,
Authors: cfg.Authors,
@ -269,25 +269,27 @@ func (store *Store) ValidateTemplates() error {
filteredTemplatePaths := store.pathFilter.Match(templatePaths)
filteredWorkflowPaths := store.pathFilter.Match(workflowPaths)
if areTemplatesValid(store, filteredTemplatePaths) && areWorkflowsValid(store, filteredWorkflowPaths) {
if store.areTemplatesValid(filteredTemplatePaths) && store.areWorkflowsValid(filteredWorkflowPaths) {
return nil
}
return errors.New("errors occurred during template validation")
}
func areWorkflowsValid(store *Store, filteredWorkflowPaths map[string]struct{}) bool {
return areWorkflowOrTemplatesValid(store, filteredWorkflowPaths, true, func(templatePath string, tagFilter *filter.TagFilter) (bool, error) {
return parsers.LoadWorkflow(templatePath, store.config.Catalog)
func (store *Store) areWorkflowsValid(filteredWorkflowPaths map[string]struct{}) bool {
return store.areWorkflowOrTemplatesValid(filteredWorkflowPaths, true, func(templatePath string, tagFilter *templates.TagFilter) (bool, error) {
return false, nil
// return store.config.ExecutorOptions.Parser.LoadWorkflow(templatePath, store.config.Catalog)
})
}
func areTemplatesValid(store *Store, filteredTemplatePaths map[string]struct{}) bool {
return areWorkflowOrTemplatesValid(store, filteredTemplatePaths, false, func(templatePath string, tagFilter *filter.TagFilter) (bool, error) {
return parsers.LoadTemplate(templatePath, store.tagFilter, nil, store.config.Catalog)
func (store *Store) areTemplatesValid(filteredTemplatePaths map[string]struct{}) bool {
return store.areWorkflowOrTemplatesValid(filteredTemplatePaths, false, func(templatePath string, tagFilter *templates.TagFilter) (bool, error) {
return false, nil
// return store.config.ExecutorOptions.Parser.LoadTemplate(templatePath, store.tagFilter, nil, store.config.Catalog)
})
}
func areWorkflowOrTemplatesValid(store *Store, filteredTemplatePaths map[string]struct{}, isWorkflow bool, load func(templatePath string, tagFilter *filter.TagFilter) (bool, error)) bool {
func (store *Store) areWorkflowOrTemplatesValid(filteredTemplatePaths map[string]struct{}, isWorkflow bool, load func(templatePath string, tagFilter *templates.TagFilter) (bool, error)) bool {
areTemplatesValid := true
for templatePath := range filteredTemplatePaths {
@ -340,7 +342,7 @@ func areWorkflowTemplatesValid(store *Store, workflows []*workflows.WorkflowTemp
}
func isParsingError(message string, template string, err error) bool {
if errors.Is(err, filter.ErrExcluded) {
if errors.Is(err, templates.ErrExcluded) {
return false
}
if errors.Is(err, templates.ErrCreateTemplateExecutor) {
@ -363,7 +365,7 @@ func (store *Store) LoadWorkflows(workflowsList []string) []*templates.Template
loadedWorkflows := make([]*templates.Template, 0, len(workflowPathMap))
for workflowPath := range workflowPathMap {
loaded, err := parsers.LoadWorkflow(workflowPath, store.config.Catalog)
loaded, err := store.config.ExecutorOptions.Parser.LoadWorkflow(workflowPath, store.config.Catalog)
if err != nil {
gologger.Warning().Msgf("Could not load workflow %s: %s\n", workflowPath, err)
}
@ -388,43 +390,43 @@ func (store *Store) LoadTemplatesWithTags(templatesList, tags []string) []*templ
loadedTemplates := make([]*templates.Template, 0, len(templatePathMap))
for templatePath := range templatePathMap {
loaded, err := parsers.LoadTemplate(templatePath, store.tagFilter, tags, store.config.Catalog)
loaded, err := store.config.ExecutorOptions.Parser.LoadTemplate(templatePath, store.tagFilter, tags, store.config.Catalog)
if loaded || store.pathFilter.MatchIncluded(templatePath) {
parsed, err := templates.Parse(templatePath, store.preprocessor, store.config.ExecutorOptions)
if err != nil {
// exclude templates not compatible with offline matching from total runtime warning stats
if !errors.Is(err, templates.ErrIncompatibleWithOfflineMatching) {
stats.Increment(parsers.RuntimeWarningsStats)
stats.Increment(templates.RuntimeWarningsStats)
}
gologger.Warning().Msgf("Could not parse template %s: %s\n", templatePath, err)
} else if parsed != nil {
if !parsed.Verified && store.config.ExecutorOptions.Options.DisableUnsignedTemplates {
// skip unverified templates when prompted to
stats.Increment(parsers.SkippedUnsignedStats)
stats.Increment(templates.SkippedUnsignedStats)
continue
}
if len(parsed.RequestsHeadless) > 0 && !store.config.ExecutorOptions.Options.Headless {
// do not include headless template in final list if headless flag is not set
stats.Increment(parsers.HeadlessFlagWarningStats)
stats.Increment(templates.HeadlessFlagWarningStats)
if config.DefaultConfig.LogAllEvents {
gologger.Print().Msgf("[%v] Headless flag is required for headless template '%s'.\n", aurora.Yellow("WRN").String(), templatePath)
}
} else if len(parsed.RequestsCode) > 0 && !store.config.ExecutorOptions.Options.EnableCodeTemplates {
// do not include 'Code' protocol custom template in final list if code flag is not set
stats.Increment(parsers.CodeFlagWarningStats)
stats.Increment(templates.CodeFlagWarningStats)
if config.DefaultConfig.LogAllEvents {
gologger.Print().Msgf("[%v] Code flag is required for code protocol template '%s'.\n", aurora.Yellow("WRN").String(), templatePath)
}
} else if len(parsed.RequestsCode) > 0 && !parsed.Verified && len(parsed.Workflows) == 0 {
// do not include unverified 'Code' protocol custom template in final list
stats.Increment(parsers.UnsignedCodeWarning)
stats.Increment(templates.UnsignedCodeWarning)
// these will be skipped so increment skip counter
stats.Increment(parsers.SkippedUnsignedStats)
stats.Increment(templates.SkippedUnsignedStats)
if config.DefaultConfig.LogAllEvents {
gologger.Print().Msgf("[%v] Tampered/Unsigned template at %v.\n", aurora.Yellow("WRN").String(), templatePath)
}
} else if parsed.IsFuzzing() && !store.config.ExecutorOptions.Options.FuzzTemplates {
stats.Increment(parsers.FuzzFlagWarningStats)
stats.Increment(templates.FuzzFlagWarningStats)
if config.DefaultConfig.LogAllEvents {
gologger.Print().Msgf("[%v] Fuzz flag is required for fuzzing template '%s'.\n", aurora.Yellow("WRN").String(), templatePath)
}
@ -436,9 +438,9 @@ func (store *Store) LoadTemplatesWithTags(templatesList, tags []string) []*templ
}
}
if err != nil {
if strings.Contains(err.Error(), filter.ErrExcluded.Error()) {
stats.Increment(parsers.TemplatesExecutedStats)
if config.DefaultConfig.LogAllEvents {
if strings.Contains(err.Error(), templates.ErrExcluded.Error()) {
stats.Increment(templates.TemplatesExecutedStats)
if cfg.DefaultConfig.LogAllEvents {
gologger.Print().Msgf("[%v] %v\n", aurora.Yellow("WRN").String(), err.Error())
}
continue

View File

@ -0,0 +1,11 @@
package parser
import (
"github.com/projectdiscovery/nuclei/v3/pkg/catalog"
)
type Parser interface {
LoadTemplate(templatePath string, tagFilter any, extraTags []string, catalog catalog.Catalog) (bool, error)
ParseTemplate(templatePath string, catalog catalog.Catalog) (any, error)
LoadWorkflow(templatePath string, catalog catalog.Catalog) (bool, error)
}
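
The interface takes the tag filter and returns the parsed template as any, presumably so pkg/loader/parser does not have to import pkg/templates and reintroduce an import cycle; the concrete values are type-asserted back on the consumer side. A minimal sketch of that consumer pattern, assuming an already populated *protocols.ExecutorOptions (as in the runner and the workflow loader below):

package sketch

import (
	"fmt"

	"github.com/projectdiscovery/nuclei/v3/pkg/protocols"
	"github.com/projectdiscovery/nuclei/v3/pkg/templates"
)

// parseOne shows the consumer-side pattern used throughout this PR:
// ParseTemplate returns any, callers assert it back to *templates.Template.
func parseOne(opts *protocols.ExecutorOptions, path string) (*templates.Template, error) {
	t, err := opts.Parser.ParseTemplate(path, opts.Catalog)
	if err != nil {
		return nil, err
	}
	tpl, ok := t.(*templates.Template)
	if !ok {
		return nil, fmt.Errorf("%s did not parse into a template", path)
	}
	return tpl, nil
}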

View File

@ -1,4 +1,4 @@
package parsers
package workflow
import (
"github.com/projectdiscovery/gologger"
@ -6,17 +6,18 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/loader/filter"
"github.com/projectdiscovery/nuclei/v3/pkg/model"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols"
"github.com/projectdiscovery/nuclei/v3/pkg/templates"
)
type workflowLoader struct {
pathFilter *filter.PathFilter
tagFilter *filter.TagFilter
tagFilter *templates.TagFilter
options *protocols.ExecutorOptions
}
// NewLoader returns a new workflow loader structure
func NewLoader(options *protocols.ExecutorOptions) (model.WorkflowLoader, error) {
tagFilter, err := filter.New(&filter.Config{
tagFilter, err := templates.NewTagFilter(&templates.TagFilterConfig{
Authors: options.Options.Authors,
Tags: options.Options.Tags,
ExcludeTags: options.Options.ExcludeTags,
@ -50,7 +51,7 @@ func (w *workflowLoader) GetTemplatePathsByTags(templateTags []string) []string
loadedTemplates := make([]string, 0, len(templatePathMap))
for templatePath := range templatePathMap {
loaded, _ := LoadTemplate(templatePath, w.tagFilter, templateTags, w.options.Catalog)
loaded, _ := w.options.Parser.LoadTemplate(templatePath, w.tagFilter, templateTags, w.options.Catalog)
if loaded {
loadedTemplates = append(loadedTemplates, templatePath)
}
@ -67,7 +68,7 @@ func (w *workflowLoader) GetTemplatePaths(templatesList []string, noValidate boo
loadedTemplates := make([]string, 0, len(templatesPathMap))
for templatePath := range templatesPathMap {
matched, err := LoadTemplate(templatePath, w.tagFilter, nil, w.options.Catalog)
matched, err := w.options.Parser.LoadTemplate(templatePath, w.tagFilter, nil, w.options.Catalog)
if err != nil && !matched {
gologger.Warning().Msg(err.Error())
} else if matched || noValidate {

View File

@ -7,7 +7,7 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/model/types/severity"
"github.com/projectdiscovery/nuclei/v3/pkg/model/types/stringslice"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gopkg.in/yaml.v2"
)
@ -29,10 +29,10 @@ func TestInfoJsonMarshal(t *testing.T) {
}
result, err := json.Marshal(&info)
assert.Nil(t, err)
require.Nil(t, err)
expected := `{"name":"Test Template Name","author":["forgedhallpass","ice3man"],"tags":["cve","misc"],"description":"Test description","reference":"Reference1","severity":"high","metadata":{"array_key":["array_value1","array_value2"],"map_key":{"key1":"val1"},"string_key":"string_value"}}`
assert.Equal(t, expected, string(result))
require.Equal(t, expected, string(result))
}
func TestInfoYamlMarshal(t *testing.T) {
@ -53,7 +53,7 @@ func TestInfoYamlMarshal(t *testing.T) {
}
result, err := yaml.Marshal(&info)
assert.Nil(t, err)
require.Nil(t, err)
expected := `name: Test Template Name
author:
@ -73,7 +73,7 @@ metadata:
key1: val1
string_key: string_value
`
assert.Equal(t, expected, string(result))
require.Equal(t, expected, string(result))
}
func TestUnmarshal(t *testing.T) {
@ -94,13 +94,13 @@ func TestUnmarshal(t *testing.T) {
t.Helper()
info := Info{}
err := yaml.Unmarshal([]byte(yamlPayload), &info)
assert.Nil(t, err)
assert.Equal(t, info.Name, templateName)
assert.Equal(t, info.Authors.ToSlice(), authors)
assert.Equal(t, info.Tags.ToSlice(), tags)
assert.Equal(t, info.SeverityHolder.Severity, severity.Critical)
assert.Equal(t, info.Reference.ToSlice(), references)
assert.Equal(t, info.Metadata, dynamicKeysMap)
require.Nil(t, err)
require.Equal(t, info.Name, templateName)
require.Equal(t, info.Authors.ToSlice(), authors)
require.Equal(t, info.Tags.ToSlice(), tags)
require.Equal(t, info.SeverityHolder.Severity, severity.Critical)
require.Equal(t, info.Reference.ToSlice(), references)
require.Equal(t, info.Metadata, dynamicKeysMap)
return info
}
@ -133,5 +133,5 @@ func TestUnmarshal(t *testing.T) {
info1 := assertUnmarshalledTemplateInfo(t, yamlPayload1)
info2 := assertUnmarshalledTemplateInfo(t, yamlPayload2)
assert.Equal(t, info1, info2)
require.Equal(t, info1, info2)
}

View File

@ -5,7 +5,6 @@ import (
"gopkg.in/yaml.v2"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@ -17,8 +16,8 @@ func TestYamlMarshal(t *testing.T) {
severity := Holder{Severity: High}
marshalled, err := severity.MarshalYAML()
assert.Nil(t, err, "could not marshal yaml")
assert.Equal(t, "high", marshalled, "could not marshal severity correctly")
require.Nil(t, err, "could not marshal yaml")
require.Equal(t, "high", marshalled, "could not marshal severity correctly")
}
func TestYamlUnmarshalFail(t *testing.T) {
@ -27,7 +26,7 @@ func TestYamlUnmarshalFail(t *testing.T) {
func TestGetSupportedSeverities(t *testing.T) {
severities := GetSupportedSeverities()
assert.Equal(t, severities, Severities{Info, Low, Medium, High, Critical, Unknown})
require.Equal(t, severities, Severities{Info, Low, Medium, High, Critical, Unknown})
}
func testUnmarshal(t *testing.T, unmarshaller func(data []byte, v interface{}) error, payloadCreator func(value string) string) {
@ -43,15 +42,15 @@ func testUnmarshal(t *testing.T, unmarshaller func(data []byte, v interface{}) e
for _, payload := range payloads { // nolint:scopelint // false-positive
t.Run(payload, func(t *testing.T) {
result := unmarshal(payload, unmarshaller)
assert.Equal(t, result.Severity, Info)
assert.Equal(t, result.Severity.String(), "info")
require.Equal(t, result.Severity, Info)
require.Equal(t, result.Severity.String(), "info")
})
}
}
func testUnmarshalFail(t *testing.T, unmarshaller func(data []byte, v interface{}) error, payloadCreator func(value string) string) {
t.Helper()
assert.Panics(t, func() { unmarshal(payloadCreator("invalid"), unmarshaller) })
require.Panics(t, func() { unmarshal(payloadCreator("invalid"), unmarshaller) })
}
func unmarshal(value string, unmarshaller func(data []byte, v interface{}) error) Holder {

View File

@ -5,7 +5,6 @@ import (
"testing"
"github.com/Knetic/govaluate"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@ -41,7 +40,7 @@ func testDslExpressionScenarios(t *testing.T, dslExpressions map[string]interfac
actualResult := evaluateExpression(t, dslExpression)
if expectedResult != nil {
assert.Equal(t, expectedResult, actualResult)
require.Equal(t, expectedResult, actualResult)
}
fmt.Printf("%s: \t %v\n", dslExpression, actualResult)

View File

@ -1,199 +0,0 @@
package parsers
import (
"encoding/json"
"fmt"
"regexp"
"strings"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/config"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/loader/filter"
"github.com/projectdiscovery/nuclei/v3/pkg/templates"
"github.com/projectdiscovery/nuclei/v3/pkg/templates/cache"
"github.com/projectdiscovery/nuclei/v3/pkg/templates/types"
"github.com/projectdiscovery/nuclei/v3/pkg/utils"
"github.com/projectdiscovery/nuclei/v3/pkg/utils/stats"
"gopkg.in/yaml.v2"
)
const (
errMandatoryFieldMissingFmt = "mandatory '%s' field is missing"
errInvalidFieldFmt = "invalid field format for '%s' (allowed format is %s)"
warningFieldMissingFmt = "field '%s' is missing"
CouldNotLoadTemplate = "Could not load template %s: %s"
LoadedWithWarnings = "Loaded template %s: with syntax warning : %s"
)
// LoadTemplate returns true if the template is valid and matches the filtering criteria.
func LoadTemplate(templatePath string, tagFilter *filter.TagFilter, extraTags []string, catalog catalog.Catalog) (bool, error) {
template, templateParseError := ParseTemplate(templatePath, catalog)
if templateParseError != nil {
return false, fmt.Errorf(CouldNotLoadTemplate, templatePath, templateParseError)
}
if len(template.Workflows) > 0 {
return false, nil
}
validationError := validateTemplateMandatoryFields(template)
if validationError != nil {
stats.Increment(SyntaxErrorStats)
return false, fmt.Errorf(CouldNotLoadTemplate, templatePath, validationError)
}
ret, err := isTemplateInfoMetadataMatch(tagFilter, template, extraTags)
if err != nil {
return ret, fmt.Errorf(CouldNotLoadTemplate, templatePath, err)
}
// if template loaded then check the template for optional fields to add warnings
if ret {
validationWarning := validateTemplateOptionalFields(template)
if validationWarning != nil {
stats.Increment(SyntaxWarningStats)
return ret, fmt.Errorf(LoadedWithWarnings, templatePath, validationWarning)
}
}
return ret, nil
}
// LoadWorkflow returns true if the workflow is valid and matches the filtering criteria.
func LoadWorkflow(templatePath string, catalog catalog.Catalog) (bool, error) {
template, templateParseError := ParseTemplate(templatePath, catalog)
if templateParseError != nil {
return false, templateParseError
}
if len(template.Workflows) > 0 {
if validationError := validateTemplateMandatoryFields(template); validationError != nil {
stats.Increment(SyntaxErrorStats)
return false, validationError
}
return true, nil
}
return false, nil
}
func isTemplateInfoMetadataMatch(tagFilter *filter.TagFilter, template *templates.Template, extraTags []string) (bool, error) {
match, err := tagFilter.Match(template, extraTags)
if err == filter.ErrExcluded {
return false, filter.ErrExcluded
}
return match, err
}
// validateTemplateMandatoryFields validates the mandatory fields of a template
// return error from this function will cause hard fail and not proceed further
func validateTemplateMandatoryFields(template *templates.Template) error {
info := template.Info
var errors []string
if utils.IsBlank(info.Name) {
errors = append(errors, fmt.Sprintf(errMandatoryFieldMissingFmt, "name"))
}
if info.Authors.IsEmpty() {
errors = append(errors, fmt.Sprintf(errMandatoryFieldMissingFmt, "author"))
}
if template.ID == "" {
errors = append(errors, fmt.Sprintf(errMandatoryFieldMissingFmt, "id"))
} else if !templateIDRegexp.MatchString(template.ID) {
errors = append(errors, fmt.Sprintf(errInvalidFieldFmt, "id", templateIDRegexp.String()))
}
if len(errors) > 0 {
return fmt.Errorf(strings.Join(errors, ", "))
}
return nil
}
// validateTemplateOptionalFields validates the optional fields of a template
// return error from this function will throw a warning and proceed further
func validateTemplateOptionalFields(template *templates.Template) error {
info := template.Info
var warnings []string
if template.Type() != types.WorkflowProtocol && utils.IsBlank(info.SeverityHolder.Severity.String()) {
warnings = append(warnings, fmt.Sprintf(warningFieldMissingFmt, "severity"))
}
if len(warnings) > 0 {
return fmt.Errorf(strings.Join(warnings, ", "))
}
return nil
}
var (
parsedTemplatesCache *cache.Templates
ShouldValidate bool
NoStrictSyntax bool
templateIDRegexp = regexp.MustCompile(`^([a-zA-Z0-9]+[-_])*[a-zA-Z0-9]+$`)
)
const (
SyntaxWarningStats = "syntax-warnings"
SyntaxErrorStats = "syntax-errors"
RuntimeWarningsStats = "runtime-warnings"
UnsignedCodeWarning = "unsigned-warnings"
HeadlessFlagWarningStats = "headless-flag-missing-warnings"
TemplatesExecutedStats = "templates-executed"
CodeFlagWarningStats = "code-flag-missing-warnings"
FuzzFlagWarningStats = "fuzz-flag-missing-warnings"
// Note: this is redefined in workflows.go to avoid circular dependency, so make sure to keep it in sync
SkippedUnsignedStats = "skipped-unsigned-stats" // tracks loading of unsigned templates
)
func init() {
parsedTemplatesCache = cache.New()
config.DefaultConfig.RegisterGlobalCache(parsedTemplatesCache)
stats.NewEntry(SyntaxWarningStats, "Found %d templates with syntax warning (use -validate flag for further examination)")
stats.NewEntry(SyntaxErrorStats, "Found %d templates with syntax error (use -validate flag for further examination)")
stats.NewEntry(RuntimeWarningsStats, "Found %d templates with runtime error (use -validate flag for further examination)")
stats.NewEntry(UnsignedCodeWarning, "Found %d unsigned or tampered code template (carefully examine before using it & use -sign flag to sign them)")
stats.NewEntry(HeadlessFlagWarningStats, "Excluded %d headless template[s] (disabled as default), use -headless option to run headless templates.")
stats.NewEntry(CodeFlagWarningStats, "Excluded %d code template[s] (disabled as default), use -code option to run code templates.")
stats.NewEntry(TemplatesExecutedStats, "Excluded %d template[s] with known weak matchers / tags excluded from default run using .nuclei-ignore")
stats.NewEntry(FuzzFlagWarningStats, "Excluded %d fuzz template[s] (disabled as default), use -fuzz option to run fuzz templates.")
stats.NewEntry(SkippedUnsignedStats, "Skipping %d unsigned template[s]")
}
// ParseTemplate parses a template and returns a *templates.Template structure
func ParseTemplate(templatePath string, catalog catalog.Catalog) (*templates.Template, error) {
if value, err := parsedTemplatesCache.Has(templatePath); value != nil {
return value.(*templates.Template), err
}
data, err := utils.ReadFromPathOrURL(templatePath, catalog)
if err != nil {
return nil, err
}
template := &templates.Template{}
switch config.GetTemplateFormatFromExt(templatePath) {
case config.JSON:
err = json.Unmarshal(data, template)
case config.YAML:
if NoStrictSyntax {
err = yaml.Unmarshal(data, template)
} else {
err = yaml.UnmarshalStrict(data, template)
}
default:
err = fmt.Errorf("failed to identify template format expected JSON or YAML but got %v", templatePath)
}
if err != nil {
return nil, err
}
parsedTemplatesCache.Store(templatePath, template, nil)
return template, nil
}

View File

@ -4,9 +4,8 @@ import (
"encoding/hex"
"testing"
"github.com/stretchr/testify/assert"
"github.com/projectdiscovery/nuclei/v3/pkg/operators"
"github.com/stretchr/testify/require"
)
const input = "abcdefghijklmnabcdefghijklmnabcdefghijklmnabcdefghijklmnabcdefghijklmnabcdefghijklmnabcdefghijklmnabcdefghijklmnabcdefghijklmn"
@ -24,8 +23,8 @@ func TestHexDumpHighlighting(t *testing.T) {
t.Run("Test highlighting when the snippet is wrapped", func(t *testing.T) {
result, err := toHighLightedHexDump(hex.Dump([]byte(input)), "defghij")
assert.Nil(t, err)
assert.Equal(t, highlightedHexDumpResponse, result.String())
require.Nil(t, err)
require.Equal(t, highlightedHexDumpResponse, result.String())
})
t.Run("Test highlight when the snippet contains separator character", func(t *testing.T) {
@ -36,8 +35,8 @@ func TestHexDumpHighlighting(t *testing.T) {
"00000000 61 73 64 66 61 73 64 66 61 73 64 \x1b[32m61\x1b[0m \x1b[32m7c\x1b[0m \x1b[32m62\x1b[0m 61 73 |asdfasdfasd\x1b[32ma\x1b[0m\x1b[32m|\x1b[0m\x1b[32mb\x1b[0mas|\n" +
"00000010 64 66 61 64 73 64 66 73 7c |dfadsdfs||\n"
assert.Nil(t, err)
assert.Equal(t, expected, result.String())
require.Nil(t, err)
require.Equal(t, expected, result.String())
})
}
@ -59,7 +58,7 @@ func TestHighlight(t *testing.T) {
t.Run("Test highlighting when the snippet is wrapped", func(t *testing.T) {
result := Highlight(&operatorResult, hex.Dump([]byte(input)), false, true)
assert.Equal(t, multiSnippetHighlightHexDumpResponse, result)
require.Equal(t, multiSnippetHighlightHexDumpResponse, result)
})
t.Run("Test highlighting without hexdump", func(t *testing.T) {
@ -75,17 +74,17 @@ func TestHighlight(t *testing.T) {
"a\x1b[0m\x1b[32mb\x1b[0mc\x1b[32md\x1b[0m\x1b[32me\x1b[0m\x1b[32mf\x1b[0m\x1b[32mg\x1b[0m\x1b[32mh\x1b[0m\x1b[32mi\x1b[0m\x1b[32mj\x1b[0mklmn\x1b[32m" +
"a\x1b[0m\x1b[32mb\x1b[0mc\x1b[32md\x1b[0m\x1b[32me\x1b[0m\x1b[32mf\x1b[0m\x1b[32mg\x1b[0m\x1b[32mh\x1b[0m\x1b[32mi\x1b[0m\x1b[32mj\x1b[0mklmn"
print(result)
assert.Equal(t, expected, result)
require.Equal(t, expected, result)
})
t.Run("Test the response is not modified if noColor is true", func(t *testing.T) {
result := Highlight(&operatorResult, input, true, false)
assert.Equal(t, input, result)
require.Equal(t, input, result)
})
t.Run("Test the response is not modified if noColor is true", func(t *testing.T) {
result := Highlight(&operatorResult, hex.Dump([]byte(input)), true, true)
assert.Equal(t, hex.Dump([]byte(input)), result)
require.Equal(t, hex.Dump([]byte(input)), result)
})
}
@ -107,5 +106,5 @@ start ValueToMatch-2.1 end
"start \x1b[32mV\x1b[0m\x1b[32ma\x1b[0m\x1b[32ml\x1b[0m\x1b[32mu\x1b[0m\x1b[32me\x1b[0m\x1b[32mT\x1b[0m\x1b[32mo\x1b[0m\x1b[32mM\x1b[0m\x1b[32ma\x1b[0m\x1b[32mt\x1b[0m\x1b[32mc\x1b[0m\x1b[32mh\x1b[0m\x1b[32m-\x1b[0m\x1b[32m1\x1b[0m\x1b[32m.\x1b[0m\x1b[32m2\x1b[0m\x1b[32m.\x1b[0m\x1b[32m3\x1b[0m end\n" +
"start \x1b[32mV\x1b[0m\x1b[32ma\x1b[0m\x1b[32ml\x1b[0m\x1b[32mu\x1b[0m\x1b[32me\x1b[0m\x1b[32mT\x1b[0m\x1b[32mo\x1b[0m\x1b[32mM\x1b[0m\x1b[32ma\x1b[0m\x1b[32mt\x1b[0m\x1b[32mc\x1b[0m\x1b[32mh\x1b[0m\x1b[32m-\x1b[0m\x1b[32m2\x1b[0m\x1b[32m.\x1b[0m\x1b[32m1\x1b[0m end \n"
result := Highlight(&operatorResult, input, false, false)
assert.Equal(t, expected, result)
require.Equal(t, expected, result)
}

View File

@ -4,7 +4,7 @@ import (
"net"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestGetRandomIp(t *testing.T) {
@ -110,15 +110,15 @@ func TestGetRandomIp(t *testing.T) {
t.Run(test.name, func(t *testing.T) {
ip, err := GetRandomIPWithCidr(test.cidr...)
if test.valid {
assert.NoError(t, err)
require.NoError(t, err)
anyInRange := false
for _, cidr := range test.cidr {
_, network, _ := net.ParseCIDR(cidr)
anyInRange = anyInRange || network.Contains(ip)
}
assert.Truef(t, anyInRange, "the IP address returned %v is not in range of the provided CIDRs", ip)
require.Truef(t, anyInRange, "the IP address returned %v is not in range of the provided CIDRs", ip)
} else {
assert.Error(t, err, test.errorMsg)
require.Error(t, err, test.errorMsg)
}
})
}

View File

@ -3,7 +3,7 @@ package replacer
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestReplacerReplace(t *testing.T) {
@ -77,7 +77,7 @@ func TestReplacerReplace(t *testing.T) {
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
assert.Equal(t, test.expected, Replace(test.template, test.values))
require.Equal(t, test.expected, Replace(test.template, test.values))
})
}
}
@ -135,7 +135,7 @@ func TestReplacerReplaceOne(t *testing.T) {
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
assert.Equal(t, test.expected, ReplaceOne(test.template, test.key, test.value))
require.Equal(t, test.expected, ReplaceOne(test.template, test.key, test.value))
})
}
}

View File

@ -8,7 +8,7 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/config"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/disk"
"github.com/projectdiscovery/nuclei/v3/pkg/parsers"
"github.com/projectdiscovery/nuclei/v3/pkg/loader/workflow"
"github.com/projectdiscovery/nuclei/v3/pkg/progress"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols"
"github.com/projectdiscovery/nuclei/v3/pkg/templates"
@ -40,8 +40,9 @@ func setup() {
Browser: nil,
Catalog: disk.NewCatalog(config.DefaultConfig.TemplatesDirectory),
RateLimiter: ratelimit.New(context.Background(), uint(options.RateLimit), time.Second),
Parser: templates.NewParser(),
}
workflowLoader, err := parsers.NewLoader(&executerOpts)
workflowLoader, err := workflow.NewLoader(&executerOpts)
if err != nil {
log.Fatalf("Could not create workflow loader: %s\n", err)
}

View File

@ -14,6 +14,7 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/catalog"
"github.com/projectdiscovery/nuclei/v3/pkg/input"
"github.com/projectdiscovery/nuclei/v3/pkg/js/compiler"
"github.com/projectdiscovery/nuclei/v3/pkg/loader/parser"
"github.com/projectdiscovery/nuclei/v3/pkg/model"
"github.com/projectdiscovery/nuclei/v3/pkg/operators"
"github.com/projectdiscovery/nuclei/v3/pkg/operators/extractors"
@ -121,6 +122,7 @@ type ExecutorOptions struct {
AuthProvider authprovider.AuthProvider
//TemporaryDirectory is the directory to store temporary files
TemporaryDirectory string
Parser parser.Parser
}
// GetThreadsForPayloadRequests returns the number of threads to use as default for

View File

@ -4,7 +4,7 @@ import (
"strings"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestMarkDownHeaderCreation(t *testing.T) {
@ -21,7 +21,7 @@ func TestMarkDownHeaderCreation(t *testing.T) {
for _, currentTestCase := range testCases {
t.Run(strings.Join(currentTestCase.headers, ","), func(t1 *testing.T) {
assert.Equal(t1, CreateTableHeader(currentTestCase.headers...), currentTestCase.expectedValue)
require.Equal(t1, CreateTableHeader(currentTestCase.headers...), currentTestCase.expectedValue)
})
}
}
@ -34,8 +34,8 @@ func TestCreateTemplateInfoTableTooManyColumns(t *testing.T) {
{"h", "i"},
})
assert.NotNil(t, err)
assert.Empty(t, table)
require.NotNil(t, err)
require.Empty(t, table)
}
func TestCreateTemplateInfoTable1Column(t *testing.T) {
@ -48,8 +48,8 @@ func TestCreateTemplateInfoTable1Column(t *testing.T) {
| c |
`
assert.Nil(t, err)
assert.Equal(t, expected, table)
require.Nil(t, err)
require.Equal(t, expected, table)
}
func TestCreateTemplateInfoTable2Columns(t *testing.T) {
@ -66,8 +66,8 @@ func TestCreateTemplateInfoTable2Columns(t *testing.T) {
| d | e |
`
assert.Nil(t, err)
assert.Equal(t, expected, table)
require.Nil(t, err)
require.Equal(t, expected, table)
}
func TestCreateTemplateInfoTable3Columns(t *testing.T) {
@ -86,6 +86,6 @@ func TestCreateTemplateInfoTable3Columns(t *testing.T) {
| h | i | |
`
assert.Nil(t, err)
assert.Equal(t, expected, table)
require.Nil(t, err)
require.Equal(t, expected, table)
}

View File

@ -4,12 +4,11 @@ import (
"strings"
"testing"
"github.com/stretchr/testify/assert"
"github.com/projectdiscovery/nuclei/v3/pkg/model"
"github.com/projectdiscovery/nuclei/v3/pkg/model/types/severity"
"github.com/projectdiscovery/nuclei/v3/pkg/model/types/stringslice"
"github.com/projectdiscovery/nuclei/v3/pkg/reporting/exporters/markdown/util"
"github.com/stretchr/testify/require"
)
func TestToMarkdownTableString(t *testing.T) {
@ -44,6 +43,6 @@ func TestToMarkdownTableString(t *testing.T) {
actualAttributeSlice := strings.Split(result, "\n")
dynamicAttributeIndex := len(actualAttributeSlice) - len(expectedDynamicAttributes)
assert.Equal(t, strings.Split(expectedOrderedAttributes, "\n"), actualAttributeSlice[:dynamicAttributeIndex]) // the first part of the result is ordered
assert.ElementsMatch(t, expectedDynamicAttributes, actualAttributeSlice[dynamicAttributeIndex:]) // dynamic parameters are not ordered
require.Equal(t, strings.Split(expectedOrderedAttributes, "\n"), actualAttributeSlice[:dynamicAttributeIndex]) // the first part of the result is ordered
require.ElementsMatch(t, expectedDynamicAttributes, actualAttributeSlice[dynamicAttributeIndex:]) // dynamic parameters are not ordered
}

View File

@ -1,21 +1,22 @@
package jira
import (
"github.com/stretchr/testify/assert"
"strings"
"testing"
"github.com/stretchr/testify/require"
)
func TestLinkCreation(t *testing.T) {
jiraIntegration := &Integration{}
link := jiraIntegration.CreateLink("ProjectDiscovery", "https://projectdiscovery.io")
assert.Equal(t, "[ProjectDiscovery|https://projectdiscovery.io]", link)
require.Equal(t, "[ProjectDiscovery|https://projectdiscovery.io]", link)
}
func TestHorizontalLineCreation(t *testing.T) {
jiraIntegration := &Integration{}
horizontalLine := jiraIntegration.CreateHorizontalLine()
assert.True(t, strings.Contains(horizontalLine, "----"))
require.True(t, strings.Contains(horizontalLine, "----"))
}
func TestTableCreation(t *testing.T) {
@ -27,11 +28,11 @@ func TestTableCreation(t *testing.T) {
{"d", "e"},
})
assert.Nil(t, err)
require.Nil(t, err)
expected := `| key | value |
| a | b |
| c | |
| d | e |
`
assert.Equal(t, expected, table)
require.Equal(t, expected, table)
}

42
pkg/templates/cache.go Normal file
View File

@ -0,0 +1,42 @@
package templates
import (
"github.com/projectdiscovery/utils/conversion"
mapsutil "github.com/projectdiscovery/utils/maps"
)
// Cache is a cache for storing and reusing parsed templates.
type Cache struct {
items *mapsutil.SyncLockMap[string, parsedTemplate]
}
// NewCache returns a new templates cache
func NewCache() *Cache {
return &Cache{items: mapsutil.NewSyncLockMap[string, parsedTemplate]()}
}
type parsedTemplate struct {
template *Template
raw string
err error
}
// Has returns the cached template, if any, along with its raw bytes
// and any error recorded when it was stored.
func (t *Cache) Has(template string) (*Template, []byte, error) {
value, ok := t.items.Get(template)
if !ok {
return nil, nil, nil
}
return value.template, conversion.Bytes(value.raw), value.err
}
// Store stores a template with data and error
func (t *Cache) Store(id string, tpl *Template, raw []byte, err error) {
_ = t.items.Set(id, parsedTemplate{template: tpl, raw: conversion.String(raw), err: err})
}
// Purge the cache
func (t *Cache) Purge() {
t.items.Clear()
}
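
Unlike the removed pkg/templates/cache package, the new Cache also keeps the raw template bytes, which is what lets Parse (later in this diff) rebuild a reader from the bytes already read by ParseTemplate instead of touching the disk or network again. A small usage sketch with illustrative key and data values:

package sketch

import "github.com/projectdiscovery/nuclei/v3/pkg/templates"

func cacheSketch() {
	c := templates.NewCache()

	// store a parsed template together with its raw bytes and any parse error
	c.Store("dns/example-id.yaml", &templates.Template{}, []byte("id: example-id"), nil)

	// lookups return the template, the original bytes and the stored error
	tpl, raw, err := c.Has("dns/example-id.yaml")
	_, _, _ = tpl, raw, err

	// drop everything, e.g. at the end of an execution
	c.Purge()
}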

View File

@ -1,40 +0,0 @@
package cache
import (
mapsutil "github.com/projectdiscovery/utils/maps"
)
// Templates is a cache for caching and storing templates for reuse.
type Templates struct {
items *mapsutil.SyncLockMap[string, parsedTemplateErrHolder]
}
// New returns a new templates cache
func New() *Templates {
return &Templates{items: mapsutil.NewSyncLockMap[string, parsedTemplateErrHolder]()}
}
type parsedTemplateErrHolder struct {
template interface{}
err error
}
// Has returns true if the cache has a template. The template
// is returned along with any errors if found.
func (t *Templates) Has(template string) (interface{}, error) {
value, ok := t.items.Get(template)
if !ok {
return nil, nil
}
return value.template, value.err
}
// Store stores a template with data and error
func (t *Templates) Store(template string, data interface{}, err error) {
_ = t.items.Set(template, parsedTemplateErrHolder{template: data, err: err})
}
// Purge the cache
func (t *Templates) Purge() {
t.items.Clear()
}

View File

@ -1,4 +1,4 @@
package cache
package templates
import (
"errors"
@ -8,15 +8,17 @@ import (
)
func TestCache(t *testing.T) {
templates := New()
templates := NewCache()
testErr := errors.New("test error")
data, err := templates.Has("test")
data, _, err := templates.Has("test")
require.Nil(t, err, "invalid value for err")
require.Nil(t, data, "invalid value for data")
templates.Store("test", "data", testErr)
data, err = templates.Has("test")
item := &Template{}
templates.Store("test", item, nil, testErr)
data, _, err = templates.Has("test")
require.Equal(t, testErr, err, "invalid value for err")
require.Equal(t, "data", data, "invalid value for data")
require.Equal(t, item, data, "invalid value for data")
}

View File

@ -1,6 +1,7 @@
package templates
import (
"bytes"
"encoding/json"
"fmt"
"io"
@ -18,11 +19,9 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/operators"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/offlinehttp"
"github.com/projectdiscovery/nuclei/v3/pkg/templates/cache"
"github.com/projectdiscovery/nuclei/v3/pkg/templates/signer"
"github.com/projectdiscovery/nuclei/v3/pkg/tmplexec"
"github.com/projectdiscovery/nuclei/v3/pkg/utils"
"github.com/projectdiscovery/retryablehttp-go"
errorutil "github.com/projectdiscovery/utils/errors"
stringsutil "github.com/projectdiscovery/utils/strings"
)
@ -30,7 +29,6 @@ import (
var (
ErrCreateTemplateExecutor = errors.New("cannot create template executer")
ErrIncompatibleWithOfflineMatching = errors.New("template can't be used for offline matching")
parsedTemplatesCache *cache.Templates
// track how many templates are verified and by which signer
SignatureStats = map[string]*atomic.Uint64{}
)
@ -40,41 +38,42 @@ const (
)
func init() {
parsedTemplatesCache = cache.New()
for _, verifier := range signer.DefaultTemplateVerifiers {
SignatureStats[verifier.Identifier()] = &atomic.Uint64{}
}
SignatureStats[Unsigned] = &atomic.Uint64{}
config.DefaultConfig.RegisterGlobalCache(parsedTemplatesCache)
}
// Parse parses a yaml request template file
// TODO: make sure a template read from disk is parsed only once: see parsers.ParseTemplate vs templates.Parse
//
//nolint:gocritic // this cannot be passed by pointer
func Parse(filePath string, preprocessor Preprocessor, options protocols.ExecutorOptions) (*Template, error) {
parser, ok := options.Parser.(*Parser)
if !ok {
panic("not a parser")
}
if !options.DoNotCache {
if value, err := parsedTemplatesCache.Has(filePath); value != nil {
return value.(*Template), err
if value, _, err := parser.compiledTemplatesCache.Has(filePath); value != nil {
return value, err
}
}
var reader io.ReadCloser
if utils.IsURL(filePath) {
// use retryablehttp (tls verification is enabled by default in the standard library)
resp, err := retryablehttp.DefaultClient().Get(filePath)
if err != nil {
return nil, err
if !options.DoNotCache {
_, raw, err := parser.parsedTemplatesCache.Has(filePath)
if err == nil && raw != nil {
reader = io.NopCloser(bytes.NewReader(raw))
}
reader = resp.Body
} else {
var err error
reader, err = options.Catalog.OpenFile(filePath)
}
var err error
if reader == nil {
reader, err = utils.ReaderFromPathOrURL(filePath, options.Catalog)
if err != nil {
return nil, err
}
}
defer reader.Close()
options.TemplatePath = filePath
template, err := ParseTemplateFromReader(reader, preprocessor, options.Copy())
if err != nil {
@ -90,7 +89,7 @@ func Parse(filePath string, preprocessor Preprocessor, options protocols.Executo
}
template.Path = filePath
if !options.DoNotCache {
parsedTemplatesCache.Store(filePath, template, err)
parser.compiledTemplatesCache.Store(filePath, template, nil, err)
}
return template, nil
}

View File

@ -13,12 +13,12 @@ import (
"github.com/julienschmidt/httprouter"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/config"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/disk"
"github.com/projectdiscovery/nuclei/v3/pkg/loader/workflow"
"github.com/projectdiscovery/nuclei/v3/pkg/model"
"github.com/projectdiscovery/nuclei/v3/pkg/model/types/severity"
"github.com/projectdiscovery/nuclei/v3/pkg/model/types/stringslice"
"github.com/projectdiscovery/nuclei/v3/pkg/operators"
"github.com/projectdiscovery/nuclei/v3/pkg/operators/matchers"
"github.com/projectdiscovery/nuclei/v3/pkg/parsers"
"github.com/projectdiscovery/nuclei/v3/pkg/progress"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/generators"
@ -47,8 +47,9 @@ func setup() {
Browser: nil,
Catalog: disk.NewCatalog(config.DefaultConfig.TemplatesDirectory),
RateLimiter: ratelimit.New(context.Background(), uint(options.RateLimit), time.Second),
Parser: templates.NewParser(),
}
workflowLoader, err := parsers.NewLoader(&executerOpts)
workflowLoader, err := workflow.NewLoader(&executerOpts)
if err != nil {
log.Fatalf("Could not create workflow loader: %s\n", err)
}

View File

@ -3,23 +3,23 @@ package templates
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func Test_appendAtSignToAuthors(t *testing.T) {
result := appendAtSignToAuthors([]string{"user1", "user2", "user3"})
assert.Equal(t, result, "@user1,@user2,@user3")
require.Equal(t, result, "@user1,@user2,@user3")
}
func Test_appendAtSignToMissingAuthors(t *testing.T) {
result := appendAtSignToAuthors([]string{})
assert.Equal(t, result, "@none")
require.Equal(t, result, "@none")
result = appendAtSignToAuthors(nil)
assert.Equal(t, result, "@none")
require.Equal(t, result, "@none")
}
func Test_appendAtSignToOneAuthor(t *testing.T) {
result := appendAtSignToAuthors([]string{"user1"})
assert.Equal(t, result, "@user1")
require.Equal(t, result, "@user1")
}

145 pkg/templates/parser.go Normal file
View File

@ -0,0 +1,145 @@
package templates
import (
"encoding/json"
"fmt"
"io"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/config"
"github.com/projectdiscovery/nuclei/v3/pkg/utils"
"github.com/projectdiscovery/nuclei/v3/pkg/utils/stats"
yamlutil "github.com/projectdiscovery/nuclei/v3/pkg/utils/yaml"
fileutil "github.com/projectdiscovery/utils/file"
"gopkg.in/yaml.v2"
)
type Parser struct {
ShouldValidate bool
NoStrictSyntax bool
// this cache can be copied safely between ephemeral instances
parsedTemplatesCache *Cache
// this cache might contain references to heap objects
// it is recommended to always empty it at the end of execution
compiledTemplatesCache *Cache
}
func NewParser() *Parser {
p := &Parser{
parsedTemplatesCache: NewCache(),
compiledTemplatesCache: NewCache(),
}
return p
}
// LoadTemplate returns true if the template is valid and matches the filtering criteria.
func (p *Parser) LoadTemplate(templatePath string, t any, extraTags []string, catalog catalog.Catalog) (bool, error) {
tagFilter, ok := t.(*TagFilter)
if !ok {
panic("not a *TagFilter")
}
t, templateParseError := p.ParseTemplate(templatePath, catalog)
if templateParseError != nil {
return false, ErrCouldNotLoadTemplate.Msgf(templatePath, templateParseError)
}
template, ok := t.(*Template)
if !ok {
panic("not a template")
}
if len(template.Workflows) > 0 {
return false, nil
}
validationError := validateTemplateMandatoryFields(template)
if validationError != nil {
stats.Increment(SyntaxErrorStats)
return false, ErrCouldNotLoadTemplate.Msgf(templatePath, validationError)
}
ret, err := isTemplateInfoMetadataMatch(tagFilter, template, extraTags)
if err != nil {
return ret, ErrCouldNotLoadTemplate.Msgf(templatePath, err)
}
// if the template loaded, check its optional fields and add warnings
if ret {
validationWarning := validateTemplateOptionalFields(template)
if validationWarning != nil {
stats.Increment(SyntaxWarningStats)
return ret, ErrCouldNotLoadTemplate.Msgf(templatePath, validationWarning)
}
}
return ret, nil
}
// ParseTemplate parses a template and returns a *templates.Template structure
func (p *Parser) ParseTemplate(templatePath string, catalog catalog.Catalog) (any, error) {
value, _, err := p.parsedTemplatesCache.Has(templatePath)
if value != nil {
return value, err
}
reader, err := utils.ReaderFromPathOrURL(templatePath, catalog)
if err != nil {
return nil, err
}
defer reader.Close()
data, err := io.ReadAll(reader)
if err != nil {
return nil, err
}
// pre-process directives only for local files
if fileutil.FileExists(templatePath) && config.GetTemplateFormatFromExt(templatePath) == config.YAML {
data, err = yamlutil.PreProcess(data)
if err != nil {
return nil, err
}
}
template := &Template{}
switch config.GetTemplateFormatFromExt(templatePath) {
case config.JSON:
err = json.Unmarshal(data, template)
case config.YAML:
if p.NoStrictSyntax {
err = yaml.Unmarshal(data, template)
} else {
err = yaml.UnmarshalStrict(data, template)
}
default:
err = fmt.Errorf("failed to identify template format expected JSON or YAML but got %v", templatePath)
}
if err != nil {
return nil, err
}
p.parsedTemplatesCache.Store(templatePath, template, data, nil)
return template, nil
}
// LoadWorkflow returns true if the workflow is valid and matches the filtering criteria.
func (p *Parser) LoadWorkflow(templatePath string, catalog catalog.Catalog) (bool, error) {
t, templateParseError := p.ParseTemplate(templatePath, catalog)
if templateParseError != nil {
return false, templateParseError
}
template, ok := t.(*Template)
if !ok {
panic("not a template")
}
if len(template.Workflows) > 0 {
if validationError := validateTemplateMandatoryFields(template); validationError != nil {
stats.Increment(SyntaxErrorStats)
return false, validationError
}
return true, nil
}
return false, nil
}
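Taken together, a loader drives the Parser roughly as follows; a minimal sketch assuming a local templates directory (the path and tag values are placeholders):
package main

import (
	"fmt"
	"log"

	"github.com/projectdiscovery/nuclei/v3/pkg/catalog/disk"
	"github.com/projectdiscovery/nuclei/v3/pkg/templates"
)

func main() {
	catalog := disk.NewCatalog("/path/to/nuclei-templates") // placeholder directory
	parser := templates.NewParser()

	// The tag filter decides which parsed templates are actually loaded.
	tagFilter, err := templates.NewTagFilter(&templates.TagFilterConfig{Tags: []string{"cve"}})
	if err != nil {
		log.Fatal(err)
	}

	path := "/path/to/nuclei-templates/example.yaml" // placeholder template
	loaded, err := parser.LoadTemplate(path, tagFilter, nil, catalog)
	if err != nil {
		log.Println("load warning/error:", err)
	}
	fmt.Println("loaded:", loaded)

	// Workflows go through LoadWorkflow instead; templates with a workflows
	// section are skipped by LoadTemplate.
	isWorkflow, _ := parser.LoadWorkflow(path, catalog)
	fmt.Println("is workflow:", isWorkflow)
}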

View File

@ -0,0 +1,7 @@
package templates
import "regexp"
var (
ReTemplateID = regexp.MustCompile(`^([a-zA-Z0-9]+[-_])*[a-zA-Z0-9]+$`)
)
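The pattern accepts alphanumeric tokens separated by single - or _ characters, which is what the ID validation and the loader tests below rely on; a small illustration:
package main

import (
	"fmt"

	"github.com/projectdiscovery/nuclei/v3/pkg/templates"
)

func main() {
	fmt.Println(templates.ReTemplateID.MatchString("CVE-2021-27330")) // true
	fmt.Println(templates.ReTemplateID.MatchString("invalid id"))     // false: spaces are not allowed
	fmt.Println(templates.ReTemplateID.MatchString("cve-"))           // false: cannot end with a separator
}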

View File

@ -0,0 +1,13 @@
package templates
import (
errorutil "github.com/projectdiscovery/utils/errors"
)
var (
ErrMandatoryFieldMissingFmt = errorutil.NewWithFmt("mandatory '%s' field is missing")
ErrInvalidField = errorutil.NewWithFmt("invalid field format for '%s' (allowed format is %s)")
ErrWarningFieldMissing = errorutil.NewWithFmt("field '%s' is missing")
ErrCouldNotLoadTemplate = errorutil.NewWithFmt("Could not load template %s: %s")
ErrLoadedWithWarnings = errorutil.NewWithFmt("Loaded template %s: with syntax warning : %s")
)
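These are lazy format wrappers from projectdiscovery's errorutil package; Msgf fills in the format and returns a regular error value, which the validation code below collects before joining. A minimal illustration (the exact rendered text depends on errorutil's formatting):
package main

import (
	"fmt"

	"github.com/projectdiscovery/nuclei/v3/pkg/templates"
)

func main() {
	// Msgf fills the format and returns a regular error value.
	missingName := templates.ErrMandatoryFieldMissingFmt.Msgf("name")
	badID := templates.ErrInvalidField.Msgf("id", templates.ReTemplateID.String())

	// The validators collect such errors before joining them.
	for _, err := range []error{missingName, badID} {
		fmt.Println(err)
	}
}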

View File

@ -0,0 +1,13 @@
package templates
const (
SyntaxWarningStats = "syntax-warnings"
SyntaxErrorStats = "syntax-errors"
RuntimeWarningsStats = "runtime-warnings"
UnsignedCodeWarning = "unsigned-warnings"
HeadlessFlagWarningStats = "headless-flag-missing-warnings"
TemplatesExecutedStats = "templates-executed"
CodeFlagWarningStats = "code-flag-missing-warnings"
FuzzFlagWarningStats = "fuzz-flag-missing-warnings"
SkippedUnsignedStats = "skipped-unsigned-stats" // tracks loading of unsigned templates
)

View File

@ -1,4 +1,4 @@
package parsers
package templates
import (
"errors"
@ -6,31 +6,28 @@ import (
"testing"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/disk"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/loader/filter"
"github.com/projectdiscovery/nuclei/v3/pkg/model"
"github.com/projectdiscovery/nuclei/v3/pkg/model/types/severity"
"github.com/projectdiscovery/nuclei/v3/pkg/model/types/stringslice"
"github.com/projectdiscovery/nuclei/v3/pkg/templates"
"github.com/stretchr/testify/require"
)
func TestLoadTemplate(t *testing.T) {
catalog := disk.NewCatalog("")
origTemplatesCache := parsedTemplatesCache
defer func() { parsedTemplatesCache = origTemplatesCache }()
p := NewParser()
tt := []struct {
name string
template *templates.Template
template *Template
templateErr error
filter filter.Config
filter TagFilterConfig
expectedErr error
isValid bool
}{
{
name: "valid",
template: &templates.Template{
template: &Template{
ID: "CVE-2021-27330",
Info: model.Info{
Name: "Valid template",
@ -42,24 +39,24 @@ func TestLoadTemplate(t *testing.T) {
},
{
name: "emptyTemplate",
template: &templates.Template{},
template: &Template{},
isValid: false,
expectedErr: errors.New("mandatory 'name' field is missing, mandatory 'author' field is missing, mandatory 'id' field is missing"),
expectedErr: errors.New("mandatory 'name' field is missing\nmandatory 'author' field is missing\nmandatory 'id' field is missing"),
},
{
name: "emptyNameWithInvalidID",
template: &templates.Template{
template: &Template{
ID: "invalid id",
Info: model.Info{
Authors: stringslice.StringSlice{Value: "Author"},
SeverityHolder: severity.Holder{Severity: severity.Medium},
},
},
expectedErr: errors.New("mandatory 'name' field is missing, invalid field format for 'id' (allowed format is ^([a-zA-Z0-9]+[-_])*[a-zA-Z0-9]+$)"),
expectedErr: errors.New("mandatory 'name' field is missing\ninvalid field format for 'id' (allowed format is ^([a-zA-Z0-9]+[-_])*[a-zA-Z0-9]+$)"),
},
{
name: "emptySeverity",
template: &templates.Template{
template: &Template{
ID: "CVE-2021-27330",
Info: model.Info{
Name: "Valid template",
@ -71,7 +68,7 @@ func TestLoadTemplate(t *testing.T) {
},
{
name: "template-without-severity-with-correct-filter-id",
template: &templates.Template{
template: &Template{
ID: "CVE-2021-27330",
Info: model.Info{
Name: "Valid template",
@ -81,11 +78,11 @@ func TestLoadTemplate(t *testing.T) {
// should be an error because the template is loaded
expectedErr: errors.New("field 'severity' is missing"),
isValid: true,
filter: filter.Config{IncludeIds: []string{"CVE-2021-27330"}},
filter: TagFilterConfig{IncludeIds: []string{"CVE-2021-27330"}},
},
{
name: "template-without-severity-with-diff-filter-id",
template: &templates.Template{
template: &Template{
ID: "CVE-2021-27330",
Info: model.Info{
Name: "Valid template",
@ -93,7 +90,7 @@ func TestLoadTemplate(t *testing.T) {
},
},
isValid: false,
filter: filter.Config{IncludeIds: []string{"another-id"}},
filter: TagFilterConfig{IncludeIds: []string{"another-id"}},
// no error because the template is not loaded
expectedErr: nil,
},
@ -101,11 +98,11 @@ func TestLoadTemplate(t *testing.T) {
for _, tc := range tt {
t.Run(tc.name, func(t *testing.T) {
parsedTemplatesCache.Store(tc.name, tc.template, tc.templateErr)
p.parsedTemplatesCache.Store(tc.name, tc.template, nil, tc.templateErr)
tagFilter, err := filter.New(&tc.filter)
tagFilter, err := NewTagFilter(&tc.filter)
require.Nil(t, err)
success, err := LoadTemplate(tc.name, tagFilter, nil, catalog)
success, err := p.LoadTemplate(tc.name, tagFilter, nil, catalog)
if tc.expectedErr == nil {
require.NoError(t, err)
} else {
@ -135,7 +132,7 @@ func TestLoadTemplate(t *testing.T) {
for i, tc := range tt {
name := fmt.Sprintf("regexp%d", i)
t.Run(name, func(t *testing.T) {
template := &templates.Template{
template := &Template{
ID: tc.id,
Info: model.Info{
Name: "Valid template",
@ -143,11 +140,11 @@ func TestLoadTemplate(t *testing.T) {
SeverityHolder: severity.Holder{Severity: severity.Medium},
},
}
parsedTemplatesCache.Store(name, template, nil)
p.parsedTemplatesCache.Store(name, template, nil, nil)
tagFilter, err := filter.New(&filter.Config{})
tagFilter, err := NewTagFilter(&TagFilterConfig{})
require.Nil(t, err)
success, err := LoadTemplate(name, tagFilter, nil, catalog)
success, err := p.LoadTemplate(name, tagFilter, nil, catalog)
if tc.success {
require.NoError(t, err)
require.True(t, success)

View File

@ -0,0 +1,64 @@
package templates
import (
"errors"
"github.com/projectdiscovery/nuclei/v3/pkg/templates/types"
"github.com/projectdiscovery/nuclei/v3/pkg/utils"
)
// validateTemplateMandatoryFields validates the mandatory fields of a template.
// An error returned from this function causes a hard failure and stops further processing.
func validateTemplateMandatoryFields(template *Template) error {
info := template.Info
var validateErrors []error
if utils.IsBlank(info.Name) {
validateErrors = append(validateErrors, ErrMandatoryFieldMissingFmt.Msgf("name"))
}
if info.Authors.IsEmpty() {
validateErrors = append(validateErrors, ErrMandatoryFieldMissingFmt.Msgf("author"))
}
if template.ID == "" {
validateErrors = append(validateErrors, ErrMandatoryFieldMissingFmt.Msgf("id"))
} else if !ReTemplateID.MatchString(template.ID) {
validateErrors = append(validateErrors, ErrInvalidField.Msgf("id", ReTemplateID.String()))
}
if len(validateErrors) > 0 {
return errors.Join(validateErrors...)
}
return nil
}
func isTemplateInfoMetadataMatch(tagFilter *TagFilter, template *Template, extraTags []string) (bool, error) {
match, err := tagFilter.Match(template, extraTags)
if err == ErrExcluded {
return false, ErrExcluded
}
return match, err
}
// validateTemplateOptionalFields validates the optional fields of a template.
// An error returned from this function is reported as a warning and processing continues.
func validateTemplateOptionalFields(template *Template) error {
info := template.Info
var warnings []error
if template.Type() != types.WorkflowProtocol && utils.IsBlank(info.SeverityHolder.Severity.String()) {
warnings = append(warnings, ErrWarningFieldMissing.Msgf("severity"))
}
if len(warnings) > 0 {
return errors.Join(warnings...)
}
return nil
}
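Both validators aggregate with the standard library's errors.Join, which is why the expected messages in the loader tests changed from comma-separated to newline-separated strings; a quick illustration of that behaviour:
package main

import (
	"errors"
	"fmt"
)

func main() {
	joined := errors.Join(
		errors.New("mandatory 'name' field is missing"),
		errors.New("mandatory 'author' field is missing"),
	)
	// errors.Join concatenates the messages with a newline between them.
	fmt.Printf("%q\n", joined.Error())
	// "mandatory 'name' field is missing\nmandatory 'author' field is missing"
}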

15 pkg/templates/stats.go Normal file
View File

@ -0,0 +1,15 @@
package templates
import "github.com/projectdiscovery/nuclei/v3/pkg/utils/stats"
func init() {
stats.NewEntry(SyntaxWarningStats, "Found %d templates with syntax warning (use -validate flag for further examination)")
stats.NewEntry(SyntaxErrorStats, "Found %d templates with syntax error (use -validate flag for further examination)")
stats.NewEntry(RuntimeWarningsStats, "Found %d templates with runtime error (use -validate flag for further examination)")
stats.NewEntry(UnsignedCodeWarning, "Found %d unsigned or tampered code template (carefully examine before using it & use -sign flag to sign them)")
stats.NewEntry(HeadlessFlagWarningStats, "Excluded %d headless template[s] (disabled as default), use -headless option to run headless templates.")
stats.NewEntry(CodeFlagWarningStats, "Excluded %d code template[s] (disabled as default), use -code option to run code templates.")
stats.NewEntry(TemplatesExecutedStats, "Excluded %d template[s] with known weak matchers / tags excluded from default run using .nuclei-ignore")
stats.NewEntry(FuzzFlagWarningStats, "Excluded %d fuzz template[s] (disabled as default), use -fuzz option to run fuzz templates.")
stats.NewEntry(SkippedUnsignedStats, "Skipping %d unsigned template[s]")
}
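The entries registered here are named counters from pkg/utils/stats; the Parser code above bumps them with stats.Increment for syntax warnings and errors. A minimal sketch (the entry name below is hypothetical, mirroring the init() registrations):
package main

import (
	"github.com/projectdiscovery/nuclei/v3/pkg/utils/stats"
)

func main() {
	// Register a counter together with the message printed when it is non-zero
	// (hypothetical entry, following the pattern of the init() above).
	stats.NewEntry("example-warnings", "Found %d templates with example warnings")

	// Bump it wherever the condition is observed.
	stats.Increment("example-warnings")
	stats.Increment("example-warnings")
}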

View File

@ -1,4 +1,4 @@
package filter
package templates
import (
"bufio"
@ -14,7 +14,6 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/operators/common/dsl"
"github.com/projectdiscovery/nuclei/v3/pkg/operators/extractors"
"github.com/projectdiscovery/nuclei/v3/pkg/operators/matchers"
"github.com/projectdiscovery/nuclei/v3/pkg/templates"
"github.com/projectdiscovery/nuclei/v3/pkg/templates/types"
sliceutil "github.com/projectdiscovery/utils/slice"
)
@ -42,7 +41,7 @@ var ErrExcluded = errors.New("the template was excluded")
// unless it is explicitly specified by the user using the includeTags (matchAllows field).
// Matching rule: (tag1 OR tag2...) AND (author1 OR author2...) AND (severity1 OR severity2...) AND (extraTags1 OR extraTags2...)
// Returns true if the template matches the filter criteria, false otherwise.
func (tagFilter *TagFilter) Match(template *templates.Template, extraTags []string) (bool, error) {
func (tagFilter *TagFilter) Match(template *Template, extraTags []string) (bool, error) {
templateTags := template.Info.Tags.ToSlice()
for _, templateTag := range templateTags {
_, blocked := tagFilter.block[templateTag]
@ -193,7 +192,7 @@ func isIdMatch(tagFilter *TagFilter, templateId string) bool {
return included && !excluded
}
func tryCollectConditionsMatchinfo(template *templates.Template) map[string]interface{} {
func tryCollectConditionsMatchinfo(template *Template) map[string]interface{} {
// attempts to unwrap fields to their basic types
// mapping must be manual because of various abstraction layers, custom marshaling and forceful validation
parameters := map[string]interface{}{
@ -319,7 +318,7 @@ func collectExtractorTypes(extractors []*extractors.Extractor) []string {
return extractorTypes
}
func isConditionMatch(tagFilter *TagFilter, template *templates.Template) bool {
func isConditionMatch(tagFilter *TagFilter, template *Template) bool {
if len(tagFilter.includeConditions) == 0 {
return true
}
@ -348,7 +347,7 @@ func isConditionMatch(tagFilter *TagFilter, template *templates.Template) bool {
return true
}
type Config struct {
type TagFilterConfig struct {
Tags []string
ExcludeTags []string
Authors []string
@ -365,7 +364,7 @@ type Config struct {
// New returns a tag filter for nuclei tag based execution
//
// It takes into account Tags, Severities, ExcludeSeverities, Authors, IncludeTags, ExcludeTags, Conditions.
func New(config *Config) (*TagFilter, error) {
func NewTagFilter(config *TagFilterConfig) (*TagFilter, error) {
filter := &TagFilter{
allowedTags: make(map[string]struct{}),
authors: make(map[string]struct{}),

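To make the matching rule above concrete, here is a minimal sketch using the renamed constructor; the template values mirror the filter tests below (author pdteam, high severity), and every configured dimension must allow the template for Match to return true:
package main

import (
	"fmt"
	"log"

	"github.com/projectdiscovery/nuclei/v3/pkg/model"
	"github.com/projectdiscovery/nuclei/v3/pkg/model/types/severity"
	"github.com/projectdiscovery/nuclei/v3/pkg/model/types/stringslice"
	"github.com/projectdiscovery/nuclei/v3/pkg/templates"
)

func main() {
	// Filter: author AND severity must both allow the template.
	filter, err := templates.NewTagFilter(&templates.TagFilterConfig{
		Authors:    []string{"pdteam"},
		Severities: severity.Severities{severity.High},
	})
	if err != nil {
		log.Fatal(err)
	}

	tmpl := &templates.Template{
		ID: "cve-test",
		Info: model.Info{
			Name:           "Example template",
			Authors:        stringslice.StringSlice{Value: "pdteam"},
			SeverityHolder: severity.Holder{Severity: severity.High},
		},
	}

	matched, err := filter.Match(tmpl, nil)
	fmt.Println(matched, err) // true <nil> when every configured dimension allows the template
}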
View File

@ -1,4 +1,4 @@
package filter
package templates
import (
"testing"
@ -8,14 +8,13 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/model/types/stringslice"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/dns"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/http"
"github.com/projectdiscovery/nuclei/v3/pkg/templates"
"github.com/projectdiscovery/nuclei/v3/pkg/templates/types"
"github.com/stretchr/testify/require"
)
func TestTagBasedFilter(t *testing.T) {
newDummyTemplate := func(id string, tags, authors []string, severityValue severity.Severity, protocolType types.ProtocolType) *templates.Template {
dummyTemplate := &templates.Template{}
newDummyTemplate := func(id string, tags, authors []string, severityValue severity.Severity, protocolType types.ProtocolType) *Template {
dummyTemplate := &Template{}
if id != "" {
dummyTemplate.ID = id
}
@ -35,7 +34,7 @@ func TestTagBasedFilter(t *testing.T) {
return dummyTemplate
}
filter, err := New(&Config{
filter, err := NewTagFilter(&TagFilterConfig{
Tags: []string{"cves", "2021", "jira"},
})
require.Nil(t, err)
@ -62,7 +61,7 @@ func TestTagBasedFilter(t *testing.T) {
})
t.Run("not-match-excludes", func(t *testing.T) {
filter, err := New(&Config{
filter, err := NewTagFilter(&TagFilterConfig{
ExcludeTags: []string{"dos"},
})
require.Nil(t, err)
@ -72,7 +71,7 @@ func TestTagBasedFilter(t *testing.T) {
require.Equal(t, ErrExcluded, err, "could not get correct error")
})
t.Run("match-includes", func(t *testing.T) {
filter, err := New(&Config{
filter, err := NewTagFilter(&TagFilterConfig{
Tags: []string{"cves", "fuzz"},
ExcludeTags: []string{"dos", "fuzz"},
IncludeTags: []string{"fuzz"},
@ -84,7 +83,7 @@ func TestTagBasedFilter(t *testing.T) {
require.True(t, matched, "could not get correct match")
})
t.Run("match-includes", func(t *testing.T) {
filter, err := New(&Config{
filter, err := NewTagFilter(&TagFilterConfig{
IncludeTags: []string{"fuzz"},
ExcludeTags: []string{"fuzz"},
})
@ -95,7 +94,7 @@ func TestTagBasedFilter(t *testing.T) {
require.True(t, matched, "could not get correct match")
})
t.Run("match-author", func(t *testing.T) {
filter, err := New(&Config{
filter, err := NewTagFilter(&TagFilterConfig{
Authors: []string{"pdteam"},
})
require.Nil(t, err)
@ -104,7 +103,7 @@ func TestTagBasedFilter(t *testing.T) {
require.True(t, matched, "could not get correct match")
})
t.Run("match-severity", func(t *testing.T) {
filter, err := New(&Config{
filter, err := NewTagFilter(&TagFilterConfig{
Severities: severity.Severities{severity.High},
})
require.Nil(t, err)
@ -113,7 +112,7 @@ func TestTagBasedFilter(t *testing.T) {
require.True(t, matched, "could not get correct match")
})
t.Run("match-id", func(t *testing.T) {
filter, err := New(&Config{
filter, err := NewTagFilter(&TagFilterConfig{
IncludeIds: []string{"cve-test"},
})
require.Nil(t, err)
@ -122,7 +121,7 @@ func TestTagBasedFilter(t *testing.T) {
require.True(t, matched, "could not get correct match")
})
t.Run("match-exclude-severity", func(t *testing.T) {
filter, err := New(&Config{
filter, err := NewTagFilter(&TagFilterConfig{
ExcludeSeverities: severity.Severities{severity.Low},
})
require.Nil(t, err)
@ -134,7 +133,7 @@ func TestTagBasedFilter(t *testing.T) {
require.False(t, matched, "could not get correct match")
})
t.Run("match-exclude-with-tags", func(t *testing.T) {
filter, err := New(&Config{
filter, err := NewTagFilter(&TagFilterConfig{
Tags: []string{"tag"},
ExcludeTags: []string{"another"},
})
@ -144,7 +143,7 @@ func TestTagBasedFilter(t *testing.T) {
require.False(t, matched, "could not get correct match")
})
t.Run("match-conditions", func(t *testing.T) {
filter, err := New(&Config{
filter, err := NewTagFilter(&TagFilterConfig{
Authors: []string{"pdteam"},
Tags: []string{"jira"},
Severities: severity.Severities{severity.High},
@ -165,7 +164,7 @@ func TestTagBasedFilter(t *testing.T) {
require.False(t, matched, "could not get correct match")
})
t.Run("match-type", func(t *testing.T) {
filter, err := New(&Config{
filter, err := NewTagFilter(&TagFilterConfig{
Protocols: []types.ProtocolType{types.HTTPProtocol},
})
require.Nil(t, err)
@ -175,7 +174,7 @@ func TestTagBasedFilter(t *testing.T) {
require.True(t, matched, "could not get correct match")
})
t.Run("match-exclude-id", func(t *testing.T) {
filter, err := New(&Config{
filter, err := NewTagFilter(&TagFilterConfig{
ExcludeIds: []string{"cve-test"},
})
require.Nil(t, err)
@ -187,7 +186,7 @@ func TestTagBasedFilter(t *testing.T) {
require.False(t, matched, "could not get correct match")
})
t.Run("match-exclude-type", func(t *testing.T) {
filter, err := New(&Config{
filter, err := NewTagFilter(&TagFilterConfig{
ExcludeProtocols: []types.ProtocolType{types.HTTPProtocol},
})
require.Nil(t, err)
@ -267,9 +266,9 @@ func TestTagBasedFilter(t *testing.T) {
})
}
func testAdvancedFiltering(t *testing.T, includeConditions []string, template *templates.Template, shouldError, shouldMatch bool) {
func testAdvancedFiltering(t *testing.T, includeConditions []string, template *Template, shouldError, shouldMatch bool) {
// basic properties
advancedFilter, err := New(&Config{IncludeConditions: includeConditions})
advancedFilter, err := NewTagFilter(&TagFilterConfig{IncludeConditions: includeConditions})
if shouldError {
require.NotNil(t, err)
return

View File

@ -10,11 +10,6 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/workflows"
)
const (
// Note: we redefine to avoid cyclic dependency but it should be same as parsers.SkippedUnsignedStats
SkippedUnsignedStats = "skipped-unsigned-stats" // tracks loading of unsigned templates
)
// compileWorkflow compiles the workflow for execution
func compileWorkflow(path string, preprocessor Preprocessor, options *protocols.ExecutorOptions, workflow *workflows.Workflow, loader model.WorkflowLoader) {
for _, workflow := range workflow.Workflows {

View File

@ -8,7 +8,7 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/config"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/disk"
"github.com/projectdiscovery/nuclei/v3/pkg/parsers"
"github.com/projectdiscovery/nuclei/v3/pkg/loader/workflow"
"github.com/projectdiscovery/nuclei/v3/pkg/progress"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/contextargs"
@ -35,8 +35,9 @@ func setup() {
Browser: nil,
Catalog: disk.NewCatalog(config.DefaultConfig.TemplatesDirectory),
RateLimiter: ratelimit.New(context.Background(), uint(options.RateLimit), time.Second),
Parser: templates.NewParser(),
}
workflowLoader, err := parsers.NewLoader(&executerOpts)
workflowLoader, err := workflow.NewLoader(&executerOpts)
if err != nil {
log.Fatalf("Could not create workflow loader: %s\n", err)
}

View File

@ -8,7 +8,7 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/config"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/disk"
"github.com/projectdiscovery/nuclei/v3/pkg/parsers"
"github.com/projectdiscovery/nuclei/v3/pkg/loader/workflow"
"github.com/projectdiscovery/nuclei/v3/pkg/progress"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/contextargs"
@ -35,8 +35,9 @@ func setup() {
Browser: nil,
Catalog: disk.NewCatalog(config.DefaultConfig.TemplatesDirectory),
RateLimiter: ratelimit.New(context.Background(), uint(options.RateLimit), time.Second),
Parser: templates.NewParser(),
}
workflowLoader, err := parsers.NewLoader(&executerOpts)
workflowLoader, err := workflow.NewLoader(&executerOpts)
if err != nil {
log.Fatalf("Could not create workflow loader: %s\n", err)
}

View File

@ -7,10 +7,7 @@ import (
"strings"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/config"
"github.com/projectdiscovery/nuclei/v3/pkg/utils/yaml"
"github.com/projectdiscovery/retryablehttp-go"
fileutil "github.com/projectdiscovery/utils/file"
)
func IsBlank(value string) bool {
@ -35,38 +32,20 @@ func IsURL(input string) bool {
}
// ReadFromPathOrURL reads and returns the contents of a file or url.
func ReadFromPathOrURL(templatePath string, catalog catalog.Catalog) (data []byte, err error) {
var reader io.Reader
func ReaderFromPathOrURL(templatePath string, catalog catalog.Catalog) (io.ReadCloser, error) {
if IsURL(templatePath) {
resp, err := retryablehttp.DefaultClient().Get(templatePath)
if err != nil {
return nil, err
}
defer resp.Body.Close()
reader = resp.Body
return resp.Body, nil
} else {
f, err := catalog.OpenFile(templatePath)
if err != nil {
return nil, err
}
defer f.Close()
reader = f
return f, nil
}
data, err = io.ReadAll(reader)
if err != nil {
return nil, err
}
// pre-process directives only for local files
if fileutil.FileExists(templatePath) && config.GetTemplateFormatFromExt(templatePath) == config.YAML {
data, err = yaml.PreProcess(data)
if err != nil {
return nil, err
}
}
return
}
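Since the helper now returns the raw io.ReadCloser instead of the file contents, closing the reader and any YAML pre-processing move to the caller, as the Parse and ParseTemplate call sites above show. A minimal sketch, assuming a local templates directory (the paths are placeholders):
package main

import (
	"fmt"
	"io"
	"log"

	"github.com/projectdiscovery/nuclei/v3/pkg/catalog/disk"
	"github.com/projectdiscovery/nuclei/v3/pkg/utils"
)

func main() {
	catalog := disk.NewCatalog("/path/to/nuclei-templates") // placeholder directory

	reader, err := utils.ReaderFromPathOrURL("/path/to/nuclei-templates/example.yaml", catalog)
	if err != nil {
		log.Fatal(err)
	}
	// The caller owns the reader now: close it once done.
	defer reader.Close()

	data, err := io.ReadAll(reader)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("read", len(data), "bytes")
}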
// StringSliceContains checks if a string slice contains a string.