Mirror of https://github.com/projectdiscovery/nuclei.git
Added custom json-unmarshaller + misc updates (#2556)

* Added custom json-unmarshaller + misc updates
* Added support for nuclei-cloud based scan execution
* Removed unnecessary files
* Misc
* Changes as per review comments
* Misc option update

Co-authored-by: sandeep <sandeep@projectdiscovery.io>

Commit 04b47b0309 (parent 42d9e7e75e)
@@ -269,6 +269,12 @@ on extensive configurability, massive extensibility and ease of use.`)
		flagSet.IntVarP(&options.MetricsPort, "metrics-port", "mp", 9092, "port to expose nuclei metrics on"),
	)

	flagSet.CreateGroup("cloud", "Cloud",
		flagSet.BoolVar(&options.Cloud, "cloud", false, "run scan on nuclei cloud"),
		flagSet.StringVarEnv(&options.CloudURL, "cloud-server", "cs", "http://cloud-dev.nuclei.sh", "NUCLEI_CLOUD_SERVER", "url for the nuclei cloud server"),
		flagSet.StringVarEnv(&options.CloudAPIKey, "cloud-api-key", "ak", "", "NUCLEI_CLOUD_APIKEY", "api-key for the nuclei cloud server"),
	)

	_ = flagSet.Parse()

	if options.LeaveDefaultPorts {
v2/internal/runner/enumerate.go (new file, 74 lines)
@@ -0,0 +1,74 @@
package runner

import (
	_ "net/http/pprof"
	"strings"
	"time"

	"github.com/projectdiscovery/gologger"
	"github.com/projectdiscovery/nuclei/v2/internal/runner/nucleicloud"
	"github.com/projectdiscovery/nuclei/v2/pkg/catalog/loader"
	"github.com/projectdiscovery/nuclei/v2/pkg/core"
	"github.com/projectdiscovery/nuclei/v2/pkg/output"
	"github.com/projectdiscovery/nuclei/v2/pkg/protocols"
	"go.uber.org/atomic"
)

// runStandardEnumeration runs standard enumeration
func (r *Runner) runStandardEnumeration(executerOpts protocols.ExecuterOptions, store *loader.Store, engine *core.Engine) (*atomic.Bool, error) {
	if r.options.AutomaticScan {
		return r.executeSmartWorkflowInput(executerOpts, store, engine)
	}
	return r.executeTemplatesInput(store, engine)
}

// runCloudEnumeration runs cloud based enumeration
func (r *Runner) runCloudEnumeration(store *loader.Store) (*atomic.Bool, error) {
	now := time.Now()
	defer func() {
		gologger.Info().Msgf("Scan execution took %s", time.Since(now))
	}()
	client := nucleicloud.New(r.options.CloudURL, r.options.CloudAPIKey)

	results := &atomic.Bool{}

	targets := make([]string, 0, r.hmapInputProvider.Count())
	r.hmapInputProvider.Scan(func(value string) {
		targets = append(targets, value)
	})
	templates := make([]string, 0, len(store.Templates()))
	for _, template := range store.Templates() {
		templates = append(templates, getTemplateRelativePath(template.Path))
	}
	taskID, err := client.AddScan(&nucleicloud.AddScanRequest{
		RawTargets:      targets,
		PublicTemplates: templates,
	})
	if err != nil {
		return results, err
	}
	gologger.Info().Msgf("Created task with ID: %s", taskID)
	time.Sleep(3 * time.Second)

	err = client.GetResults(taskID, func(re *output.ResultEvent) {
		results.CAS(false, true)

		if outputErr := r.output.Write(re); outputErr != nil {
			gologger.Warning().Msgf("Could not write output: %s", outputErr)
		}
		if r.issuesClient != nil {
			if err := r.issuesClient.CreateIssue(re); err != nil {
				gologger.Warning().Msgf("Could not create issue on tracker: %s", err)
			}
		}
	})
	return results, err
}

func getTemplateRelativePath(templatePath string) string {
	splitted := strings.SplitN(templatePath, "nuclei-templates", 2)
	if len(splitted) < 2 {
		return ""
	}
	return strings.TrimPrefix(splitted[1], "/")
}
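A small test-style sketch (a hypothetical _test.go file in the same runner package; the paths are made up, not from this commit) of what getTemplateRelativePath returns:

package runner

import "testing"

func TestGetTemplateRelativePath(t *testing.T) {
	// Anything up to and including the "nuclei-templates" directory is stripped.
	if got := getTemplateRelativePath("/home/user/nuclei-templates/cves/2021/CVE-2021-1234.yaml"); got != "cves/2021/CVE-2021-1234.yaml" {
		t.Errorf("unexpected relative path: %q", got)
	}
	// Paths without a "nuclei-templates" segment yield an empty string.
	if got := getTemplateRelativePath("/tmp/custom-templates/example.yaml"); got != "" {
		t.Errorf("expected empty path, got %q", got)
	}
}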
v2/internal/runner/nucleicloud/cloud.go (new file, 115 lines)
@@ -0,0 +1,115 @@
package nucleicloud

import (
	"bytes"
	"fmt"
	"io/ioutil"
	"net/http"
	"strings"
	"time"

	jsoniter "github.com/json-iterator/go"
	"github.com/pkg/errors"
	"github.com/projectdiscovery/nuclei/v2/pkg/output"
	"github.com/projectdiscovery/retryablehttp-go"
)

// Client is a client for result retrieval from nuclei-cloud API
type Client struct {
	baseURL    string
	apiKey     string
	httpclient *retryablehttp.Client
}

const (
	pollInterval   = 1 * time.Second
	defaultBaseURL = "http://webapp.localhost"
)

// New returns a nuclei-cloud API client
func New(baseURL, apiKey string) *Client {
	options := retryablehttp.DefaultOptionsSingle
	options.Timeout = 15 * time.Second
	client := retryablehttp.NewClient(options)

	baseAppURL := baseURL
	if baseAppURL == "" {
		baseAppURL = defaultBaseURL
	}
	return &Client{httpclient: client, baseURL: baseAppURL, apiKey: apiKey}
}

// AddScan adds a scan for templates and target to nuclei server
func (c *Client) AddScan(req *AddScanRequest) (string, error) {
	var buf bytes.Buffer
	if err := jsoniter.NewEncoder(&buf).Encode(req); err != nil {
		return "", errors.Wrap(err, "could not json encode scan request")
	}
	httpReq, err := retryablehttp.NewRequest(http.MethodPost, fmt.Sprintf("%s/scan", c.baseURL), bytes.NewReader(buf.Bytes()))
	if err != nil {
		return "", errors.Wrap(err, "could not make request")
	}
	httpReq.Header.Set("X-API-Key", c.apiKey)

	resp, err := c.httpclient.Do(httpReq)
	if err != nil {
		return "", errors.Wrap(err, "could not do add scan request")
	}
	if resp.StatusCode != 200 {
		data, _ := ioutil.ReadAll(resp.Body)
		resp.Body.Close()
		return "", errors.Errorf("could not do request %d: %s", resp.StatusCode, string(data))
	}
	var data map[string]string
	if err := jsoniter.NewDecoder(resp.Body).Decode(&data); err != nil {
		resp.Body.Close()
		return "", errors.Wrap(err, "could not decode resp")
	}
	resp.Body.Close()
	id := data["id"]
	return id, nil
}

// GetResults gets results from nuclei server for an ID
// until there are no more results left to retrieve.
func (c *Client) GetResults(ID string, callback func(*output.ResultEvent)) error {
	lastID := int64(0)
	for {
		httpReq, err := retryablehttp.NewRequest(http.MethodGet, fmt.Sprintf("%s/results?id=%s&from=%d", c.baseURL, ID, lastID), nil)
		if err != nil {
			return errors.Wrap(err, "could not make request")
		}
		httpReq.Header.Set("X-API-Key", c.apiKey)

		resp, err := c.httpclient.Do(httpReq)
		if err != nil {
return errors.Wrap(err, "could not do ger result request")
|
||||
		}
		if resp.StatusCode != 200 {
			data, _ := ioutil.ReadAll(resp.Body)
			resp.Body.Close()
			return errors.Errorf("could not do request %d: %s", resp.StatusCode, string(data))
		}
		var items GetResultsResponse
		if err := jsoniter.NewDecoder(resp.Body).Decode(&items); err != nil {
			resp.Body.Close()
			return errors.Wrap(err, "could not decode results")
		}
		resp.Body.Close()

		for _, item := range items.Items {
			lastID = item.ID

			var result output.ResultEvent
			if err := jsoniter.NewDecoder(strings.NewReader(item.Raw)).Decode(&result); err != nil {
				return errors.Wrap(err, "could not decode result item")
			}
			callback(&result)
		}
		if items.Finished {
			break
		}
		time.Sleep(pollInterval)
	}
	return nil
}
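For orientation, a hedged usage sketch of the client defined above; the server URL, API key, target, and template path are placeholders, and since nucleicloud sits under internal/ it is only importable from within the nuclei module:

package main

import (
	"fmt"
	"log"

	"github.com/projectdiscovery/nuclei/v2/internal/runner/nucleicloud"
	"github.com/projectdiscovery/nuclei/v2/pkg/output"
)

func main() {
	// Placeholder endpoint and key; New falls back to defaultBaseURL when given an empty URL.
	client := nucleicloud.New("http://cloud.example.local", "example-api-key")

	// Queue a scan of one raw target against one public template path.
	id, err := client.AddScan(&nucleicloud.AddScanRequest{
		RawTargets:      []string{"https://scanme.example"},
		PublicTemplates: []string{"cves/2021/CVE-2021-1234.yaml"},
	})
	if err != nil {
		log.Fatalf("could not add scan: %s", err)
	}

	// Poll every pollInterval until the server reports the scan as finished.
	if err := client.GetResults(id, func(event *output.ResultEvent) {
		fmt.Printf("%s matched %s\n", event.TemplateID, event.Matched) // field names assumed from output.ResultEvent
	}); err != nil {
		log.Fatalf("could not fetch results: %s", err)
	}
}

runCloudEnumeration above follows the same sequence, with a 3-second pause between AddScan and the first GetResults poll.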
v2/internal/runner/nucleicloud/types.go (new file, 22 lines)
@@ -0,0 +1,22 @@
package nucleicloud

// AddScanRequest is a nuclei scan input item.
type AddScanRequest struct {
	// RawTargets is a list of raw target URLs for the scan.
	RawTargets []string `json:"raw_targets,omitempty"`
	// PublicTemplates is a list of public templates for the scan
	PublicTemplates []string `json:"public_templates,omitempty"`
	// PrivateTemplates is a map of template-name->contents that
	// are private to the user executing the scan. (TODO: TBD)
	PrivateTemplates map[string]string `json:"private_templates,omitempty"`
}

type GetResultsResponse struct {
	Finished bool `json:"finished"`
	Items []GetResultsResponseItem `json:"items"`
}

type GetResultsResponseItem struct {
	ID int64 `json:"id"`
	Raw string `json:"raw"`
}
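To make the wire format concrete, here is roughly what these structs produce and consume, based only on the JSON tags above and the endpoints used in cloud.go (all values are illustrative):

POST {baseURL}/scan request body (AddScanRequest):
  {"raw_targets":["https://scanme.example"],"public_templates":["cves/2021/CVE-2021-1234.yaml"]}

GET {baseURL}/results?id=<scan-id>&from=<last-result-id> response body (GetResultsResponse):
  {"finished":false,"items":[{"id":42,"raw":"<JSON-serialized output.ResultEvent>"}]}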
@@ -381,11 +381,11 @@ func (r *Runner) RunEnumeration() error {
	if templateConfig == nil {
		templateConfig = &config.Config{}
	}

	store, err := loader.New(loader.NewConfig(r.options, templateConfig, r.catalog, executerOpts))
	if err != nil {
		return errors.Wrap(err, "could not load templates from config")
	}

	if r.options.Validate {
		if err := store.ValidateTemplates(); err != nil {
			return err
@@ -402,15 +402,12 @@ func (r *Runner) RunEnumeration() error {
	r.displayExecutionInfo(store)

	var results *atomic.Bool
-	if r.options.AutomaticScan {
-		if results, err = r.executeSmartWorkflowInput(executerOpts, store, engine); err != nil {
-			return err
-		}

+	if r.options.Cloud {
+		gologger.Info().Msgf("Running scan on cloud with URL %s", r.options.CloudURL)
+		results, err = r.runCloudEnumeration(store)
	} else {
-		if results, err = r.executeTemplatesInput(store, engine); err != nil {
-			return err
-		}
+		results, err = r.runStandardEnumeration(executerOpts, store, engine)
	}

	if r.interactsh != nil {
@@ -1,6 +1,8 @@
package filter

-import "github.com/projectdiscovery/nuclei/v2/pkg/catalog"
+import (
+	"github.com/projectdiscovery/nuclei/v2/pkg/catalog"
+)

// PathFilter is a path based template filter
type PathFilter struct {
@@ -1,6 +1,7 @@
package severity

import (
	"encoding/json"
	"fmt"
	"strings"

@@ -42,6 +43,23 @@ func (severities *Severities) UnmarshalYAML(unmarshal func(interface{}) error) error {
	return nil
}

func (severities *Severities) UnmarshalJSON(data []byte) error {
	var stringSliceValue stringslice.StringSlice
	if err := json.Unmarshal(data, &stringSliceValue); err != nil {
		return err
	}

	stringSlice := stringSliceValue.ToSlice()
	var result = make(Severities, 0, len(stringSlice))
	for _, severityString := range stringSlice {
		if err := setSeverity(&result, severityString); err != nil {
			return err
		}
	}
	*severities = result
	return nil
}

func (severities Severities) String() string {
	var stringSeverities = make([]string, 0, len(severities))
	for _, severity := range severities {
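A minimal sketch (a hypothetical _test.go file in the same severity package; not part of this commit) of the two encodings the new Severities unmarshaller accepts — a comma-separated string or a JSON array — both normalized through the StringSlice unmarshaller shown further below:

package severity

import (
	"encoding/json"
	"testing"
)

func TestSeveritiesUnmarshalJSON(t *testing.T) {
	var fromString, fromArray Severities

	// Comma-separated string form; values are trimmed and lower-cased by StringSlice.
	if err := json.Unmarshal([]byte(`"High, critical"`), &fromString); err != nil {
		t.Fatal(err)
	}
	// JSON array form.
	if err := json.Unmarshal([]byte(`["high","critical"]`), &fromArray); err != nil {
		t.Fatal(err)
	}
	if len(fromString) != 2 || len(fromArray) != 2 {
		t.Fatalf("expected two severities, got %v and %v", fromString, fromArray)
	}
}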
@@ -97,6 +97,21 @@ func (severityHolder *Holder) UnmarshalYAML(unmarshal func(interface{}) error) error {
	return nil
}

func (severityHolder *Holder) UnmarshalJSON(data []byte) error {
	var marshalledSeverity string
	if err := json.Unmarshal(data, &marshalledSeverity); err != nil {
		return err
	}

	computedSeverity, err := toSeverity(marshalledSeverity)
	if err != nil {
		return err
	}

	severityHolder.Severity = computedSeverity
	return nil
}

func (severityHolder *Holder) MarshalJSON() ([]byte, error) {
	return json.Marshal(severityHolder.Severity.String())
}
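A similar sketch for Holder (again a hypothetical _test.go file in the severity package); it assumes Severity.String() yields the lower-cased name, which is what MarshalJSON above writes back out:

package severity

import (
	"encoding/json"
	"testing"
)

func TestHolderJSONRoundTrip(t *testing.T) {
	var holder Holder
	if err := json.Unmarshal([]byte(`"high"`), &holder); err != nil {
		t.Fatal(err)
	}
	data, err := json.Marshal(&holder)
	if err != nil {
		t.Fatal(err)
	}
	// Assumes the canonical string form of the parsed severity is "high".
	if string(data) != `"high"` {
		t.Fatalf("unexpected round-trip value: %s", data)
	}
}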
@@ -51,7 +51,6 @@ func (stringSlice *StringSlice) UnmarshalYAML(unmarshal func(interface{}) error) error {
	}

	result := make([]string, 0, len(marshalledSlice))
	//nolint:gosimple,nolintlint //cannot be replaced with result = append(result, slices...) because the values are being normalized
	for _, value := range marshalledSlice {
		result = append(result, strings.ToLower(strings.TrimSpace(value))) // TODO do we need to introduce RawStringSlice and/or NormalizedStringSlices?
	}
@@ -67,6 +66,36 @@ func (stringSlice StringSlice) MarshalJSON() ([]byte, error) {
	return json.Marshal(stringSlice.Value)
}

func (stringSlice *StringSlice) UnmarshalJSON(data []byte) error {
	var marshalledValueAsString string
	var marshalledValuesAsSlice []string

	sliceMarshalError := json.Unmarshal(data, &marshalledValuesAsSlice)
	if sliceMarshalError != nil {
		stringMarshalError := json.Unmarshal(data, &marshalledValueAsString)
		if stringMarshalError != nil {
			return stringMarshalError
		}
	}

	var result []string
	switch {
	case len(marshalledValuesAsSlice) > 0:
		result = marshalledValuesAsSlice
	case utils.IsNotBlank(marshalledValueAsString):
		result = strings.Split(marshalledValueAsString, ",")
	default:
		result = []string{}
	}

	values := make([]string, 0, len(result))
	for _, value := range result {
		values = append(values, strings.ToLower(strings.TrimSpace(value))) // TODO do we need to introduce RawStringSlice and/or NormalizedStringSlices?
	}
	stringSlice.Value = values
	return nil
}

func marshalStringToSlice(unmarshal func(interface{}) error) ([]string, error) {
	var marshalledValueAsString string
	var marshalledValuesAsSlice []string
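Finally, a short sketch (a hypothetical _test.go file in the stringslice package) of what the new StringSlice.UnmarshalJSON accepts: a JSON array or a comma-separated JSON string, both normalized to trimmed, lower-cased values:

package stringslice

import (
	"encoding/json"
	"reflect"
	"testing"
)

func TestStringSliceUnmarshalJSON(t *testing.T) {
	var fromArray, fromString StringSlice

	if err := json.Unmarshal([]byte(`["CVE-2021-1234 ", "RCE"]`), &fromArray); err != nil {
		t.Fatal(err)
	}
	if err := json.Unmarshal([]byte(`"CVE-2021-1234, rce"`), &fromString); err != nil {
		t.Fatal(err)
	}

	want := []string{"cve-2021-1234", "rce"}
	if !reflect.DeepEqual(fromArray.Value, want) || !reflect.DeepEqual(fromString.Value, want) {
		t.Fatalf("unexpected values: %v / %v", fromArray.Value, fromString.Value)
	}
}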
@@ -91,6 +91,10 @@ type Options struct {
	MarkdownExportDirectory string
	// SarifExport is the file to export sarif output format to
	SarifExport string
	// CloudURL is the URL for the nuclei cloud endpoint
	CloudURL string
	// CloudAPIKey is the api-key for the nuclei cloud endpoint
	CloudAPIKey string
	// ResolversFile is a file containing resolvers for nuclei.
	ResolversFile string
	// StatsInterval is the number of seconds to display stats after

@@ -178,6 +182,8 @@ type Options struct {
	JSON bool
	// JSONRequests writes requests/responses for matches in JSON output
	JSONRequests bool
	// Cloud enables nuclei cloud scan execution
	Cloud bool
	// EnableProgressBar enables progress bar
	EnableProgressBar bool
	// TemplatesVersion shows the templates installed version