package main

import (
	"bytes"
	"encoding/json"
	"flag"
	"fmt"
	"io"
	"log"
	"net/http"
	"net/url"
	"os"
	"path/filepath"
	"reflect"
	"regexp"
	"sort"
	"strconv"
	"strings"

	"github.com/pkg/errors"
	"github.com/projectdiscovery/gologger"
	"github.com/projectdiscovery/gologger/levels"
	"github.com/projectdiscovery/nuclei/v2/pkg/catalog"
	"github.com/projectdiscovery/nuclei/v2/pkg/catalog/disk"
	"github.com/projectdiscovery/nuclei/v2/pkg/protocols"
	"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/protocolinit"
	"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/protocolstate"
	"github.com/projectdiscovery/nuclei/v2/pkg/templates"
	"github.com/projectdiscovery/nuclei/v2/pkg/types"
	"github.com/projectdiscovery/nvd"
	"github.com/projectdiscovery/retryablehttp-go"
	sliceutil "github.com/projectdiscovery/utils/slice"
	stringsutil "github.com/projectdiscovery/utils/strings"
	"gopkg.in/yaml.v3"
)

const (
	yamlIndentSpaces = 2
)

var cisaKnownExploitedVulnerabilities map[string]struct{}

// allTagsRegex matches every top-level YAML key of a nuclei template except id, info, and -
var allTagsRegex []*regexp.Regexp
var defaultOpts = types.DefaultOptions()

func init() {
	var tm templates.Template
	t := reflect.TypeOf(tm)
	for i := 0; i < t.NumField(); i++ {
		tag := t.Field(i).Tag.Get("yaml")
		if strings.Contains(tag, ",") {
			tag = strings.Split(tag, ",")[0]
		}
		// ignore these tags
		if tag == "id" || tag == "info" || tag == "" || tag == "-" {
			continue
		}
		re := regexp.MustCompile(tag + `:\s*\n`)
		allTagsRegex = append(allTagsRegex, re)
	}

	// need to set headless to true for headless templates
	defaultOpts.Headless = true
	if err := protocolstate.Init(defaultOpts); err != nil {
		gologger.Fatal().Msgf("Could not initialize protocol state: %s\n", err)
	}
	if err := protocolinit.Init(defaultOpts); err != nil {
		gologger.Fatal().Msgf("Could not initialize protocols: %s\n", err)
	}
	if err := fetchCISAKnownExploitedVulnerabilities(); err != nil {
		panic(err)
	}
}

var (
	input   = flag.String("i", "", "Templates to annotate")
	verbose = flag.Bool("v", false, "show verbose output")
)

func main() {
	flag.Parse()

	if *input == "" {
		log.Fatalf("invalid input, see -h\n")
	}
	if strings.HasPrefix(*input, "~/") {
		home, err := os.UserHomeDir()
		if err != nil {
			log.Fatalf("Failed to read UserHomeDir: %v, provide an absolute template path/directory\n", err)
		}
		*input = filepath.Join(home, (*input)[2:])
	}
	gologger.DefaultLogger.SetMaxLevel(levels.LevelSilent)
	if *verbose {
		gologger.DefaultLogger.SetMaxLevel(levels.LevelVerbose)
	}
	if err := process(); err != nil {
		gologger.Error().Msgf("could not process: %s\n", err)
	}
}

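// process walks every template under the -i input path and annotates each one
// in place: max-request metadata, reference-derived tags, and CVE data from NVD.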
func process() error {
	tempDir, err := os.MkdirTemp("", "nuclei-nvd-*")
	if err != nil {
		return err
	}
	defer os.RemoveAll(tempDir)

	client := nvd.NewClientV2()
	templateCatalog := disk.NewCatalog(filepath.Dir(*input))
	paths, err := templateCatalog.GetTemplatePath(*input)
	if err != nil {
		return err
	}
	for _, path := range paths {
		data, err := os.ReadFile(path)
		if err != nil {
			return err
		}
		dataString := string(data)
		// try to fill max-requests
		dataString, err = parseAndAddMaxRequests(templateCatalog, path, dataString)
		if err != nil {
			gologger.Error().Msgf("Could not compile max request %s: %s\n", path, err)
		}
		// try to resolve references to tags
		dataString, err = parseAndAddReferenceBasedTags(path, dataString)
		if err != nil {
			gologger.Error().Msgf("Could not parse reference tags %s: %s\n", path, err)
			continue
		}
		// try to fill CVE data
		getCVEData(client, path, dataString)
	}
	return nil
}

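// idRegex and severityRegex extract the CVE id and severity values from the
// raw template text.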
var (
	idRegex       = regexp.MustCompile(`id: ([Cc][Vv][Ee]-[0-9]+-[0-9]+)`)
	severityRegex = regexp.MustCompile(`severity: ([a-z]+)`)
)

const maxReferenceCount = 5

// dead sites to skip for references
var badRefs = []string{
	"osvdb.org/",
	"securityfocus.com/",
	"archives.neohapsis.com/",
	"iss.net/",
	"ntelbras.com/",
	"andmp.com/",
	"blacklanternsecurity.com/",
	"pwnwiki.org/",
	"0dayhack.net/",
	"correkt.horse/",
	"poc.wgpsec.org/",
	"ctf-writeup.revers3c.com/",
	"secunia.com/",
}

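// getCVEData looks up the template's CVE id in NVD and rewrites the info block
// in place (severity, CVSS classification, description, references, CPE, and
// EPSS score) whenever any of those values changed.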
func getCVEData(client *nvd.ClientV2, filePath, data string) {
	matches := idRegex.FindAllStringSubmatch(data, 1)
	if len(matches) == 0 {
		return
	}
	cveName := matches[0][1]

	// Perform CISA Known Exploited Vulnerabilities tag annotation
	// if we discover it has been exploited.
	var err error
	if cisaKnownExploitedVulnerabilities != nil {
		_, ok := cisaKnownExploitedVulnerabilities[strings.ToLower(cveName)]
		if ok {
			data, err = parseAndAddCISAKevTagTemplate(filePath, data)
		}
	}
	if err != nil {
		gologger.Error().Msgf("Could not parse cisa data %s: %s\n", cveName, err)
		return
	}

	severityMatches := severityRegex.FindAllStringSubmatch(data, 1)
	if len(severityMatches) == 0 {
		return
	}
	severityValue := severityMatches[0][1]

	cveItem, err := client.FetchCVE(cveName)
	if err != nil {
		gologger.Error().Msgf("Could not fetch cve %s: %s\n", cveName, err)
		return
	}
	var cweID []string
	for _, weaknessData := range cveItem.Cve.Weaknesses {
		for _, description := range weaknessData.Description {
			cweID = append(cweID, description.Value)
		}
	}
	cvssData, err := getPrimaryCVSSData(cveItem)
	if err != nil {
		gologger.Error().Msgf("Could not get CVSS data %s: %s\n", cveName, err)
		return
	}
	cvssScore := cvssData.BaseScore
	cvssMetrics := cvssData.VectorString

	// Perform some hacky string replacement to place the metadata in templates
	infoBlockIndexData := data[strings.Index(data, "info:"):]
	requestsIndex := strings.Index(infoBlockIndexData, "requests:")
	networkIndex := strings.Index(infoBlockIndexData, "network:")
	variablesIndex := strings.Index(infoBlockIndexData, "variables:")
	if requestsIndex == -1 && networkIndex == -1 && variablesIndex == -1 {
		return
	}
	if networkIndex != -1 {
		requestsIndex = networkIndex
	}
	if variablesIndex != -1 {
		requestsIndex = variablesIndex
	}
	infoBlockData := infoBlockIndexData[:requestsIndex]
	infoBlockClean := strings.TrimRight(infoBlockData, "\n")

	infoBlock := InfoBlock{}
	err = yaml.Unmarshal([]byte(data), &infoBlock)
	if err != nil {
		gologger.Warning().Msgf("Could not unmarshal info block: %s\n", err)
	}

	var changed bool
	if newSeverity := isSeverityMatchingCvssScore(severityValue, cvssScore); newSeverity != "" {
		changed = true
		infoBlock.Info.Severity = newSeverity
		gologger.Info().Msgf("Adjusting severity for %s from %s=>%s (%.2f)\n", filePath, severityValue, newSeverity, cvssScore)
	}
	isCvssEmpty := cvssScore == 0 || cvssMetrics == ""
	hasCvssChanged := infoBlock.Info.Classification.CvssScore != cvssScore || cvssMetrics != infoBlock.Info.Classification.CvssMetrics
	if !isCvssEmpty && hasCvssChanged {
		changed = true
		infoBlock.Info.Classification.CvssMetrics = cvssMetrics
		infoBlock.Info.Classification.CvssScore = cvssScore
		infoBlock.Info.Classification.CveId = cveName
		if len(cweID) > 0 && (cweID[0] != "NVD-CWE-Other" && cweID[0] != "NVD-CWE-noinfo") {
			infoBlock.Info.Classification.CweId = strings.Join(cweID, ",")
		}
	}
	// If there is no description field, fill the description from CVE information
	enDescription, err := getEnglishLangString(cveItem.Cve.Descriptions)
	hasDescriptionData := err == nil
	isDescriptionEmpty := infoBlock.Info.Description == ""
	if isDescriptionEmpty && hasDescriptionData {
		changed = true
		// removes all new lines
		description := stringsutil.ReplaceAll(enDescription, "", "\n", "\\", "'", "\t")
		description += "\n"
		infoBlock.Info.Description = description
	}

	// collect reference URLs from the fetched CVE entry
	var referenceDataURLs []string

	// skip sites that are no longer alive
	for _, reference := range cveItem.Cve.References {
		if stringsutil.ContainsAny(reference.URL, badRefs...) {
			continue
		}
		referenceDataURLs = append(referenceDataURLs, reference.URL)
	}
	hasReferenceData := len(cveItem.Cve.References) > 0
	areCveReferencesContained := sliceutil.ContainsItems(infoBlock.Info.Reference, referenceDataURLs)
	referencesCount := len(infoBlock.Info.Reference)
	if hasReferenceData && !areCveReferencesContained {
		changed = true
		for _, ref := range referenceDataURLs {
			referencesCount++
			if referencesCount >= maxReferenceCount {
				break
			}
			infoBlock.Info.Reference = append(infoBlock.Info.Reference, ref)
		}
		infoBlock.Info.Reference = sliceutil.PruneEmptyStrings(sliceutil.Dedupe(infoBlock.Info.Reference))
	}

	cpeSet := map[string]bool{}
	for _, config := range cveItem.Cve.Configurations {
		// Right now this covers only simple configurations. More complex configurations can have multiple CPEs
		if len(config.Nodes) == 1 {
			changed = true
			node := config.Nodes[0]
			for _, match := range node.CpeMatch {
				cpeSet[extractVersionlessCpe(match.Criteria)] = true
			}
		}
	}
	uniqueCpes := make([]string, 0, len(cpeSet))
	for k := range cpeSet {
		uniqueCpes = append(uniqueCpes, k)
	}
	if len(uniqueCpes) == 1 {
		infoBlock.Info.Classification.Cpe = uniqueCpes[0]
	}

	epss, err := fetchEpss(cveName)
	if err != nil {
		log.Printf("Could not fetch EPSS score: %s\n", err)
		return
	}
	hasEpssChanged := epss != infoBlock.Info.Classification.EpssScore
	if hasEpssChanged {
		changed = true
		infoBlock.Info.Classification.EpssScore = epss
	}

	var newInfoBlock bytes.Buffer
	yamlEncoder := yaml.NewEncoder(&newInfoBlock)
	yamlEncoder.SetIndent(yamlIndentSpaces)
	err = yamlEncoder.Encode(infoBlock)
	if err != nil {
		gologger.Warning().Msgf("Could not marshal info block: %s\n", err)
		return
	}
	newInfoBlockData := strings.TrimSuffix(newInfoBlock.String(), "\n")

	newTemplate := strings.ReplaceAll(data, infoBlockClean, newInfoBlockData)
	if changed {
		_ = os.WriteFile(filePath, []byte(newTemplate), 0644)
		gologger.Info().Msgf("Wrote updated template to %s\n", filePath)
	}
}

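// getPrimaryCVSSData returns the primary CVSS v3.1 metric for a vulnerability,
// falling back to CVSS v3.0 when no v3.1 metric is present.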
func getPrimaryCVSSData(vuln nvd.Vulnerability) (nvd.CvssData, error) {
	for _, data := range vuln.Cve.Metrics.CvssMetricV31 {
		if data.Type == "Primary" {
			return data.CvssData, nil
		}
	}
	for _, data := range vuln.Cve.Metrics.CvssMetricV3 {
		if data.Type == "Primary" {
			return data.CvssData, nil
		}
	}
	return nvd.CvssData{}, fmt.Errorf("no primary cvss metric found")
}

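// getEnglishLangString returns the first English ("en") entry from a list of
// NVD language strings.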
func getEnglishLangString(data []nvd.LangString) (string, error) {
	for _, item := range data {
		if item.Lang == "en" {
			return item.Value, nil
		}
	}
	return "", fmt.Errorf("no english item found")
}

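// isSeverityMatchingCvssScore maps a CVSS v3 base score onto the standard
// rating bands (0.1-3.9 low, 4.0-6.9 medium, 7.0-8.9 high, 9.0-10.0 critical)
// and returns the expected severity when it differs from the one already in
// the template, or an empty string otherwise.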
func isSeverityMatchingCvssScore(severity string, score float64) string {
	if score == 0.0 {
		return ""
	}
	var expected string

	if score >= 0.1 && score <= 3.9 {
		expected = "low"
	} else if score >= 4.0 && score <= 6.9 {
		expected = "medium"
	} else if score >= 7.0 && score <= 8.9 {
		expected = "high"
	} else if score >= 9.0 && score <= 10.0 {
		expected = "critical"
	}
	if expected != "" && expected != severity {
		return expected
	}
	return ""
}

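// extractVersionlessCpe keeps the first five components of a CPE 2.3 string
// and replaces the rest with wildcards, e.g.
// "cpe:2.3:a:vendor:product:1.2.3:*:*:*:*:*:*:*" becomes
// "cpe:2.3:a:vendor:product:*:*:*:*:*:*:*:*".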
func extractVersionlessCpe(cpe string) string {
	parts := strings.Split(cpe, ":")
	versionlessPart := parts[0:5]
	rest := strings.Split(strings.Repeat("*", len(parts)-len(versionlessPart)), "")
	return strings.Join(append(versionlessPart, rest...), ":")
}

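// ApiFirstEpssResponse models the JSON envelope returned by the FIRST.org
// EPSS API (https://api.first.org/data/v1/epss).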
type ApiFirstEpssResponse struct {
	Status     string `json:"status"`
	StatusCode int    `json:"status-code"`
	Version    string `json:"version"`
	Access     string `json:"access"`
	Total      int    `json:"total"`
	Offset     int    `json:"offset"`
	Limit      int    `json:"limit"`
	Data       []struct {
		Cve        string `json:"cve"`
		Epss       string `json:"epss"`
		Percentile string `json:"percentile"`
		Date       string `json:"date"`
	} `json:"data"`
}

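// fetchEpss queries the FIRST.org EPSS API for a single CVE and returns its
// EPSS probability score as a float64.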
func fetchEpss(cveId string) (float64, error) {
	resp, err := http.Get(fmt.Sprintf("https://api.first.org/data/v1/epss?cve=%s", cveId))
	if err != nil {
		return 0, fmt.Errorf("unable to fetch EPSS data from first.org: %v", err)
	}
	defer resp.Body.Close()
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return 0, fmt.Errorf("unable to read response body: %v", err)
	}
	var parsedResp ApiFirstEpssResponse
	err = json.Unmarshal(body, &parsedResp)
	if err != nil {
		return 0, fmt.Errorf("error while parsing EPSS response: %v", err)
	}
	if len(parsedResp.Data) != 1 {
		return 0, fmt.Errorf("unexpected number of results in EPSS response. Expecting exactly 1, got %v", len(parsedResp.Data))
	}
	epss := parsedResp.Data[0].Epss
	return strconv.ParseFloat(epss, 64)
}

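// cisaKEVData is the subset of the CISA KEV catalog JSON that this tool consumes.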
type cisaKEVData struct {
	Vulnerabilities []struct {
		CVEID string `json:"cveID"`
	}
}

// fetchCISAKnownExploitedVulnerabilities fetches the CISA Known Exploited
// Vulnerabilities catalog for template tag enrichment
func fetchCISAKnownExploitedVulnerabilities() error {
	data := &cisaKEVData{}

	resp, err := retryablehttp.DefaultClient().Get("https://www.cisa.gov/sites/default/files/feeds/known_exploited_vulnerabilities.json")
	if err != nil {
		return errors.Wrap(err, "could not get cisa kev catalog")
	}
	defer resp.Body.Close()

	if err := json.NewDecoder(resp.Body).Decode(data); err != nil {
		return errors.Wrap(err, "could not decode cisa kev catalog json data")
	}
	cisaKnownExploitedVulnerabilities = make(map[string]struct{})
	for _, vuln := range data.Vulnerabilities {
		cisaKnownExploitedVulnerabilities[strings.ToLower(vuln.CVEID)] = struct{}{}
	}
	return nil
}

// parseAndAddCISAKevTagTemplate parses and adds the `kev` tag to CISA KEV templates.
// It also removes the `cisa` tag if it exists.
func parseAndAddCISAKevTagTemplate(path string, data string) (string, error) {
	block := &InfoBlock{}

	if err := yaml.NewDecoder(strings.NewReader(data)).Decode(block); err != nil {
		return "", errors.Wrap(err, "could not decode template yaml")
	}
	splitted := strings.Split(block.Info.Tags, ",")
	if len(splitted) == 0 {
		return data, nil
	}

	var cisaIndex = -1
	for i, tag := range splitted {
		// If we already have the tag, return
		if tag == "kev" {
			return data, nil
		}
		if tag == "cisa" {
			cisaIndex = i
		}
	}
	// Remove the cisa tag element
	if cisaIndex >= 0 {
		splitted = append(splitted[:cisaIndex], splitted[cisaIndex+1:]...)
	}
	splitted = append(splitted, "kev")
	replaced := strings.ReplaceAll(data, block.Info.Tags, strings.Join(splitted, ","))
	return replaced, os.WriteFile(path, []byte(replaced), os.ModePerm)
}

// parseAndAddReferenceBasedTags parses and adds reference based tags to templates
func parseAndAddReferenceBasedTags(path string, data string) (string, error) {
	block := &InfoBlock{}
	if err := yaml.NewDecoder(strings.NewReader(data)).Decode(block); err != nil {
		return "", errors.Wrap(err, "could not decode template yaml")
	}
	splitted := strings.Split(block.Info.Tags, ",")
	if len(splitted) == 0 {
		return data, nil
	}
	tagsCurrent := fmt.Sprintf("tags: %s", block.Info.Tags)
	newTags := suggestTagsBasedOnReference(block.Info.Reference, splitted)

	if len(newTags) == len(splitted) {
		return data, nil
	}
	replaced := strings.ReplaceAll(data, tagsCurrent, fmt.Sprintf("tags: %s", strings.Join(newTags, ",")))
	return replaced, os.WriteFile(path, []byte(replaced), os.ModePerm)
}

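// referenceMapping maps reference hostnames (or URL prefixes) to the template
// tag they imply.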
var referenceMapping = map[string]string{
	"huntr.dev":               "huntr",
	"hackerone.com":           "hackerone",
	"tenable.com":             "tenable",
	"packetstormsecurity.org": "packetstorm",
	"seclists.org":            "seclists",
	"wpscan.com":              "wpscan",
	"packetstormsecurity.com": "packetstorm",
	"exploit-db.com":          "edb",
	"https://github.com/rapid7/metasploit-framework/": "msf",
	"https://github.com/vulhub/vulhub/":               "vulhub",
}

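// suggestTagsBasedOnReference returns the current tags merged with any tags
// implied by the reference URLs via referenceMapping, deduplicated.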
func suggestTagsBasedOnReference(references, currentTags []string) []string {
	uniqueTags := make(map[string]struct{})
	for _, value := range currentTags {
		uniqueTags[value] = struct{}{}
	}

	for _, reference := range references {
		parsed, err := url.Parse(reference)
		if err != nil {
			continue
		}
		hostname := parsed.Hostname()

		for value, tag := range referenceMapping {
			if strings.HasSuffix(hostname, value) || strings.HasPrefix(reference, value) {
				uniqueTags[tag] = struct{}{}
			}
		}
	}
	newTags := make([]string, 0, len(uniqueTags))
	for tag := range uniqueTags {
		newTags = append(newTags, tag)
	}
	return newTags
}

// InfoBlock clones the info block struct from nuclei, as we don't want any validation
type InfoBlock struct {
	Info TemplateInfo `yaml:"info"`
}

type TemplateClassification struct {
	CvssMetrics string  `yaml:"cvss-metrics,omitempty"`
	CvssScore   float64 `yaml:"cvss-score,omitempty"`
	CveId       string  `yaml:"cve-id,omitempty"`
	CweId       string  `yaml:"cwe-id,omitempty"`
	Cpe         string  `yaml:"cpe,omitempty"`
	EpssScore   float64 `yaml:"epss-score,omitempty"`
}

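// TemplateInfo mirrors the fields of the nuclei template info block that this
// tool reads and rewrites.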
type TemplateInfo struct {
	Name           string                 `yaml:"name"`
	Author         string                 `yaml:"author"`
	Severity       string                 `yaml:"severity,omitempty"`
	Description    string                 `yaml:"description,omitempty"`
	Reference      []string               `yaml:"reference,omitempty"`
	Remediation    string                 `yaml:"remediation,omitempty"`
	Classification TemplateClassification `yaml:"classification,omitempty"`
	Metadata       map[string]interface{} `yaml:"metadata,omitempty"`
	Tags           string                 `yaml:"tags,omitempty"`
}

// parseAndAddMaxRequests parses and adds max requests to templates
func parseAndAddMaxRequests(catalog catalog.Catalog, path, data string) (string, error) {
	template, err := parseTemplate(catalog, path)
	if err != nil {
		gologger.Warning().Label("max-request").Msgf("Could not parse template: %s\n", err)
		return data, err
	}

	if template.TotalRequests < 1 {
		return data, nil
	}
	// locate the original info block so it can be replaced later
	infoBlockStart, infoBlockEnd := getInfoStartEnd(data)
	infoBlockOrig := data[infoBlockStart:infoBlockEnd]
	infoBlockOrig = strings.TrimRight(infoBlockOrig, "\n")

	infoBlock := InfoBlock{}
	err = yaml.Unmarshal([]byte(data), &infoBlock)
	if err != nil {
		gologger.Warning().Label("max-request").Msgf("Could not unmarshal info block: %s\n", err)
		return data, err
	}
	// if metadata is nil, create a new map
	if infoBlock.Info.Metadata == nil {
		infoBlock.Info.Metadata = make(map[string]interface{})
	}
	// do not update if it is already present and equal
	if mr, ok := infoBlock.Info.Metadata["max-request"]; ok && mr.(int) == template.TotalRequests {
		return data, nil
	}
	infoBlock.Info.Metadata["max-request"] = template.TotalRequests

	var newInfoBlock bytes.Buffer
	yamlEncoder := yaml.NewEncoder(&newInfoBlock)
	yamlEncoder.SetIndent(yamlIndentSpaces)
	err = yamlEncoder.Encode(infoBlock)
	if err != nil {
		gologger.Warning().Msgf("Could not marshal info block: %s\n", err)
		return data, err
	}
	newInfoBlockData := strings.TrimSuffix(newInfoBlock.String(), "\n")

	// replace old info block with new info block
	newTemplate := strings.ReplaceAll(data, infoBlockOrig, newInfoBlockData)

	err = os.WriteFile(path, []byte(newTemplate), 0644)
	if err == nil {
		gologger.Info().Label("max-request").Msgf("Wrote updated template to %s\n", path)
	}
	return newTemplate, err
}

// parseTemplate parses a template and returns the template object
func parseTemplate(catalog catalog.Catalog, templatePath string) (*templates.Template, error) {
	executorOpts := protocols.ExecutorOptions{
		Catalog: catalog,
		Options: defaultOpts,
	}
	reader, err := executorOpts.Catalog.OpenFile(templatePath)
	if err != nil {
		return nil, err
	}
	template, err := templates.ParseTemplateFromReader(reader, nil, executorOpts)
	if err != nil {
		return nil, err
	}
	return template, nil
}

// find the start and end of the info block
func getInfoStartEnd(data string) (int, int) {
	info := strings.Index(data, "info:")
	var indices []int
	for _, re := range allTagsRegex {
		// find the first occurrence of the label
		match := re.FindStringIndex(data)
		if match != nil {
			indices = append(indices, match[0])
		}
	}
	// find the first one after info block
	sort.Ints(indices)
	return info, indices[0] - 1
}