Refactor file templates to process large files in chunks and remove deprecated io/ioutil functions

This commit is contained in:
mzack 2022-02-23 13:54:46 +01:00
parent 1d11a8e40c
commit 6746071979
27 changed files with 161 additions and 112 deletions

View File

@ -49,7 +49,7 @@ func process() error {
return err return err
} }
for _, path := range paths { for _, path := range paths {
data, err := ioutil.ReadFile(path) data, err := os.ReadFile(path)
if err != nil { if err != nil {
return err return err
} }

View File

@ -69,6 +69,7 @@ require github.com/projectdiscovery/folderutil v0.0.0-20211206150108-b4e7ea80f36
require ( require (
github.com/Ice3man543/nvd v1.0.8 github.com/Ice3man543/nvd v1.0.8
github.com/docker/go-units v0.4.0
github.com/openrdap/rdap v0.9.1-0.20191017185644-af93e7ef17b7 github.com/openrdap/rdap v0.9.1-0.20191017185644-af93e7ef17b7
github.com/projectdiscovery/iputil v0.0.0-20210804143329-3a30fcde43f3 github.com/projectdiscovery/iputil v0.0.0-20210804143329-3a30fcde43f3
github.com/stretchr/testify v1.7.0 github.com/stretchr/testify v1.7.0

View File

@ -134,6 +134,8 @@ github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2/go.mod h1:SqUrOPUn
github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw=
github.com/dimchansky/utfbom v1.1.1 h1:vV6w1AhK4VMnhBno/TPVCoK9U/LP0PkLCS9tbxHdi/U= github.com/dimchansky/utfbom v1.1.1 h1:vV6w1AhK4VMnhBno/TPVCoK9U/LP0PkLCS9tbxHdi/U=
github.com/dimchansky/utfbom v1.1.1/go.mod h1:SxdoEBH5qIqFocHMyGOXVAybYJdr71b1Q/j0mACtrfE= github.com/dimchansky/utfbom v1.1.1/go.mod h1:SxdoEBH5qIqFocHMyGOXVAybYJdr71b1Q/j0mACtrfE=
github.com/docker/go-units v0.4.0 h1:3uh0PgVws3nIA0Q+MwDC8yjEPf9zjRfZZWXZYDct3Tw=
github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
github.com/dsnet/compress v0.0.1 h1:PlZu0n3Tuv04TzpfPbrnI0HW/YwodEXDS+oPKahKF0Q= github.com/dsnet/compress v0.0.1 h1:PlZu0n3Tuv04TzpfPbrnI0HW/YwodEXDS+oPKahKF0Q=
github.com/dsnet/compress v0.0.1/go.mod h1:Aw8dCMJ7RioblQeTqt88akK31OvO8Dhf5JflhBbQEHo= github.com/dsnet/compress v0.0.1/go.mod h1:Aw8dCMJ7RioblQeTqt88akK31OvO8Dhf5JflhBbQEHo=
github.com/dsnet/golib v0.0.0-20171103203638-1ea166775780/go.mod h1:Lj+Z9rebOhdfkVLjJ8T6VcRQv3SXugXy999NBtR9aFY= github.com/dsnet/golib v0.0.0-20171103203638-1ea166775780/go.mod h1:Lj+Z9rebOhdfkVLjJ8T6VcRQv3SXugXy999NBtR9aFY=

View File

@ -3,7 +3,6 @@ package runner
import ( import (
"bufio" "bufio"
"encoding/json" "encoding/json"
"io/ioutil"
"os" "os"
"path/filepath" "path/filepath"
"strings" "strings"
@ -158,7 +157,7 @@ func New(options *types.Options) (*Runner, error) {
resumeCfg := types.NewResumeCfg() resumeCfg := types.NewResumeCfg()
if runner.options.ShouldLoadResume() { if runner.options.ShouldLoadResume() {
gologger.Info().Msg("Resuming from save checkpoint") gologger.Info().Msg("Resuming from save checkpoint")
file, err := ioutil.ReadFile(types.DefaultResumeFilePath()) file, err := os.ReadFile(types.DefaultResumeFilePath())
if err != nil { if err != nil {
return nil, err return nil, err
} }

View File

@ -251,7 +251,7 @@ func (r *Runner) downloadReleaseAndUnzip(ctx context.Context, version, downloadU
return nil, fmt.Errorf("failed to download a release file from %s: Not successful status %d", downloadURL, res.StatusCode) return nil, fmt.Errorf("failed to download a release file from %s: Not successful status %d", downloadURL, res.StatusCode)
} }
buf, err := ioutil.ReadAll(res.Body) buf, err := io.ReadAll(res.Body)
if err != nil { if err != nil {
return nil, fmt.Errorf("failed to create buffer for zip file: %w", err) return nil, fmt.Errorf("failed to create buffer for zip file: %w", err)
} }

View File

@ -3,7 +3,7 @@ package dsl
import ( import (
"compress/gzip" "compress/gzip"
"fmt" "fmt"
"io/ioutil" "io"
"math" "math"
"regexp" "regexp"
"strings" "strings"
@ -46,7 +46,7 @@ func TestDSLGzipSerialize(t *testing.T) {
require.Nil(t, err, "could not evaluate compare time") require.Nil(t, err, "could not evaluate compare time")
reader, _ := gzip.NewReader(strings.NewReader(types.ToString(result))) reader, _ := gzip.NewReader(strings.NewReader(types.ToString(result)))
data, _ := ioutil.ReadAll(reader) data, _ := io.ReadAll(reader)
require.Equal(t, "hello world", string(data), "could not get gzip encoded data") require.Equal(t, "hello world", string(data), "could not get gzip encoded data")
} }

View File

@ -8,10 +8,10 @@ import (
func TestMakeDynamicValuesCallback(t *testing.T) { func TestMakeDynamicValuesCallback(t *testing.T) {
input := map[string][]string{ input := map[string][]string{
"a": []string{"1", "2"}, "a": {"1", "2"},
"b": []string{"3"}, "b": {"3"},
"c": []string{}, "c": {},
"d": []string{"A", "B", "C"}, "d": {"A", "B", "C"},
} }
count := 0 count := 0
@ -24,9 +24,9 @@ func TestMakeDynamicValuesCallback(t *testing.T) {
t.Run("all", func(t *testing.T) { t.Run("all", func(t *testing.T) {
input := map[string][]string{ input := map[string][]string{
"a": []string{"1"}, "a": {"1"},
"b": []string{"2"}, "b": {"2"},
"c": []string{"3"}, "c": {"3"},
} }
count := 0 count := 0
@ -40,10 +40,10 @@ func TestMakeDynamicValuesCallback(t *testing.T) {
t.Run("first", func(t *testing.T) { t.Run("first", func(t *testing.T) {
input := map[string][]string{ input := map[string][]string{
"a": []string{"1", "2"}, "a": {"1", "2"},
"b": []string{"3"}, "b": {"3"},
"c": []string{}, "c": {},
"d": []string{"A", "B", "C"}, "d": {"A", "B", "C"},
} }
count := 0 count := 0

View File

@ -7,10 +7,11 @@ import (
// CreateEvent wraps the outputEvent with the result of the operators defined on the request // CreateEvent wraps the outputEvent with the result of the operators defined on the request
func CreateEvent(request protocols.Request, outputEvent output.InternalEvent, isResponseDebug bool) *output.InternalWrappedEvent { func CreateEvent(request protocols.Request, outputEvent output.InternalEvent, isResponseDebug bool) *output.InternalWrappedEvent {
return CreateEventWithAdditionalOptions(request, outputEvent, isResponseDebug, func(internalWrappedEvent *output.InternalWrappedEvent) {}) return CreateEventWithAdditionalOptions(request, outputEvent, isResponseDebug, nil)
} }
// CreateEventWithAdditionalOptions wraps the outputEvent with the result of the operators defined on the request and enables extending the resulting event with additional attributes or values. // CreateEventWithAdditionalOptions wraps the outputEvent with the result of the operators defined on the request
// and enables extending the resulting event with additional attributes or values.
func CreateEventWithAdditionalOptions(request protocols.Request, outputEvent output.InternalEvent, isResponseDebug bool, func CreateEventWithAdditionalOptions(request protocols.Request, outputEvent output.InternalEvent, isResponseDebug bool,
addAdditionalOptions func(internalWrappedEvent *output.InternalWrappedEvent)) *output.InternalWrappedEvent { addAdditionalOptions func(internalWrappedEvent *output.InternalWrappedEvent)) *output.InternalWrappedEvent {
event := &output.InternalWrappedEvent{InternalEvent: outputEvent} event := &output.InternalWrappedEvent{InternalEvent: outputEvent}
@ -19,7 +20,9 @@ func CreateEventWithAdditionalOptions(request protocols.Request, outputEvent out
result, ok := compiledOperator.Execute(outputEvent, request.Match, request.Extract, isResponseDebug) result, ok := compiledOperator.Execute(outputEvent, request.Match, request.Extract, isResponseDebug)
if ok && result != nil { if ok && result != nil {
event.OperatorsResult = result event.OperatorsResult = result
addAdditionalOptions(event) if addAdditionalOptions != nil {
addAdditionalOptions(event)
}
event.Results = append(event.Results, request.MakeResultEvent(event)...) event.Results = append(event.Results, request.MakeResultEvent(event)...)
} }
} }

View File

@ -4,12 +4,18 @@ import (
"path/filepath" "path/filepath"
"strings" "strings"
"github.com/docker/go-units"
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/projectdiscovery/nuclei/v2/pkg/operators" "github.com/projectdiscovery/nuclei/v2/pkg/operators"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols" "github.com/projectdiscovery/nuclei/v2/pkg/protocols"
) )
var (
defaultMaxReadSize, _ = units.FromHumanSize("1Gb")
chunkSize, _ = units.FromHumanSize("100Mb")
)
// Request contains a File matching mechanism for local disk operations. // Request contains a File matching mechanism for local disk operations.
type Request struct { type Request struct {
// Operators for the current request go here. // Operators for the current request go here.
@ -34,11 +40,14 @@ type Request struct {
// description: | // description: |
// MaxSize is the maximum size of the file to run request on. // MaxSize is the maximum size of the file to run request on.
// //
// By default, nuclei will process 5 MB files and not go more than that. // By default, nuclei will process 1 GB of content and not go more than that.
// It can be set to much lower or higher depending on use. // It can be set to much lower or higher depending on use.
// If set to "no" then all content will be processed
// examples: // examples:
// - value: 2048 // - value: 5Mb
MaxSize int `yaml:"max-size,omitempty" jsonschema:"title=max size data to run request on,description=Maximum size of the file to run request on"` MaxSize string `yaml:"max-size,omitempty" jsonschema:"title=max size data to run request on,description=Maximum size of the file to run request on"`
maxSize int64
CompiledOperators *operators.Operators `yaml:"-"` CompiledOperators *operators.Operators `yaml:"-"`
// cache any variables that may be needed for operation. // cache any variables that may be needed for operation.
@ -83,10 +92,21 @@ func (request *Request) Compile(options *protocols.ExecuterOptions) error {
} }
request.CompiledOperators = compiled request.CompiledOperators = compiled
} }
// By default, use 1GB (1024 MB) as max size to read.
if request.MaxSize == 0 { // By default, use default max size if not defined
request.MaxSize = 1024 * 1024 * 1024 switch {
case request.MaxSize != "":
maxSize, err := units.FromHumanSize(request.MaxSize)
if err != nil {
return errors.Wrap(err, "could not compile operators")
}
request.maxSize = maxSize
case request.MaxSize == "no":
request.maxSize = -1
default:
request.maxSize = defaultMaxReadSize
} }
request.options = options request.options = options
request.extensions = make(map[string]struct{}) request.extensions = make(map[string]struct{})

View File

@ -17,7 +17,7 @@ func TestFileCompile(t *testing.T) {
templateID := "testing-file" templateID := "testing-file"
request := &Request{ request := &Request{
ID: templateID, ID: templateID,
MaxSize: 1024, MaxSize: "1Gb",
NoRecursive: false, NoRecursive: false,
Extensions: []string{"all", ".lock"}, Extensions: []string{"all", ".lock"},
DenyList: []string{".go"}, DenyList: []string{".go"},

View File

@ -20,7 +20,7 @@ func TestFindInputPaths(t *testing.T) {
templateID := "testing-file" templateID := "testing-file"
request := &Request{ request := &Request{
ID: templateID, ID: templateID,
MaxSize: 1024, MaxSize: "1Gb",
NoRecursive: false, NoRecursive: false,
Extensions: []string{"all", ".lock"}, Extensions: []string{"all", ".lock"},
DenyList: []string{".go"}, DenyList: []string{".go"},

View File

@ -67,12 +67,24 @@ func (request *Request) getMatchPart(part string, data output.InternalEvent) (st
return itemStr, true return itemStr, true
} }
// responseToDSLMap converts a file response to a map for use in DSL matching type fileStatus struct {
func (request *Request) responseToDSLMap(raw, inputFilePath, matchedFileName string) output.InternalEvent { raw string
inputFilePath string
matchedFileName string
lines int
words int
bytes int
}
// toDSLMap converts a file chunk elaboration to a map for use in DSL matching
func (request *Request) toDSLMap(state *fileStatus) output.InternalEvent {
return output.InternalEvent{ return output.InternalEvent{
"path": inputFilePath, "path": state.inputFilePath,
"matched": matchedFileName, "matched": state.matchedFileName,
"raw": raw, "raw": state.raw,
"lines": state.lines,
"words": state.words,
"bytes": state.bytes,
"type": request.Type().String(), "type": request.Type().String(),
"template-id": request.options.TemplateID, "template-id": request.options.TemplateID,
"template-info": request.options.TemplateInfo, "template-info": request.options.TemplateInfo,
@ -89,6 +101,8 @@ func (request *Request) MakeResultEvent(wrapped *output.InternalWrappedEvent) []
return results return results
} }
linesOffset := wrapped.InternalEvent["lines"].(int)
rawStr, ok := raw.(string) rawStr, ok := raw.(string)
if !ok { if !ok {
return results return results
@ -109,7 +123,7 @@ func (request *Request) MakeResultEvent(wrapped *output.InternalWrappedEvent) []
lineWords[v] = struct{}{} lineWords[v] = struct{}{}
} }
} }
result.LineCount = calculateLineFunc(rawStr, lineWords) result.LineCount = calculateLineFunc(rawStr, linesOffset, lineWords)
} }
// Identify the position of match in file using a dirty hack. // Identify the position of match in file using a dirty hack.
@ -123,7 +137,7 @@ func (request *Request) MakeResultEvent(wrapped *output.InternalWrappedEvent) []
if result.FileToIndexPosition == nil { if result.FileToIndexPosition == nil {
result.FileToIndexPosition = make(map[string]int) result.FileToIndexPosition = make(map[string]int)
} }
result.FileToIndexPosition[result.Matched] = line result.FileToIndexPosition[result.Matched] = line + linesOffset
continue continue
} }
line++ line++

View File

@ -21,7 +21,7 @@ func TestResponseToDSLMap(t *testing.T) {
templateID := "testing-file" templateID := "testing-file"
request := &Request{ request := &Request{
ID: templateID, ID: templateID,
MaxSize: 1024, MaxSize: "1Gb",
NoRecursive: false, NoRecursive: false,
Extensions: []string{"*", ".lock"}, Extensions: []string{"*", ".lock"},
DenyList: []string{".go"}, DenyList: []string{".go"},
@ -34,8 +34,8 @@ func TestResponseToDSLMap(t *testing.T) {
require.Nil(t, err, "could not compile file request") require.Nil(t, err, "could not compile file request")
resp := "test-data\r\n" resp := "test-data\r\n"
event := request.responseToDSLMap(resp, "one.one.one.one", "one.one.one.one") event := request.toDSLMap(&fileStatus{raw: resp, inputFilePath: "one.one.one.one", matchedFileName: "one.one.one.one"})
require.Len(t, event, 7, "could not get correct number of items in dsl map") require.Len(t, event, 10, "could not get correct number of items in dsl map")
require.Equal(t, resp, event["raw"], "could not get correct resp") require.Equal(t, resp, event["raw"], "could not get correct resp")
} }
@ -46,7 +46,7 @@ func TestFileOperatorMatch(t *testing.T) {
templateID := "testing-file" templateID := "testing-file"
request := &Request{ request := &Request{
ID: templateID, ID: templateID,
MaxSize: 1024, MaxSize: "1Gb",
NoRecursive: false, NoRecursive: false,
Extensions: []string{"*", ".lock"}, Extensions: []string{"*", ".lock"},
DenyList: []string{".go"}, DenyList: []string{".go"},
@ -59,8 +59,8 @@ func TestFileOperatorMatch(t *testing.T) {
require.Nil(t, err, "could not compile file request") require.Nil(t, err, "could not compile file request")
resp := "test-data\r\n1.1.1.1\r\n" resp := "test-data\r\n1.1.1.1\r\n"
event := request.responseToDSLMap(resp, "one.one.one.one", "one.one.one.one") event := request.toDSLMap(&fileStatus{raw: resp, inputFilePath: "one.one.one.one", matchedFileName: "one.one.one.one"})
require.Len(t, event, 7, "could not get correct number of items in dsl map") require.Len(t, event, 10, "could not get correct number of items in dsl map")
require.Equal(t, resp, event["raw"], "could not get correct resp") require.Equal(t, resp, event["raw"], "could not get correct resp")
t.Run("valid", func(t *testing.T) { t.Run("valid", func(t *testing.T) {
@ -108,8 +108,8 @@ func TestFileOperatorMatch(t *testing.T) {
t.Run("caseInsensitive", func(t *testing.T) { t.Run("caseInsensitive", func(t *testing.T) {
resp := "TEST-DATA\r\n1.1.1.1\r\n" resp := "TEST-DATA\r\n1.1.1.1\r\n"
event := request.responseToDSLMap(resp, "one.one.one.one", "one.one.one.one") event := request.toDSLMap(&fileStatus{raw: resp, inputFilePath: "one.one.one.one", matchedFileName: "one.one.one.one"})
require.Len(t, event, 7, "could not get correct number of items in dsl map") require.Len(t, event, 10, "could not get correct number of items in dsl map")
require.Equal(t, resp, event["raw"], "could not get correct resp") require.Equal(t, resp, event["raw"], "could not get correct resp")
matcher := &matchers.Matcher{ matcher := &matchers.Matcher{
@ -134,7 +134,7 @@ func TestFileOperatorExtract(t *testing.T) {
templateID := "testing-file" templateID := "testing-file"
request := &Request{ request := &Request{
ID: templateID, ID: templateID,
MaxSize: 1024, MaxSize: "1Gb",
NoRecursive: false, NoRecursive: false,
Extensions: []string{"*", ".lock"}, Extensions: []string{"*", ".lock"},
DenyList: []string{".go"}, DenyList: []string{".go"},
@ -147,8 +147,8 @@ func TestFileOperatorExtract(t *testing.T) {
require.Nil(t, err, "could not compile file request") require.Nil(t, err, "could not compile file request")
resp := "test-data\r\n1.1.1.1\r\n" resp := "test-data\r\n1.1.1.1\r\n"
event := request.responseToDSLMap(resp, "one.one.one.one", "one.one.one.one") event := request.toDSLMap(&fileStatus{raw: resp, inputFilePath: "one.one.one.one", matchedFileName: "one.one.one.one"})
require.Len(t, event, 7, "could not get correct number of items in dsl map") require.Len(t, event, 10, "could not get correct number of items in dsl map")
require.Equal(t, resp, event["raw"], "could not get correct resp") require.Equal(t, resp, event["raw"], "could not get correct resp")
t.Run("extract", func(t *testing.T) { t.Run("extract", func(t *testing.T) {
@ -241,7 +241,7 @@ func testFileMakeResult(t *testing.T, matchers []*matchers.Matcher, matcherCondi
templateID := "testing-file" templateID := "testing-file"
request := &Request{ request := &Request{
ID: templateID, ID: templateID,
MaxSize: 1024, MaxSize: "1Gb",
NoRecursive: false, NoRecursive: false,
Extensions: []string{"*", ".lock"}, Extensions: []string{"*", ".lock"},
DenyList: []string{".go"}, DenyList: []string{".go"},
@ -265,8 +265,8 @@ func testFileMakeResult(t *testing.T, matchers []*matchers.Matcher, matcherCondi
matchedFileName := "test.txt" matchedFileName := "test.txt"
fileContent := "test-data\r\n1.1.1.1\r\n" fileContent := "test-data\r\n1.1.1.1\r\n"
event := request.responseToDSLMap(fileContent, "/tmp", matchedFileName) event := request.toDSLMap(&fileStatus{raw: fileContent, inputFilePath: "/tmp", matchedFileName: matchedFileName})
require.Len(t, event, 7, "could not get correct number of items in dsl map") require.Len(t, event, 10, "could not get correct number of items in dsl map")
require.Equal(t, fileContent, event["raw"], "could not get correct resp") require.Equal(t, fileContent, event["raw"], "could not get correct resp")
finalEvent := &output.InternalWrappedEvent{InternalEvent: event} finalEvent := &output.InternalWrappedEvent{InternalEvent: event}

View File

@ -1,12 +1,14 @@
package file package file
import ( import (
"bufio"
"encoding/hex" "encoding/hex"
"io/ioutil" "io"
"os" "os"
"sort" "sort"
"strings" "strings"
"github.com/docker/go-units"
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/remeh/sizedwaitgroup" "github.com/remeh/sizedwaitgroup"
@ -15,7 +17,6 @@ import (
"github.com/projectdiscovery/nuclei/v2/pkg/protocols" "github.com/projectdiscovery/nuclei/v2/pkg/protocols"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/helpers/eventcreator" "github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/helpers/eventcreator"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/helpers/responsehighlighter" "github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/helpers/responsehighlighter"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/tostring"
templateTypes "github.com/projectdiscovery/nuclei/v2/pkg/templates/types" templateTypes "github.com/projectdiscovery/nuclei/v2/pkg/templates/types"
) )
@ -27,7 +28,7 @@ func (request *Request) Type() templateTypes.ProtocolType {
} }
// ExecuteWithResults executes the protocol requests and returns results instead of writing them. // ExecuteWithResults executes the protocol requests and returns results instead of writing them.
func (request *Request) ExecuteWithResults(input string, metadata /*TODO review unused parameter*/, previous output.InternalEvent, callback protocols.OutputEventCallback) error { func (request *Request) ExecuteWithResults(input string, metadata, previous output.InternalEvent, callback protocols.OutputEventCallback) error {
wg := sizedwaitgroup.New(request.options.Options.BulkSize) wg := sizedwaitgroup.New(request.options.Options.BulkSize)
err := request.getInputPaths(input, func(data string) { err := request.getInputPaths(input, func(data string) {
@ -49,30 +50,47 @@ func (request *Request) ExecuteWithResults(input string, metadata /*TODO review
gologger.Error().Msgf("Could not stat file path %s: %s\n", filePath, err) gologger.Error().Msgf("Could not stat file path %s: %s\n", filePath, err)
return return
} }
if stat.Size() >= int64(request.MaxSize) { if stat.Size() >= request.maxSize {
gologger.Verbose().Msgf("Could not process path %s: exceeded max size\n", filePath) gologger.Verbose().Msgf("Limiting %s processed data to %s bytes: exceeded max size\n", filePath, units.HumanSize(float64(request.maxSize)))
return
} }
totalBytes := units.BytesSize(float64(stat.Size()))
fileReader := io.LimitReader(file, request.maxSize)
var bytesCount, linesCount, wordsCount int
scanner := bufio.NewScanner(fileReader)
buffer := []byte{}
scanner.Buffer(buffer, int(chunkSize))
for scanner.Scan() {
fileContent := scanner.Text()
n := len(fileContent)
buffer, err := ioutil.ReadAll(file) // update counters
if err != nil { currentBytes := bytesCount + n
gologger.Error().Msgf("Could not read file path %s: %s\n", filePath, err) processedBytes := units.BytesSize(float64(currentBytes))
return
gologger.Verbose().Msgf("[%s] Processing file %s chunk %s/%s", request.options.TemplateID, filePath, processedBytes, totalBytes)
outputEvent := request.toDSLMap(&fileStatus{
raw: fileContent,
inputFilePath: input,
matchedFileName: filePath,
lines: linesCount,
words: wordsCount,
bytes: bytesCount,
})
for k, v := range previous {
outputEvent[k] = v
}
event := eventcreator.CreateEvent(request, outputEvent, request.options.Options.Debug || request.options.Options.DebugResponse)
dumpResponse(event, request.options, fileContent, filePath)
callback(event)
currentLinesCount := 1 + strings.Count(fileContent, "\n")
linesCount += currentLinesCount
wordsCount += strings.Count(fileContent, " ")
bytesCount = currentBytes
request.options.Progress.IncrementRequests()
} }
fileContent := tostring.UnsafeToString(buffer)
gologger.Verbose().Msgf("[%s] Sent FILE request to %s", request.options.TemplateID, filePath)
outputEvent := request.responseToDSLMap(fileContent, input, filePath)
for k, v := range previous {
outputEvent[k] = v
}
event := eventcreator.CreateEvent(request, outputEvent, request.options.Options.Debug || request.options.Options.DebugResponse)
dumpResponse(event, request.options, fileContent, filePath)
callback(event)
request.options.Progress.IncrementRequests()
}(data) }(data)
}) })
wg.Wait() wg.Wait()
@ -112,22 +130,15 @@ func getAllStringSubmatchIndex(content string, word string) []int {
return indexes return indexes
} }
func calculateLineFunc(contents string, words map[string]struct{}) []int { func calculateLineFunc(contents string, linesOffset int, words map[string]struct{}) []int {
var lines []int var lines []int
for word := range words { for word := range words {
matches := getAllStringSubmatchIndex(contents, word) matches := getAllStringSubmatchIndex(contents, word)
for _, index := range matches { for _, index := range matches {
lineCount := int(0) lineCount := 1 + strings.Count(contents[:index], "\n")
for _, c := range contents[:index] { lines = append(lines, linesOffset+lineCount)
if c == '\n' {
lineCount++
}
}
if lineCount > 0 {
lines = append(lines, lineCount+1)
}
} }
} }
sort.Ints(lines) sort.Ints(lines)

View File

@ -24,7 +24,7 @@ func TestFileExecuteWithResults(t *testing.T) {
templateID := "testing-file" templateID := "testing-file"
request := &Request{ request := &Request{
ID: templateID, ID: templateID,
MaxSize: 1024, MaxSize: "1Gb",
NoRecursive: false, NoRecursive: false,
Extensions: []string{"all"}, Extensions: []string{"all"},
DenyList: []string{".go"}, DenyList: []string{".go"},
@ -88,6 +88,6 @@ eeee
RequestDataTooBig RequestDataTooBig
dd dd
RequestDataTooBig3 RequestDataTooBig3
SuspiciousOperation`, map[string]struct{}{"SuspiciousOperation": {}, "RequestDataTooBig": {}}) SuspiciousOperation`, 0, map[string]struct{}{"SuspiciousOperation": {}, "RequestDataTooBig": {}})
require.ElementsMatch(t, []int{4, 7, 9, 10}, lines, "could not calculate correct lines") require.ElementsMatch(t, []int{4, 7, 9, 10}, lines, "could not calculate correct lines")
} }

View File

@ -2,7 +2,7 @@ package engine
import ( import (
"fmt" "fmt"
"io/ioutil" "io"
"net/http" "net/http"
"net/http/httptest" "net/http/httptest"
"net/url" "net/url"
@ -414,7 +414,7 @@ func TestActionSetBody(t *testing.T) {
} }
handler := func(w http.ResponseWriter, r *http.Request) { handler := func(w http.ResponseWriter, r *http.Request) {
body, _ := ioutil.ReadAll(r.Body) body, _ := io.ReadAll(r.Body)
_, _ = fmt.Fprintln(w, string(body)) _, _ = fmt.Fprintln(w, string(body))
} }

View File

@ -3,7 +3,6 @@ package race
import ( import (
"fmt" "fmt"
"io" "io"
"io/ioutil"
"time" "time"
) )
@ -23,7 +22,7 @@ func NewSyncedReadCloser(r io.ReadCloser) *SyncedReadCloser {
s SyncedReadCloser s SyncedReadCloser
err error err error
) )
s.data, err = ioutil.ReadAll(r) s.data, err = io.ReadAll(r)
if err != nil { if err != nil {
return nil return nil
} }

View File

@ -6,7 +6,6 @@ import (
"errors" "errors"
"fmt" "fmt"
"io" "io"
"io/ioutil"
"net/url" "net/url"
"path" "path"
"strings" "strings"
@ -134,7 +133,7 @@ func Parse(request, baseURL string, unsafe bool) (*Request, error) {
} }
// Set the request body // Set the request body
b, err := ioutil.ReadAll(reader) b, err := io.ReadAll(reader)
if err != nil { if err != nil {
return nil, fmt.Errorf("could not read request body: %w", err) return nil, fmt.Errorf("could not read request body: %w", err)
} }

View File

@ -486,7 +486,7 @@ func (request *Request) executeRequest(reqURL string, generatedRequest *generate
} else { } else {
bodyReader = resp.Body bodyReader = resp.Body
} }
data, err := ioutil.ReadAll(bodyReader) data, err := io.ReadAll(bodyReader)
if err != nil { if err != nil {
// Ignore body read due to server misconfiguration errors // Ignore body read due to server misconfiguration errors
if stringsutil.ContainsAny(err.Error(), "gzip: invalid header") { if stringsutil.ContainsAny(err.Error(), "gzip: invalid header") {

View File

@ -4,7 +4,7 @@ import (
"bytes" "bytes"
"context" "context"
"errors" "errors"
"io/ioutil" "io"
"net/http" "net/http"
"time" "time"
@ -89,7 +89,7 @@ func (awsSigner *AwsSigner) SignHTTP(request *http.Request, args interface{}) er
awsSigner.prepareRequest(request) awsSigner.prepareRequest(request)
var body *bytes.Reader var body *bytes.Reader
if request.Body != nil { if request.Body != nil {
bodyBytes, err := ioutil.ReadAll(request.Body) bodyBytes, err := io.ReadAll(request.Body)
if err != nil { if err != nil {
return err return err
} }

View File

@ -63,7 +63,7 @@ func dumpResponseWithRedirectChain(resp *http.Response, body []byte) ([]redirect
break break
} }
if redirectResp.Body != nil { if redirectResp.Body != nil {
body, _ = ioutil.ReadAll(redirectResp.Body) body, _ = io.ReadAll(redirectResp.Body)
} }
respObj := redirectedResponse{ respObj := redirectedResponse{
headers: respData, headers: respData,
@ -162,7 +162,7 @@ func handleDecompression(resp *http.Response, bodyOrig []byte) (bodyDec []byte,
} }
defer reader.Close() defer reader.Close()
bodyDec, err = ioutil.ReadAll(reader) bodyDec, err = io.ReadAll(reader)
if err != nil { if err != nil {
return bodyOrig, err return bodyOrig, err
} }
@ -173,7 +173,7 @@ func handleDecompression(resp *http.Response, bodyOrig []byte) (bodyDec []byte,
func decodeGBK(s []byte) ([]byte, error) { func decodeGBK(s []byte) ([]byte, error) {
I := bytes.NewReader(s) I := bytes.NewReader(s)
O := transform.NewReader(I, simplifiedchinese.GBK.NewDecoder()) O := transform.NewReader(I, simplifiedchinese.GBK.NewDecoder())
d, e := ioutil.ReadAll(O) d, e := io.ReadAll(O)
if e != nil { if e != nil {
return nil, e return nil, e
} }

View File

@ -2,7 +2,7 @@ package offlinehttp
import ( import (
"fmt" "fmt"
"io/ioutil" "io"
"net/http" "net/http"
"net/http/httptest" "net/http/httptest"
"net/http/httputil" "net/http/httputil"
@ -149,7 +149,7 @@ Server: Google Frontend
resp, err := readResponseFromString(tt.data) resp, err := readResponseFromString(tt.data)
require.Nil(t, err, "could not read response from string") require.Nil(t, err, "could not read response from string")
respData, err := ioutil.ReadAll(resp.Body) respData, err := io.ReadAll(resp.Body)
require.Nil(t, err, "could not read response body") require.Nil(t, err, "could not read response body")
require.Equal(t, expectedBody, string(respData), "could not get correct parsed body") require.Equal(t, expectedBody, string(respData), "could not get correct parsed body")
require.Equal(t, "Google Frontend", resp.Header.Get("Server"), "could not get correct headers") require.Equal(t, "Google Frontend", resp.Header.Get("Server"), "could not get correct headers")
@ -190,7 +190,7 @@ Server: Google Frontend
respData, err := readResponseFromString(string(b)) respData, err := readResponseFromString(string(b))
require.Nil(t, err, "could not read response from string") require.Nil(t, err, "could not read response from string")
_, err = ioutil.ReadAll(respData.Body) _, err = io.ReadAll(respData.Body)
require.Nil(t, err, "could not read response body") require.Nil(t, err, "could not read response body")
require.Equal(t, "Google Frontend", respData.Header.Get("Server"), "could not get correct headers") require.Equal(t, "Google Frontend", respData.Header.Get("Server"), "could not get correct headers")

View File

@ -1,7 +1,7 @@
package offlinehttp package offlinehttp
import ( import (
"io/ioutil" "io"
"net/http" "net/http"
"net/http/httputil" "net/http/httputil"
"os" "os"
@ -54,7 +54,7 @@ func (request *Request) ExecuteWithResults(input string, metadata /*TODO review
return return
} }
buffer, err := ioutil.ReadAll(file) buffer, err := io.ReadAll(file)
if err != nil { if err != nil {
gologger.Error().Msgf("Could not read file path %s: %s\n", data, err) gologger.Error().Msgf("Could not read file path %s: %s\n", data, err)
return return
@ -79,7 +79,7 @@ func (request *Request) ExecuteWithResults(input string, metadata /*TODO review
return return
} }
body, err := ioutil.ReadAll(resp.Body) body, err := io.ReadAll(resp.Body)
if err != nil { if err != nil {
gologger.Error().Msgf("Could not read raw http response body %s: %s\n", data, err) gologger.Error().Msgf("Could not read raw http response body %s: %s\n", data, err)
return return

View File

@ -3,7 +3,7 @@ package utils
import ( import (
"crypto/tls" "crypto/tls"
"crypto/x509" "crypto/x509"
"io/ioutil" "os"
"github.com/projectdiscovery/nuclei/v2/pkg/types" "github.com/projectdiscovery/nuclei/v2/pkg/types"
) )
@ -22,7 +22,7 @@ func AddConfiguredClientCertToRequest(tlsConfig *tls.Config, options *types.Opti
tlsConfig.Certificates = []tls.Certificate{cert} tlsConfig.Certificates = []tls.Certificate{cert}
// Load the certificate authority PEM certificate into the TLS configuration // Load the certificate authority PEM certificate into the TLS configuration
caCert, err := ioutil.ReadFile(options.ClientCAFile) caCert, err := os.ReadFile(options.ClientCAFile)
if err != nil { if err != nil {
return nil, err return nil, err
} }

View File

@ -6,6 +6,7 @@ import (
"encoding/base64" "encoding/base64"
"encoding/json" "encoding/json"
"fmt" "fmt"
"io"
"io/ioutil" "io/ioutil"
"net/http" "net/http"
"time" "time"
@ -108,7 +109,7 @@ func (exporter *Exporter) Export(event *output.ResultEvent) error {
return err return err
} }
b, err = ioutil.ReadAll(res.Body) b, err = io.ReadAll(res.Body)
if err != nil { if err != nil {
return errors.New(err.Error() + "error thrown by elasticsearch " + string(b)) return errors.New(err.Error() + "error thrown by elasticsearch " + string(b))
} }

View File

@ -3,7 +3,7 @@ package jira
import ( import (
"bytes" "bytes"
"fmt" "fmt"
"io/ioutil" "io"
"strings" "strings"
"github.com/andygrunwald/go-jira" "github.com/andygrunwald/go-jira"
@ -101,7 +101,7 @@ func (i *Integration) CreateNewIssue(event *output.ResultEvent) error {
if err != nil { if err != nil {
var data string var data string
if resp != nil && resp.Body != nil { if resp != nil && resp.Body != nil {
d, _ := ioutil.ReadAll(resp.Body) d, _ := io.ReadAll(resp.Body)
data = string(d) data = string(d)
} }
return fmt.Errorf("%w => %s", err, data) return fmt.Errorf("%w => %s", err, data)
@ -138,7 +138,7 @@ func (i *Integration) FindExistingIssue(event *output.ResultEvent) (string, erro
if err != nil { if err != nil {
var data string var data string
if resp != nil && resp.Body != nil { if resp != nil && resp.Body != nil {
d, _ := ioutil.ReadAll(resp.Body) d, _ := io.ReadAll(resp.Body)
data = string(d) data = string(d)
} }
return "", fmt.Errorf("%w => %s", err, data) return "", fmt.Errorf("%w => %s", err, data)

View File

@ -2,7 +2,7 @@ package utils
import ( import (
"errors" "errors"
"io/ioutil" "io"
"net/http" "net/http"
"net/url" "net/url"
"os" "os"
@ -51,7 +51,7 @@ func ReadFromPathOrURL(templatePath string) (data []byte, err error) {
return nil, err return nil, err
} }
defer resp.Body.Close() defer resp.Body.Close()
data, err = ioutil.ReadAll(resp.Body) data, err = io.ReadAll(resp.Body)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -61,7 +61,7 @@ func ReadFromPathOrURL(templatePath string) (data []byte, err error) {
return nil, err return nil, err
} }
defer f.Close() defer f.Close()
data, err = ioutil.ReadAll(f) data, err = io.ReadAll(f)
if err != nil { if err != nil {
return nil, err return nil, err
} }