diff --git a/v2/cmd/cve-annotate/main.go b/v2/cmd/cve-annotate/main.go
index 3e21fe94e..28f5a501b 100644
--- a/v2/cmd/cve-annotate/main.go
+++ b/v2/cmd/cve-annotate/main.go
@@ -49,7 +49,7 @@ func process() error {
 		return err
 	}
 	for _, path := range paths {
-		data, err := ioutil.ReadFile(path)
+		data, err := os.ReadFile(path)
 		if err != nil {
 			return err
 		}
diff --git a/v2/go.mod b/v2/go.mod
index 7569c9148..8ac001287 100644
--- a/v2/go.mod
+++ b/v2/go.mod
@@ -69,6 +69,7 @@ require github.com/projectdiscovery/folderutil v0.0.0-20211206150108-b4e7ea80f36
 
 require (
 	github.com/Ice3man543/nvd v1.0.8
+	github.com/docker/go-units v0.4.0
 	github.com/openrdap/rdap v0.9.1-0.20191017185644-af93e7ef17b7
 	github.com/projectdiscovery/iputil v0.0.0-20210804143329-3a30fcde43f3
 	github.com/stretchr/testify v1.7.0
diff --git a/v2/go.sum b/v2/go.sum
index 8af50303f..7b0603793 100644
--- a/v2/go.sum
+++ b/v2/go.sum
@@ -134,6 +134,8 @@ github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2/go.mod h1:SqUrOPUn
 github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw=
 github.com/dimchansky/utfbom v1.1.1 h1:vV6w1AhK4VMnhBno/TPVCoK9U/LP0PkLCS9tbxHdi/U=
 github.com/dimchansky/utfbom v1.1.1/go.mod h1:SxdoEBH5qIqFocHMyGOXVAybYJdr71b1Q/j0mACtrfE=
+github.com/docker/go-units v0.4.0 h1:3uh0PgVws3nIA0Q+MwDC8yjEPf9zjRfZZWXZYDct3Tw=
+github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
 github.com/dsnet/compress v0.0.1 h1:PlZu0n3Tuv04TzpfPbrnI0HW/YwodEXDS+oPKahKF0Q=
 github.com/dsnet/compress v0.0.1/go.mod h1:Aw8dCMJ7RioblQeTqt88akK31OvO8Dhf5JflhBbQEHo=
 github.com/dsnet/golib v0.0.0-20171103203638-1ea166775780/go.mod h1:Lj+Z9rebOhdfkVLjJ8T6VcRQv3SXugXy999NBtR9aFY=
diff --git a/v2/internal/runner/runner.go b/v2/internal/runner/runner.go
index 333805119..5f7a5d9c6 100644
--- a/v2/internal/runner/runner.go
+++ b/v2/internal/runner/runner.go
@@ -3,7 +3,6 @@ package runner
 import (
 	"bufio"
 	"encoding/json"
-	"io/ioutil"
 	"os"
 	"path/filepath"
 	"strings"
@@ -158,7 +157,7 @@ func New(options *types.Options) (*Runner, error) {
 	resumeCfg := types.NewResumeCfg()
 	if runner.options.ShouldLoadResume() {
 		gologger.Info().Msg("Resuming from save checkpoint")
-		file, err := ioutil.ReadFile(types.DefaultResumeFilePath())
+		file, err := os.ReadFile(types.DefaultResumeFilePath())
 		if err != nil {
 			return nil, err
 		}
diff --git a/v2/internal/runner/update.go b/v2/internal/runner/update.go
index 4966901ee..6a8bc2751 100644
--- a/v2/internal/runner/update.go
+++ b/v2/internal/runner/update.go
@@ -251,7 +251,7 @@ func (r *Runner) downloadReleaseAndUnzip(ctx context.Context, version, downloadU
 		return nil, fmt.Errorf("failed to download a release file from %s: Not successful status %d", downloadURL, res.StatusCode)
 	}
 
-	buf, err := ioutil.ReadAll(res.Body)
+	buf, err := io.ReadAll(res.Body)
 	if err != nil {
 		return nil, fmt.Errorf("failed to create buffer for zip file: %w", err)
 	}
diff --git a/v2/pkg/operators/common/dsl/dsl_test.go b/v2/pkg/operators/common/dsl/dsl_test.go
index c49bffdaa..dee099f6a 100644
--- a/v2/pkg/operators/common/dsl/dsl_test.go
+++ b/v2/pkg/operators/common/dsl/dsl_test.go
@@ -3,7 +3,7 @@ package dsl
 import (
 	"compress/gzip"
 	"fmt"
-	"io/ioutil"
+	"io"
 	"math"
 	"regexp"
 	"strings"
@@ -46,7 +46,7 @@ func TestDSLGzipSerialize(t *testing.T) {
 	require.Nil(t, err, "could not evaluate compare time")
 
 	reader, _ := gzip.NewReader(strings.NewReader(types.ToString(result)))
-	data, _ := ioutil.ReadAll(reader)
+	data, _ := io.ReadAll(reader)
 	require.Equal(t, "hello world", string(data), "could not get gzip encoded data")
"hello world", string(data), "could not get gzip encoded data") } diff --git a/v2/pkg/operators/operators_test.go b/v2/pkg/operators/operators_test.go index 204cd57ba..8ae881b07 100644 --- a/v2/pkg/operators/operators_test.go +++ b/v2/pkg/operators/operators_test.go @@ -8,10 +8,10 @@ import ( func TestMakeDynamicValuesCallback(t *testing.T) { input := map[string][]string{ - "a": []string{"1", "2"}, - "b": []string{"3"}, - "c": []string{}, - "d": []string{"A", "B", "C"}, + "a": {"1", "2"}, + "b": {"3"}, + "c": {}, + "d": {"A", "B", "C"}, } count := 0 @@ -24,9 +24,9 @@ func TestMakeDynamicValuesCallback(t *testing.T) { t.Run("all", func(t *testing.T) { input := map[string][]string{ - "a": []string{"1"}, - "b": []string{"2"}, - "c": []string{"3"}, + "a": {"1"}, + "b": {"2"}, + "c": {"3"}, } count := 0 @@ -40,10 +40,10 @@ func TestMakeDynamicValuesCallback(t *testing.T) { t.Run("first", func(t *testing.T) { input := map[string][]string{ - "a": []string{"1", "2"}, - "b": []string{"3"}, - "c": []string{}, - "d": []string{"A", "B", "C"}, + "a": {"1", "2"}, + "b": {"3"}, + "c": {}, + "d": {"A", "B", "C"}, } count := 0 diff --git a/v2/pkg/protocols/common/helpers/eventcreator/eventcreator.go b/v2/pkg/protocols/common/helpers/eventcreator/eventcreator.go index 72375a6a5..71fa68640 100644 --- a/v2/pkg/protocols/common/helpers/eventcreator/eventcreator.go +++ b/v2/pkg/protocols/common/helpers/eventcreator/eventcreator.go @@ -7,10 +7,11 @@ import ( // CreateEvent wraps the outputEvent with the result of the operators defined on the request func CreateEvent(request protocols.Request, outputEvent output.InternalEvent, isResponseDebug bool) *output.InternalWrappedEvent { - return CreateEventWithAdditionalOptions(request, outputEvent, isResponseDebug, func(internalWrappedEvent *output.InternalWrappedEvent) {}) + return CreateEventWithAdditionalOptions(request, outputEvent, isResponseDebug, nil) } -// CreateEventWithAdditionalOptions wraps the outputEvent with the result of the operators defined on the request and enables extending the resulting event with additional attributes or values. +// CreateEventWithAdditionalOptions wraps the outputEvent with the result of the operators defined on the request +// and enables extending the resulting event with additional attributes or values. func CreateEventWithAdditionalOptions(request protocols.Request, outputEvent output.InternalEvent, isResponseDebug bool, addAdditionalOptions func(internalWrappedEvent *output.InternalWrappedEvent)) *output.InternalWrappedEvent { event := &output.InternalWrappedEvent{InternalEvent: outputEvent} @@ -19,7 +20,9 @@ func CreateEventWithAdditionalOptions(request protocols.Request, outputEvent out result, ok := compiledOperator.Execute(outputEvent, request.Match, request.Extract, isResponseDebug) if ok && result != nil { event.OperatorsResult = result - addAdditionalOptions(event) + if addAdditionalOptions != nil { + addAdditionalOptions(event) + } event.Results = append(event.Results, request.MakeResultEvent(event)...) 
 		}
 	}
diff --git a/v2/pkg/protocols/file/file.go b/v2/pkg/protocols/file/file.go
index 449cadf15..0bdebca1d 100644
--- a/v2/pkg/protocols/file/file.go
+++ b/v2/pkg/protocols/file/file.go
@@ -4,12 +4,18 @@ import (
 	"path/filepath"
 	"strings"
 
+	"github.com/docker/go-units"
 	"github.com/pkg/errors"
 
 	"github.com/projectdiscovery/nuclei/v2/pkg/operators"
 	"github.com/projectdiscovery/nuclei/v2/pkg/protocols"
 )
 
+var (
+	defaultMaxReadSize, _ = units.FromHumanSize("1Gb")
+	chunkSize, _          = units.FromHumanSize("100Mb")
+)
+
 // Request contains a File matching mechanism for local disk operations.
 type Request struct {
 	// Operators for the current request go here.
@@ -34,11 +40,14 @@ type Request struct {
 	// description: |
 	//   MaxSize is the maximum size of the file to run request on.
 	//
-	//   By default, nuclei will process 5 MB files and not go more than that.
+	//   By default, nuclei will process 1 GB of content and not go more than that.
 	//   It can be set to much lower or higher depending on use.
+	//   If set to "no", all content will be processed.
 	// examples:
-	//   - value: 2048
-	MaxSize int `yaml:"max-size,omitempty" jsonschema:"title=max size data to run request on,description=Maximum size of the file to run request on"`
+	//   - value: 5Mb
+	MaxSize string `yaml:"max-size,omitempty" jsonschema:"title=max size data to run request on,description=Maximum size of the file to run request on"`
+	maxSize int64
+
 	CompiledOperators *operators.Operators `yaml:"-"`
 
 	// cache any variables that may be needed for operation.
@@ -83,10 +92,21 @@ func (request *Request) Compile(options *protocols.ExecuterOptions) error {
 		}
 		request.CompiledOperators = compiled
 	}
-	// By default, use 1GB (1024 MB) as max size to read.
-	if request.MaxSize == 0 {
-		request.MaxSize = 1024 * 1024 * 1024
+
+	// Use the default max read size unless one is defined on the request.
+	switch {
+	case request.MaxSize == "no":
+		request.maxSize = -1
+	case request.MaxSize != "":
+		maxSize, err := units.FromHumanSize(request.MaxSize)
+		if err != nil {
+			return errors.Wrap(err, "could not parse max-size")
+		}
+		request.maxSize = maxSize
+	default:
+		request.maxSize = defaultMaxReadSize
 	}
+
 	request.options = options
 	request.extensions = make(map[string]struct{})
diff --git a/v2/pkg/protocols/file/file_test.go b/v2/pkg/protocols/file/file_test.go
index c32b7a313..73f4fde91 100644
--- a/v2/pkg/protocols/file/file_test.go
+++ b/v2/pkg/protocols/file/file_test.go
@@ -17,7 +17,7 @@ func TestFileCompile(t *testing.T) {
 	templateID := "testing-file"
 	request := &Request{
 		ID:          templateID,
-		MaxSize:     1024,
+		MaxSize:     "1Gb",
 		NoRecursive: false,
 		Extensions:  []string{"all", ".lock"},
 		DenyList:    []string{".go"},
diff --git a/v2/pkg/protocols/file/find_test.go b/v2/pkg/protocols/file/find_test.go
index 9ca543adc..5f8ee41c6 100644
--- a/v2/pkg/protocols/file/find_test.go
+++ b/v2/pkg/protocols/file/find_test.go
@@ -20,7 +20,7 @@ func TestFindInputPaths(t *testing.T) {
 	templateID := "testing-file"
 	request := &Request{
 		ID:          templateID,
-		MaxSize:     1024,
+		MaxSize:     "1Gb",
 		NoRecursive: false,
 		Extensions:  []string{"all", ".lock"},
 		DenyList:    []string{".go"},
diff --git a/v2/pkg/protocols/file/operators.go b/v2/pkg/protocols/file/operators.go
index d4f83ea77..b83f62ec5 100644
--- a/v2/pkg/protocols/file/operators.go
+++ b/v2/pkg/protocols/file/operators.go
@@ -67,12 +67,24 @@ func (request *Request) getMatchPart(part string, data output.InternalEvent) (st
 	return itemStr, true
 }
 
-// responseToDSLMap converts a file response to a map for use in DSL matching
-func (request *Request) responseToDSLMap(raw, inputFilePath, matchedFileName string) output.InternalEvent {
+type fileStatus struct {
+	raw             string
+	inputFilePath   string
+	matchedFileName string
+	lines           int
+	words           int
+	bytes           int
+}
+
+// toDSLMap converts the state of a processed file chunk to a map for use in DSL matching
+func (request *Request) toDSLMap(state *fileStatus) output.InternalEvent {
 	return output.InternalEvent{
-		"path":    inputFilePath,
-		"matched": matchedFileName,
-		"raw":     raw,
+		"path":    state.inputFilePath,
+		"matched": state.matchedFileName,
+		"raw":     state.raw,
+		"lines":   state.lines,
+		"words":   state.words,
+		"bytes":   state.bytes,
 		"type":          request.Type().String(),
 		"template-id":   request.options.TemplateID,
 		"template-info": request.options.TemplateInfo,
@@ -89,6 +101,8 @@ func (request *Request) MakeResultEvent(wrapped *output.InternalWrappedEvent) []
 		return results
 	}
 
+	linesOffset := wrapped.InternalEvent["lines"].(int)
+
 	rawStr, ok := raw.(string)
 	if !ok {
 		return results
@@ -109,7 +123,7 @@ func (request *Request) MakeResultEvent(wrapped *output.InternalWrappedEvent) []
 				lineWords[v] = struct{}{}
 			}
 		}
-		result.LineCount = calculateLineFunc(rawStr, lineWords)
+		result.LineCount = calculateLineFunc(rawStr, linesOffset, lineWords)
 	}
 
 	// Identify the position of match in file using a dirty hack.
@@ -123,7 +137,7 @@ func (request *Request) MakeResultEvent(wrapped *output.InternalWrappedEvent) []
 			if result.FileToIndexPosition == nil {
 				result.FileToIndexPosition = make(map[string]int)
 			}
-			result.FileToIndexPosition[result.Matched] = line
+			result.FileToIndexPosition[result.Matched] = line + linesOffset
 			continue
 		}
 		line++
diff --git a/v2/pkg/protocols/file/operators_test.go b/v2/pkg/protocols/file/operators_test.go
index cfafe5b50..067c5a9b6 100644
--- a/v2/pkg/protocols/file/operators_test.go
+++ b/v2/pkg/protocols/file/operators_test.go
@@ -21,7 +21,7 @@ func TestResponseToDSLMap(t *testing.T) {
 	templateID := "testing-file"
 	request := &Request{
 		ID:          templateID,
-		MaxSize:     1024,
+		MaxSize:     "1Gb",
 		NoRecursive: false,
 		Extensions:  []string{"*", ".lock"},
 		DenyList:    []string{".go"},
@@ -34,8 +34,8 @@ func TestResponseToDSLMap(t *testing.T) {
 	require.Nil(t, err, "could not compile file request")
 
 	resp := "test-data\r\n"
-	event := request.responseToDSLMap(resp, "one.one.one.one", "one.one.one.one")
-	require.Len(t, event, 7, "could not get correct number of items in dsl map")
+	event := request.toDSLMap(&fileStatus{raw: resp, inputFilePath: "one.one.one.one", matchedFileName: "one.one.one.one"})
+	require.Len(t, event, 10, "could not get correct number of items in dsl map")
 	require.Equal(t, resp, event["raw"], "could not get correct resp")
 }
 
@@ -46,7 +46,7 @@ func TestFileOperatorMatch(t *testing.T) {
 	templateID := "testing-file"
 	request := &Request{
 		ID:          templateID,
-		MaxSize:     1024,
+		MaxSize:     "1Gb",
 		NoRecursive: false,
 		Extensions:  []string{"*", ".lock"},
 		DenyList:    []string{".go"},
@@ -59,8 +59,8 @@ func TestFileOperatorMatch(t *testing.T) {
 	require.Nil(t, err, "could not compile file request")
 
 	resp := "test-data\r\n1.1.1.1\r\n"
-	event := request.responseToDSLMap(resp, "one.one.one.one", "one.one.one.one")
-	require.Len(t, event, 7, "could not get correct number of items in dsl map")
+	event := request.toDSLMap(&fileStatus{raw: resp, inputFilePath: "one.one.one.one", matchedFileName: "one.one.one.one"})
+	require.Len(t, event, 10, "could not get correct number of items in dsl map")
 	require.Equal(t, resp, event["raw"], "could not get correct resp")
 
 	t.Run("valid", func(t *testing.T) {
@@ -108,8 +108,8 @@ func TestFileOperatorMatch(t *testing.T) {
 
 	t.Run("caseInsensitive", func(t *testing.T) {
 		resp := "TEST-DATA\r\n1.1.1.1\r\n"
-		event := request.responseToDSLMap(resp, "one.one.one.one", "one.one.one.one")
-		require.Len(t, event, 7, "could not get correct number of items in dsl map")
+		event := request.toDSLMap(&fileStatus{raw: resp, inputFilePath: "one.one.one.one", matchedFileName: "one.one.one.one"})
+		require.Len(t, event, 10, "could not get correct number of items in dsl map")
 		require.Equal(t, resp, event["raw"], "could not get correct resp")
 
 		matcher := &matchers.Matcher{
@@ -134,7 +134,7 @@ func TestFileOperatorExtract(t *testing.T) {
 	templateID := "testing-file"
 	request := &Request{
 		ID:          templateID,
-		MaxSize:     1024,
+		MaxSize:     "1Gb",
 		NoRecursive: false,
 		Extensions:  []string{"*", ".lock"},
 		DenyList:    []string{".go"},
@@ -147,8 +147,8 @@ func TestFileOperatorExtract(t *testing.T) {
 	require.Nil(t, err, "could not compile file request")
 
 	resp := "test-data\r\n1.1.1.1\r\n"
-	event := request.responseToDSLMap(resp, "one.one.one.one", "one.one.one.one")
-	require.Len(t, event, 7, "could not get correct number of items in dsl map")
+	event := request.toDSLMap(&fileStatus{raw: resp, inputFilePath: "one.one.one.one", matchedFileName: "one.one.one.one"})
+	require.Len(t, event, 10, "could not get correct number of items in dsl map")
 	require.Equal(t, resp, event["raw"], "could not get correct resp")
 
 	t.Run("extract", func(t *testing.T) {
@@ -241,7 +241,7 @@ func testFileMakeResult(t *testing.T, matchers []*matchers.Matcher, matcherCondi
 	templateID := "testing-file"
 	request := &Request{
 		ID:          templateID,
-		MaxSize:     1024,
+		MaxSize:     "1Gb",
 		NoRecursive: false,
 		Extensions:  []string{"*", ".lock"},
 		DenyList:    []string{".go"},
@@ -265,8 +265,8 @@ func testFileMakeResult(t *testing.T, matchers []*matchers.Matcher, matcherCondi
 	matchedFileName := "test.txt"
 	fileContent := "test-data\r\n1.1.1.1\r\n"
-	event := request.responseToDSLMap(fileContent, "/tmp", matchedFileName)
-	require.Len(t, event, 7, "could not get correct number of items in dsl map")
+	event := request.toDSLMap(&fileStatus{raw: fileContent, inputFilePath: "/tmp", matchedFileName: matchedFileName})
+	require.Len(t, event, 10, "could not get correct number of items in dsl map")
 	require.Equal(t, fileContent, event["raw"], "could not get correct resp")
 
 	finalEvent := &output.InternalWrappedEvent{InternalEvent: event}
diff --git a/v2/pkg/protocols/file/request.go b/v2/pkg/protocols/file/request.go
index 3617e2100..37a1e894e 100644
--- a/v2/pkg/protocols/file/request.go
+++ b/v2/pkg/protocols/file/request.go
@@ -1,12 +1,14 @@
 package file
 
 import (
+	"bufio"
 	"encoding/hex"
-	"io/ioutil"
+	"io"
 	"os"
 	"sort"
 	"strings"
 
+	"github.com/docker/go-units"
 	"github.com/pkg/errors"
 	"github.com/remeh/sizedwaitgroup"
@@ -15,7 +17,6 @@ import (
 	"github.com/projectdiscovery/nuclei/v2/pkg/protocols"
 	"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/helpers/eventcreator"
 	"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/helpers/responsehighlighter"
-	"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/tostring"
 	templateTypes "github.com/projectdiscovery/nuclei/v2/pkg/templates/types"
 )
 
@@ -27,7 +28,7 @@ func (request *Request) Type() templateTypes.ProtocolType {
 }
 
 // ExecuteWithResults executes the protocol requests and returns results instead of writing them.
-func (request *Request) ExecuteWithResults(input string, metadata /*TODO review unused parameter*/, previous output.InternalEvent, callback protocols.OutputEventCallback) error {
+func (request *Request) ExecuteWithResults(input string, metadata, previous output.InternalEvent, callback protocols.OutputEventCallback) error {
 	wg := sizedwaitgroup.New(request.options.Options.BulkSize)
 
 	err := request.getInputPaths(input, func(data string) {
@@ -49,30 +50,47 @@ func (request *Request) ExecuteWithResults(input string, metadata /*TODO review
 				gologger.Error().Msgf("Could not stat file path %s: %s\n", filePath, err)
 				return
 			}
-			if stat.Size() >= int64(request.MaxSize) {
-				gologger.Verbose().Msgf("Could not process path %s: exceeded max size\n", filePath)
-				return
+			if stat.Size() >= request.maxSize {
+				gologger.Verbose().Msgf("Limiting %s processed data to %s bytes: exceeded max size\n", filePath, units.HumanSize(float64(request.maxSize)))
 			}
+			totalBytes := units.BytesSize(float64(stat.Size()))
+			fileReader := io.LimitReader(file, request.maxSize)
+			var bytesCount, linesCount, wordsCount int
+			scanner := bufio.NewScanner(fileReader)
+			buffer := []byte{}
+			scanner.Buffer(buffer, int(chunkSize))
+			for scanner.Scan() {
+				fileContent := scanner.Text()
+				n := len(fileContent)
-			buffer, err := ioutil.ReadAll(file)
-			if err != nil {
-				gologger.Error().Msgf("Could not read file path %s: %s\n", filePath, err)
-				return
+				// update counters
+				currentBytes := bytesCount + n
+				processedBytes := units.BytesSize(float64(currentBytes))
+
+				gologger.Verbose().Msgf("[%s] Processing file %s chunk %s/%s", request.options.TemplateID, filePath, processedBytes, totalBytes)
+				outputEvent := request.toDSLMap(&fileStatus{
+					raw:             fileContent,
+					inputFilePath:   input,
+					matchedFileName: filePath,
+					lines:           linesCount,
+					words:           wordsCount,
+					bytes:           bytesCount,
+				})
+				for k, v := range previous {
+					outputEvent[k] = v
+				}
+
+				event := eventcreator.CreateEvent(request, outputEvent, request.options.Options.Debug || request.options.Options.DebugResponse)
+
+				dumpResponse(event, request.options, fileContent, filePath)
+				callback(event)
+
+				currentLinesCount := 1 + strings.Count(fileContent, "\n")
+				linesCount += currentLinesCount
+				wordsCount += strings.Count(fileContent, " ")
+				bytesCount = currentBytes
+				request.options.Progress.IncrementRequests()
 			}
-			fileContent := tostring.UnsafeToString(buffer)
-
-			gologger.Verbose().Msgf("[%s] Sent FILE request to %s", request.options.TemplateID, filePath)
-			outputEvent := request.responseToDSLMap(fileContent, input, filePath)
-			for k, v := range previous {
-				outputEvent[k] = v
-			}
-
-			event := eventcreator.CreateEvent(request, outputEvent, request.options.Options.Debug || request.options.Options.DebugResponse)
-
-			dumpResponse(event, request.options, fileContent, filePath)
-
-			callback(event)
-			request.options.Progress.IncrementRequests()
 		}(data)
 	})
 	wg.Wait()
@@ -112,22 +130,15 @@ func getAllStringSubmatchIndex(content string, word string) []int {
 	return indexes
 }
 
-func calculateLineFunc(contents string, words map[string]struct{}) []int {
+func calculateLineFunc(contents string, linesOffset int, words map[string]struct{}) []int {
 	var lines []int
 
 	for word := range words {
 		matches := getAllStringSubmatchIndex(contents, word)
 		for _, index := range matches {
-			lineCount := int(0)
-			for _, c := range contents[:index] {
-				if c == '\n' {
-					lineCount++
-				}
-			}
-			if lineCount > 0 {
-				lines = append(lines, lineCount+1)
-			}
+			lineCount := 1 + strings.Count(contents[:index], "\n")
+			lines = append(lines, linesOffset+lineCount)
 		}
 	}
 	sort.Ints(lines)
diff --git a/v2/pkg/protocols/file/request_test.go b/v2/pkg/protocols/file/request_test.go
index 889f81d76..f3e2dca95 100644
--- a/v2/pkg/protocols/file/request_test.go
+++ b/v2/pkg/protocols/file/request_test.go
@@ -24,7 +24,7 @@ func TestFileExecuteWithResults(t *testing.T) {
 	templateID := "testing-file"
 	request := &Request{
 		ID:          templateID,
-		MaxSize:     1024,
+		MaxSize:     "1Gb",
 		NoRecursive: false,
 		Extensions:  []string{"all"},
 		DenyList:    []string{".go"},
@@ -88,6 +88,6 @@ eeee
 RequestDataTooBig
 dd
 RequestDataTooBig3
-SuspiciousOperation`, map[string]struct{}{"SuspiciousOperation": {}, "RequestDataTooBig": {}})
+SuspiciousOperation`, 0, map[string]struct{}{"SuspiciousOperation": {}, "RequestDataTooBig": {}})
 	require.ElementsMatch(t, []int{4, 7, 9, 10}, lines, "could not calculate correct lines")
 }
diff --git a/v2/pkg/protocols/headless/engine/page_actions_test.go b/v2/pkg/protocols/headless/engine/page_actions_test.go
index f8fb28992..d624df653 100644
--- a/v2/pkg/protocols/headless/engine/page_actions_test.go
+++ b/v2/pkg/protocols/headless/engine/page_actions_test.go
@@ -2,7 +2,7 @@ package engine
 
 import (
 	"fmt"
-	"io/ioutil"
+	"io"
 	"net/http"
 	"net/http/httptest"
 	"net/url"
@@ -414,7 +414,7 @@ func TestActionSetBody(t *testing.T) {
 	}
 
 	handler := func(w http.ResponseWriter, r *http.Request) {
-		body, _ := ioutil.ReadAll(r.Body)
+		body, _ := io.ReadAll(r.Body)
 		_, _ = fmt.Fprintln(w, string(body))
 	}
diff --git a/v2/pkg/protocols/http/race/syncedreadcloser.go b/v2/pkg/protocols/http/race/syncedreadcloser.go
index 160f64ced..4e48662c6 100644
--- a/v2/pkg/protocols/http/race/syncedreadcloser.go
+++ b/v2/pkg/protocols/http/race/syncedreadcloser.go
@@ -3,7 +3,6 @@ package race
 import (
 	"fmt"
 	"io"
-	"io/ioutil"
 	"time"
 )
 
@@ -23,7 +22,7 @@ func NewSyncedReadCloser(r io.ReadCloser) *SyncedReadCloser {
 		s   SyncedReadCloser
 		err error
 	)
-	s.data, err = ioutil.ReadAll(r)
+	s.data, err = io.ReadAll(r)
 	if err != nil {
 		return nil
 	}
diff --git a/v2/pkg/protocols/http/raw/raw.go b/v2/pkg/protocols/http/raw/raw.go
index 3098bec51..c64027f28 100644
--- a/v2/pkg/protocols/http/raw/raw.go
+++ b/v2/pkg/protocols/http/raw/raw.go
@@ -6,7 +6,6 @@ import (
 	"errors"
 	"fmt"
 	"io"
-	"io/ioutil"
 	"net/url"
 	"path"
 	"strings"
@@ -134,7 +133,7 @@ func Parse(request, baseURL string, unsafe bool) (*Request, error) {
 	}
 
 	// Set the request body
-	b, err := ioutil.ReadAll(reader)
+	b, err := io.ReadAll(reader)
 	if err != nil {
 		return nil, fmt.Errorf("could not read request body: %w", err)
 	}
diff --git a/v2/pkg/protocols/http/request.go b/v2/pkg/protocols/http/request.go
index ec6777358..96fa18c5e 100644
--- a/v2/pkg/protocols/http/request.go
+++ b/v2/pkg/protocols/http/request.go
@@ -486,7 +486,7 @@ func (request *Request) executeRequest(reqURL string, generatedRequest *generate
 	} else {
 		bodyReader = resp.Body
 	}
-	data, err := ioutil.ReadAll(bodyReader)
+	data, err := io.ReadAll(bodyReader)
 	if err != nil {
 		// Ignore body read due to server misconfiguration errors
 		if stringsutil.ContainsAny(err.Error(), "gzip: invalid header") {
diff --git a/v2/pkg/protocols/http/signer/aws.go b/v2/pkg/protocols/http/signer/aws.go
index c934d0c11..967bf8c42 100644
--- a/v2/pkg/protocols/http/signer/aws.go
+++ b/v2/pkg/protocols/http/signer/aws.go
@@ -4,7 +4,7 @@ import (
 	"bytes"
 	"context"
 	"errors"
-	"io/ioutil"
+	"io"
 	"net/http"
 	"time"
 
@@ -89,7 +89,7 @@ func (awsSigner *AwsSigner) SignHTTP(request *http.Request, args interface{}) er
 	awsSigner.prepareRequest(request)
 	var body *bytes.Reader
 	if request.Body != nil {
-		bodyBytes, err := ioutil.ReadAll(request.Body)
+		bodyBytes, err := io.ReadAll(request.Body)
 		if err != nil {
 			return err
 		}
diff --git a/v2/pkg/protocols/http/utils.go b/v2/pkg/protocols/http/utils.go
index bee2ef557..eb448c087 100644
--- a/v2/pkg/protocols/http/utils.go
+++ b/v2/pkg/protocols/http/utils.go
@@ -63,7 +63,7 @@ func dumpResponseWithRedirectChain(resp *http.Response, body []byte) ([]redirect
 			break
 		}
 		if redirectResp.Body != nil {
-			body, _ = ioutil.ReadAll(redirectResp.Body)
+			body, _ = io.ReadAll(redirectResp.Body)
 		}
 		respObj := redirectedResponse{
 			headers: respData,
@@ -162,7 +162,7 @@ func handleDecompression(resp *http.Response, bodyOrig []byte) (bodyDec []byte,
 	}
 	defer reader.Close()
 
-	bodyDec, err = ioutil.ReadAll(reader)
+	bodyDec, err = io.ReadAll(reader)
 	if err != nil {
 		return bodyOrig, err
 	}
@@ -173,7 +173,7 @@ func handleDecompression(resp *http.Response, bodyOrig []byte) (bodyDec []byte,
 func decodeGBK(s []byte) ([]byte, error) {
 	I := bytes.NewReader(s)
 	O := transform.NewReader(I, simplifiedchinese.GBK.NewDecoder())
-	d, e := ioutil.ReadAll(O)
+	d, e := io.ReadAll(O)
 	if e != nil {
 		return nil, e
 	}
diff --git a/v2/pkg/protocols/offlinehttp/read_response_test.go b/v2/pkg/protocols/offlinehttp/read_response_test.go
index a26f2371a..da1382bac 100644
--- a/v2/pkg/protocols/offlinehttp/read_response_test.go
+++ b/v2/pkg/protocols/offlinehttp/read_response_test.go
@@ -2,7 +2,7 @@ package offlinehttp
 
 import (
 	"fmt"
-	"io/ioutil"
+	"io"
 	"net/http"
 	"net/http/httptest"
 	"net/http/httputil"
@@ -149,7 +149,7 @@ Server: Google Frontend
 			resp, err := readResponseFromString(tt.data)
 			require.Nil(t, err, "could not read response from string")
 
-			respData, err := ioutil.ReadAll(resp.Body)
+			respData, err := io.ReadAll(resp.Body)
 			require.Nil(t, err, "could not read response body")
 			require.Equal(t, expectedBody, string(respData), "could not get correct parsed body")
 			require.Equal(t, "Google Frontend", resp.Header.Get("Server"), "could not get correct headers")
@@ -190,7 +190,7 @@ Server: Google Frontend
 		respData, err := readResponseFromString(string(b))
 		require.Nil(t, err, "could not read response from string")
 
-		_, err = ioutil.ReadAll(respData.Body)
+		_, err = io.ReadAll(respData.Body)
 		require.Nil(t, err, "could not read response body")
 
 		require.Equal(t, "Google Frontend", respData.Header.Get("Server"), "could not get correct headers")
diff --git a/v2/pkg/protocols/offlinehttp/request.go b/v2/pkg/protocols/offlinehttp/request.go
index 10b77b599..2b794b69c 100644
--- a/v2/pkg/protocols/offlinehttp/request.go
+++ b/v2/pkg/protocols/offlinehttp/request.go
@@ -1,7 +1,7 @@
 package offlinehttp
 
 import (
-	"io/ioutil"
+	"io"
 	"net/http"
 	"net/http/httputil"
 	"os"
@@ -54,7 +54,7 @@ func (request *Request) ExecuteWithResults(input string, metadata /*TODO review
 			return
 		}
 
-		buffer, err := ioutil.ReadAll(file)
+		buffer, err := io.ReadAll(file)
 		if err != nil {
 			gologger.Error().Msgf("Could not read file path %s: %s\n", data, err)
 			return
@@ -79,7 +79,7 @@ func (request *Request) ExecuteWithResults(input string, metadata /*TODO review
 			return
 		}
 
-		body, err := ioutil.ReadAll(resp.Body)
+		body, err := io.ReadAll(resp.Body)
 		if err != nil {
 			gologger.Error().Msgf("Could not read raw http response body %s: %s\n", data, err)
 			return
diff --git a/v2/pkg/protocols/utils/utils.go b/v2/pkg/protocols/utils/utils.go
index d139f1287..c39d580a9 100644
--- a/v2/pkg/protocols/utils/utils.go
+++ b/v2/pkg/protocols/utils/utils.go
@@ -3,7 +3,7 @@ package utils
 import (
 	"crypto/tls"
 	"crypto/x509"
-	"io/ioutil"
+	"os"
"github.com/projectdiscovery/nuclei/v2/pkg/types" ) @@ -22,7 +22,7 @@ func AddConfiguredClientCertToRequest(tlsConfig *tls.Config, options *types.Opti tlsConfig.Certificates = []tls.Certificate{cert} // Load the certificate authority PEM certificate into the TLS configuration - caCert, err := ioutil.ReadFile(options.ClientCAFile) + caCert, err := os.ReadFile(options.ClientCAFile) if err != nil { return nil, err } diff --git a/v2/pkg/reporting/exporters/es/elasticsearch.go b/v2/pkg/reporting/exporters/es/elasticsearch.go index fbb34f425..3737856ba 100644 --- a/v2/pkg/reporting/exporters/es/elasticsearch.go +++ b/v2/pkg/reporting/exporters/es/elasticsearch.go @@ -6,6 +6,7 @@ import ( "encoding/base64" "encoding/json" "fmt" + "io" "io/ioutil" "net/http" "time" @@ -108,7 +109,7 @@ func (exporter *Exporter) Export(event *output.ResultEvent) error { return err } - b, err = ioutil.ReadAll(res.Body) + b, err = io.ReadAll(res.Body) if err != nil { return errors.New(err.Error() + "error thrown by elasticsearch " + string(b)) } diff --git a/v2/pkg/reporting/trackers/jira/jira.go b/v2/pkg/reporting/trackers/jira/jira.go index dede2b2ed..327b77536 100644 --- a/v2/pkg/reporting/trackers/jira/jira.go +++ b/v2/pkg/reporting/trackers/jira/jira.go @@ -3,7 +3,7 @@ package jira import ( "bytes" "fmt" - "io/ioutil" + "io" "strings" "github.com/andygrunwald/go-jira" @@ -101,7 +101,7 @@ func (i *Integration) CreateNewIssue(event *output.ResultEvent) error { if err != nil { var data string if resp != nil && resp.Body != nil { - d, _ := ioutil.ReadAll(resp.Body) + d, _ := io.ReadAll(resp.Body) data = string(d) } return fmt.Errorf("%w => %s", err, data) @@ -138,7 +138,7 @@ func (i *Integration) FindExistingIssue(event *output.ResultEvent) (string, erro if err != nil { var data string if resp != nil && resp.Body != nil { - d, _ := ioutil.ReadAll(resp.Body) + d, _ := io.ReadAll(resp.Body) data = string(d) } return "", fmt.Errorf("%w => %s", err, data) diff --git a/v2/pkg/utils/utils.go b/v2/pkg/utils/utils.go index f5d30b2f3..f01dd636c 100644 --- a/v2/pkg/utils/utils.go +++ b/v2/pkg/utils/utils.go @@ -2,7 +2,7 @@ package utils import ( "errors" - "io/ioutil" + "io" "net/http" "net/url" "os" @@ -51,7 +51,7 @@ func ReadFromPathOrURL(templatePath string) (data []byte, err error) { return nil, err } defer resp.Body.Close() - data, err = ioutil.ReadAll(resp.Body) + data, err = io.ReadAll(resp.Body) if err != nil { return nil, err } @@ -61,7 +61,7 @@ func ReadFromPathOrURL(templatePath string) (data []byte, err error) { return nil, err } defer f.Close() - data, err = ioutil.ReadAll(f) + data, err = io.ReadAll(f) if err != nil { return nil, err }