Refactor file templates to process large files in chunks and replace deprecated io/ioutil methods with their io/os equivalents

This commit is contained in:
mzack 2022-02-23 13:54:46 +01:00
parent 1d11a8e40c
commit 6746071979
27 changed files with 161 additions and 112 deletions

View File

@ -49,7 +49,7 @@ func process() error {
return err
}
for _, path := range paths {
data, err := ioutil.ReadFile(path)
data, err := os.ReadFile(path)
if err != nil {
return err
}

View File

@ -69,6 +69,7 @@ require github.com/projectdiscovery/folderutil v0.0.0-20211206150108-b4e7ea80f36
require (
github.com/Ice3man543/nvd v1.0.8
github.com/docker/go-units v0.4.0
github.com/openrdap/rdap v0.9.1-0.20191017185644-af93e7ef17b7
github.com/projectdiscovery/iputil v0.0.0-20210804143329-3a30fcde43f3
github.com/stretchr/testify v1.7.0

View File

@ -134,6 +134,8 @@ github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2/go.mod h1:SqUrOPUn
github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw=
github.com/dimchansky/utfbom v1.1.1 h1:vV6w1AhK4VMnhBno/TPVCoK9U/LP0PkLCS9tbxHdi/U=
github.com/dimchansky/utfbom v1.1.1/go.mod h1:SxdoEBH5qIqFocHMyGOXVAybYJdr71b1Q/j0mACtrfE=
github.com/docker/go-units v0.4.0 h1:3uh0PgVws3nIA0Q+MwDC8yjEPf9zjRfZZWXZYDct3Tw=
github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
github.com/dsnet/compress v0.0.1 h1:PlZu0n3Tuv04TzpfPbrnI0HW/YwodEXDS+oPKahKF0Q=
github.com/dsnet/compress v0.0.1/go.mod h1:Aw8dCMJ7RioblQeTqt88akK31OvO8Dhf5JflhBbQEHo=
github.com/dsnet/golib v0.0.0-20171103203638-1ea166775780/go.mod h1:Lj+Z9rebOhdfkVLjJ8T6VcRQv3SXugXy999NBtR9aFY=

View File

@ -3,7 +3,6 @@ package runner
import (
"bufio"
"encoding/json"
"io/ioutil"
"os"
"path/filepath"
"strings"
@ -158,7 +157,7 @@ func New(options *types.Options) (*Runner, error) {
resumeCfg := types.NewResumeCfg()
if runner.options.ShouldLoadResume() {
gologger.Info().Msg("Resuming from save checkpoint")
file, err := ioutil.ReadFile(types.DefaultResumeFilePath())
file, err := os.ReadFile(types.DefaultResumeFilePath())
if err != nil {
return nil, err
}

View File

@ -251,7 +251,7 @@ func (r *Runner) downloadReleaseAndUnzip(ctx context.Context, version, downloadU
return nil, fmt.Errorf("failed to download a release file from %s: Not successful status %d", downloadURL, res.StatusCode)
}
buf, err := ioutil.ReadAll(res.Body)
buf, err := io.ReadAll(res.Body)
if err != nil {
return nil, fmt.Errorf("failed to create buffer for zip file: %w", err)
}

View File

@ -3,7 +3,7 @@ package dsl
import (
"compress/gzip"
"fmt"
"io/ioutil"
"io"
"math"
"regexp"
"strings"
@ -46,7 +46,7 @@ func TestDSLGzipSerialize(t *testing.T) {
require.Nil(t, err, "could not evaluate compare time")
reader, _ := gzip.NewReader(strings.NewReader(types.ToString(result)))
data, _ := ioutil.ReadAll(reader)
data, _ := io.ReadAll(reader)
require.Equal(t, "hello world", string(data), "could not get gzip encoded data")
}

View File

@ -8,10 +8,10 @@ import (
func TestMakeDynamicValuesCallback(t *testing.T) {
input := map[string][]string{
"a": []string{"1", "2"},
"b": []string{"3"},
"c": []string{},
"d": []string{"A", "B", "C"},
"a": {"1", "2"},
"b": {"3"},
"c": {},
"d": {"A", "B", "C"},
}
count := 0
@ -24,9 +24,9 @@ func TestMakeDynamicValuesCallback(t *testing.T) {
t.Run("all", func(t *testing.T) {
input := map[string][]string{
"a": []string{"1"},
"b": []string{"2"},
"c": []string{"3"},
"a": {"1"},
"b": {"2"},
"c": {"3"},
}
count := 0
@ -40,10 +40,10 @@ func TestMakeDynamicValuesCallback(t *testing.T) {
t.Run("first", func(t *testing.T) {
input := map[string][]string{
"a": []string{"1", "2"},
"b": []string{"3"},
"c": []string{},
"d": []string{"A", "B", "C"},
"a": {"1", "2"},
"b": {"3"},
"c": {},
"d": {"A", "B", "C"},
}
count := 0

View File

@ -7,10 +7,11 @@ import (
// CreateEvent wraps the outputEvent with the result of the operators defined on the request
func CreateEvent(request protocols.Request, outputEvent output.InternalEvent, isResponseDebug bool) *output.InternalWrappedEvent {
return CreateEventWithAdditionalOptions(request, outputEvent, isResponseDebug, func(internalWrappedEvent *output.InternalWrappedEvent) {})
return CreateEventWithAdditionalOptions(request, outputEvent, isResponseDebug, nil)
}
// CreateEventWithAdditionalOptions wraps the outputEvent with the result of the operators defined on the request and enables extending the resulting event with additional attributes or values.
// CreateEventWithAdditionalOptions wraps the outputEvent with the result of the operators defined on the request
// and enables extending the resulting event with additional attributes or values.
func CreateEventWithAdditionalOptions(request protocols.Request, outputEvent output.InternalEvent, isResponseDebug bool,
addAdditionalOptions func(internalWrappedEvent *output.InternalWrappedEvent)) *output.InternalWrappedEvent {
event := &output.InternalWrappedEvent{InternalEvent: outputEvent}
@ -19,7 +20,9 @@ func CreateEventWithAdditionalOptions(request protocols.Request, outputEvent out
result, ok := compiledOperator.Execute(outputEvent, request.Match, request.Extract, isResponseDebug)
if ok && result != nil {
event.OperatorsResult = result
if addAdditionalOptions != nil {
addAdditionalOptions(event)
}
event.Results = append(event.Results, request.MakeResultEvent(event)...)
}
}

View File

@ -4,12 +4,18 @@ import (
"path/filepath"
"strings"
"github.com/docker/go-units"
"github.com/pkg/errors"
"github.com/projectdiscovery/nuclei/v2/pkg/operators"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols"
)
// Default read limits for the file protocol, parsed from human-readable
// sizes via docker/go-units (decimal units: "1Gb" = 10^9 bytes).
// NOTE(review): parse errors are deliberately discarded — the inputs are
// compile-time string constants, presumably always valid; confirm if the
// literals ever change.
var (
defaultMaxReadSize, _ = units.FromHumanSize("1Gb")
chunkSize, _ = units.FromHumanSize("100Mb")
)
// Request contains a File matching mechanism for local disk operations.
type Request struct {
// Operators for the current request go here.
@ -34,11 +40,14 @@ type Request struct {
// description: |
// MaxSize is the maximum size of the file to run request on.
//
// By default, nuclei will process 5 MB files and not go more than that.
// By default, nuclei will process 1 GB of content and not go more than that.
// It can be set to much lower or higher depending on use.
// If set to "no" then all content will be processed
// examples:
// - value: 2048
MaxSize int `yaml:"max-size,omitempty" jsonschema:"title=max size data to run request on,description=Maximum size of the file to run request on"`
// - value: 5Mb
MaxSize string `yaml:"max-size,omitempty" jsonschema:"title=max size data to run request on,description=Maximum size of the file to run request on"`
maxSize int64
CompiledOperators *operators.Operators `yaml:"-"`
// cache any variables that may be needed for operation.
@ -83,10 +92,21 @@ func (request *Request) Compile(options *protocols.ExecuterOptions) error {
}
request.CompiledOperators = compiled
}
// By default, use 1GB (1024 MB) as max size to read.
if request.MaxSize == 0 {
request.MaxSize = 1024 * 1024 * 1024
// Resolve the effective read limit from the user-supplied MaxSize.
// The "no" sentinel must be checked BEFORE the non-empty case: since
// "no" is itself a non-empty string, testing `MaxSize != ""` first
// made the "no" branch unreachable and fed "no" to the size parser.
switch {
case request.MaxSize == "no":
	// "no" disables the limit entirely (callers treat -1 as unlimited).
	request.maxSize = -1
case request.MaxSize != "":
	maxSize, err := units.FromHumanSize(request.MaxSize)
	if err != nil {
		return errors.Wrap(err, "could not parse max size")
	}
	request.maxSize = maxSize
default:
	// Not configured: fall back to the 1 GB default.
	request.maxSize = defaultMaxReadSize
}
request.options = options
request.extensions = make(map[string]struct{})

View File

@ -17,7 +17,7 @@ func TestFileCompile(t *testing.T) {
templateID := "testing-file"
request := &Request{
ID: templateID,
MaxSize: 1024,
MaxSize: "1Gb",
NoRecursive: false,
Extensions: []string{"all", ".lock"},
DenyList: []string{".go"},

View File

@ -20,7 +20,7 @@ func TestFindInputPaths(t *testing.T) {
templateID := "testing-file"
request := &Request{
ID: templateID,
MaxSize: 1024,
MaxSize: "1Gb",
NoRecursive: false,
Extensions: []string{"all", ".lock"},
DenyList: []string{".go"},

View File

@ -67,12 +67,24 @@ func (request *Request) getMatchPart(part string, data output.InternalEvent) (st
return itemStr, true
}
// responseToDSLMap converts a file response to a map for use in DSL matching
func (request *Request) responseToDSLMap(raw, inputFilePath, matchedFileName string) output.InternalEvent {
// fileStatus carries the state of a single file-chunk elaboration:
// the chunk content plus the running counters accumulated over the
// chunks already processed for the same file. The counters are used
// as offsets so match positions are reported relative to the whole
// file rather than the current chunk.
type fileStatus struct {
raw string // content of the current chunk
inputFilePath string // input path as supplied to the request
matchedFileName string // resolved file path that matched
lines int // lines consumed before this chunk (offset for line reporting)
words int // word counter accumulated so far — NOTE(review): appears to count spaces, not words; verify in ExecuteWithResults
bytes int // bytes consumed before this chunk
}
// toDSLMap converts a file chunk elaboration to a map for use in DSL matching
func (request *Request) toDSLMap(state *fileStatus) output.InternalEvent {
return output.InternalEvent{
"path": inputFilePath,
"matched": matchedFileName,
"raw": raw,
"path": state.inputFilePath,
"matched": state.matchedFileName,
"raw": state.raw,
"lines": state.lines,
"words": state.words,
"bytes": state.bytes,
"type": request.Type().String(),
"template-id": request.options.TemplateID,
"template-info": request.options.TemplateInfo,
@ -89,6 +101,8 @@ func (request *Request) MakeResultEvent(wrapped *output.InternalWrappedEvent) []
return results
}
linesOffset := wrapped.InternalEvent["lines"].(int)
rawStr, ok := raw.(string)
if !ok {
return results
@ -109,7 +123,7 @@ func (request *Request) MakeResultEvent(wrapped *output.InternalWrappedEvent) []
lineWords[v] = struct{}{}
}
}
result.LineCount = calculateLineFunc(rawStr, lineWords)
result.LineCount = calculateLineFunc(rawStr, linesOffset, lineWords)
}
// Identify the position of match in file using a dirty hack.
@ -123,7 +137,7 @@ func (request *Request) MakeResultEvent(wrapped *output.InternalWrappedEvent) []
if result.FileToIndexPosition == nil {
result.FileToIndexPosition = make(map[string]int)
}
result.FileToIndexPosition[result.Matched] = line
result.FileToIndexPosition[result.Matched] = line + linesOffset
continue
}
line++

View File

@ -21,7 +21,7 @@ func TestResponseToDSLMap(t *testing.T) {
templateID := "testing-file"
request := &Request{
ID: templateID,
MaxSize: 1024,
MaxSize: "1Gb",
NoRecursive: false,
Extensions: []string{"*", ".lock"},
DenyList: []string{".go"},
@ -34,8 +34,8 @@ func TestResponseToDSLMap(t *testing.T) {
require.Nil(t, err, "could not compile file request")
resp := "test-data\r\n"
event := request.responseToDSLMap(resp, "one.one.one.one", "one.one.one.one")
require.Len(t, event, 7, "could not get correct number of items in dsl map")
event := request.toDSLMap(&fileStatus{raw: resp, inputFilePath: "one.one.one.one", matchedFileName: "one.one.one.one"})
require.Len(t, event, 10, "could not get correct number of items in dsl map")
require.Equal(t, resp, event["raw"], "could not get correct resp")
}
@ -46,7 +46,7 @@ func TestFileOperatorMatch(t *testing.T) {
templateID := "testing-file"
request := &Request{
ID: templateID,
MaxSize: 1024,
MaxSize: "1Gb",
NoRecursive: false,
Extensions: []string{"*", ".lock"},
DenyList: []string{".go"},
@ -59,8 +59,8 @@ func TestFileOperatorMatch(t *testing.T) {
require.Nil(t, err, "could not compile file request")
resp := "test-data\r\n1.1.1.1\r\n"
event := request.responseToDSLMap(resp, "one.one.one.one", "one.one.one.one")
require.Len(t, event, 7, "could not get correct number of items in dsl map")
event := request.toDSLMap(&fileStatus{raw: resp, inputFilePath: "one.one.one.one", matchedFileName: "one.one.one.one"})
require.Len(t, event, 10, "could not get correct number of items in dsl map")
require.Equal(t, resp, event["raw"], "could not get correct resp")
t.Run("valid", func(t *testing.T) {
@ -108,8 +108,8 @@ func TestFileOperatorMatch(t *testing.T) {
t.Run("caseInsensitive", func(t *testing.T) {
resp := "TEST-DATA\r\n1.1.1.1\r\n"
event := request.responseToDSLMap(resp, "one.one.one.one", "one.one.one.one")
require.Len(t, event, 7, "could not get correct number of items in dsl map")
event := request.toDSLMap(&fileStatus{raw: resp, inputFilePath: "one.one.one.one", matchedFileName: "one.one.one.one"})
require.Len(t, event, 10, "could not get correct number of items in dsl map")
require.Equal(t, resp, event["raw"], "could not get correct resp")
matcher := &matchers.Matcher{
@ -134,7 +134,7 @@ func TestFileOperatorExtract(t *testing.T) {
templateID := "testing-file"
request := &Request{
ID: templateID,
MaxSize: 1024,
MaxSize: "1Gb",
NoRecursive: false,
Extensions: []string{"*", ".lock"},
DenyList: []string{".go"},
@ -147,8 +147,8 @@ func TestFileOperatorExtract(t *testing.T) {
require.Nil(t, err, "could not compile file request")
resp := "test-data\r\n1.1.1.1\r\n"
event := request.responseToDSLMap(resp, "one.one.one.one", "one.one.one.one")
require.Len(t, event, 7, "could not get correct number of items in dsl map")
event := request.toDSLMap(&fileStatus{raw: resp, inputFilePath: "one.one.one.one", matchedFileName: "one.one.one.one"})
require.Len(t, event, 10, "could not get correct number of items in dsl map")
require.Equal(t, resp, event["raw"], "could not get correct resp")
t.Run("extract", func(t *testing.T) {
@ -241,7 +241,7 @@ func testFileMakeResult(t *testing.T, matchers []*matchers.Matcher, matcherCondi
templateID := "testing-file"
request := &Request{
ID: templateID,
MaxSize: 1024,
MaxSize: "1Gb",
NoRecursive: false,
Extensions: []string{"*", ".lock"},
DenyList: []string{".go"},
@ -265,8 +265,8 @@ func testFileMakeResult(t *testing.T, matchers []*matchers.Matcher, matcherCondi
matchedFileName := "test.txt"
fileContent := "test-data\r\n1.1.1.1\r\n"
event := request.responseToDSLMap(fileContent, "/tmp", matchedFileName)
require.Len(t, event, 7, "could not get correct number of items in dsl map")
event := request.toDSLMap(&fileStatus{raw: fileContent, inputFilePath: "/tmp", matchedFileName: matchedFileName})
require.Len(t, event, 10, "could not get correct number of items in dsl map")
require.Equal(t, fileContent, event["raw"], "could not get correct resp")
finalEvent := &output.InternalWrappedEvent{InternalEvent: event}

View File

@ -1,12 +1,14 @@
package file
import (
"bufio"
"encoding/hex"
"io/ioutil"
"io"
"os"
"sort"
"strings"
"github.com/docker/go-units"
"github.com/pkg/errors"
"github.com/remeh/sizedwaitgroup"
@ -15,7 +17,6 @@ import (
"github.com/projectdiscovery/nuclei/v2/pkg/protocols"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/helpers/eventcreator"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/helpers/responsehighlighter"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/tostring"
templateTypes "github.com/projectdiscovery/nuclei/v2/pkg/templates/types"
)
@ -27,7 +28,7 @@ func (request *Request) Type() templateTypes.ProtocolType {
}
// ExecuteWithResults executes the protocol requests and returns results instead of writing them.
func (request *Request) ExecuteWithResults(input string, metadata /*TODO review unused parameter*/, previous output.InternalEvent, callback protocols.OutputEventCallback) error {
func (request *Request) ExecuteWithResults(input string, metadata, previous output.InternalEvent, callback protocols.OutputEventCallback) error {
wg := sizedwaitgroup.New(request.options.Options.BulkSize)
err := request.getInputPaths(input, func(data string) {
@ -49,20 +50,32 @@ func (request *Request) ExecuteWithResults(input string, metadata /*TODO review
gologger.Error().Msgf("Could not stat file path %s: %s\n", filePath, err)
return
}
if stat.Size() >= int64(request.MaxSize) {
gologger.Verbose().Msgf("Could not process path %s: exceeded max size\n", filePath)
return
if stat.Size() >= request.maxSize {
gologger.Verbose().Msgf("Limiting %s processed data to %s bytes: exceeded max size\n", filePath, units.HumanSize(float64(request.maxSize)))
}
totalBytes := units.BytesSize(float64(stat.Size()))
fileReader := io.LimitReader(file, request.maxSize)
var bytesCount, linesCount, wordsCount int
scanner := bufio.NewScanner(fileReader)
buffer := []byte{}
scanner.Buffer(buffer, int(chunkSize))
for scanner.Scan() {
fileContent := scanner.Text()
n := len(fileContent)
buffer, err := ioutil.ReadAll(file)
if err != nil {
gologger.Error().Msgf("Could not read file path %s: %s\n", filePath, err)
return
}
fileContent := tostring.UnsafeToString(buffer)
// update counters
currentBytes := bytesCount + n
processedBytes := units.BytesSize(float64(currentBytes))
gologger.Verbose().Msgf("[%s] Sent FILE request to %s", request.options.TemplateID, filePath)
outputEvent := request.responseToDSLMap(fileContent, input, filePath)
gologger.Verbose().Msgf("[%s] Processing file %s chunk %s/%s", request.options.TemplateID, filePath, processedBytes, totalBytes)
outputEvent := request.toDSLMap(&fileStatus{
raw: fileContent,
inputFilePath: input,
matchedFileName: filePath,
lines: linesCount,
words: wordsCount,
bytes: bytesCount,
})
for k, v := range previous {
outputEvent[k] = v
}
@ -70,9 +83,14 @@ func (request *Request) ExecuteWithResults(input string, metadata /*TODO review
event := eventcreator.CreateEvent(request, outputEvent, request.options.Options.Debug || request.options.Options.DebugResponse)
dumpResponse(event, request.options, fileContent, filePath)
callback(event)
currentLinesCount := 1 + strings.Count(fileContent, "\n")
linesCount += currentLinesCount
wordsCount += strings.Count(fileContent, " ")
bytesCount = currentBytes
request.options.Progress.IncrementRequests()
}
}(data)
})
wg.Wait()
@ -112,22 +130,15 @@ func getAllStringSubmatchIndex(content string, word string) []int {
return indexes
}
func calculateLineFunc(contents string, words map[string]struct{}) []int {
func calculateLineFunc(contents string, linesOffset int, words map[string]struct{}) []int {
var lines []int
for word := range words {
matches := getAllStringSubmatchIndex(contents, word)
for _, index := range matches {
lineCount := int(0)
for _, c := range contents[:index] {
if c == '\n' {
lineCount++
}
}
if lineCount > 0 {
lines = append(lines, lineCount+1)
}
lineCount := 1 + strings.Count(contents[:index], "\n")
lines = append(lines, linesOffset+lineCount)
}
}
sort.Ints(lines)

View File

@ -24,7 +24,7 @@ func TestFileExecuteWithResults(t *testing.T) {
templateID := "testing-file"
request := &Request{
ID: templateID,
MaxSize: 1024,
MaxSize: "1Gb",
NoRecursive: false,
Extensions: []string{"all"},
DenyList: []string{".go"},
@ -88,6 +88,6 @@ eeee
RequestDataTooBig
dd
RequestDataTooBig3
SuspiciousOperation`, map[string]struct{}{"SuspiciousOperation": {}, "RequestDataTooBig": {}})
SuspiciousOperation`, 0, map[string]struct{}{"SuspiciousOperation": {}, "RequestDataTooBig": {}})
require.ElementsMatch(t, []int{4, 7, 9, 10}, lines, "could not calculate correct lines")
}

View File

@ -2,7 +2,7 @@ package engine
import (
"fmt"
"io/ioutil"
"io"
"net/http"
"net/http/httptest"
"net/url"
@ -414,7 +414,7 @@ func TestActionSetBody(t *testing.T) {
}
handler := func(w http.ResponseWriter, r *http.Request) {
body, _ := ioutil.ReadAll(r.Body)
body, _ := io.ReadAll(r.Body)
_, _ = fmt.Fprintln(w, string(body))
}

View File

@ -3,7 +3,6 @@ package race
import (
"fmt"
"io"
"io/ioutil"
"time"
)
@ -23,7 +22,7 @@ func NewSyncedReadCloser(r io.ReadCloser) *SyncedReadCloser {
s SyncedReadCloser
err error
)
s.data, err = ioutil.ReadAll(r)
s.data, err = io.ReadAll(r)
if err != nil {
return nil
}

View File

@ -6,7 +6,6 @@ import (
"errors"
"fmt"
"io"
"io/ioutil"
"net/url"
"path"
"strings"
@ -134,7 +133,7 @@ func Parse(request, baseURL string, unsafe bool) (*Request, error) {
}
// Set the request body
b, err := ioutil.ReadAll(reader)
b, err := io.ReadAll(reader)
if err != nil {
return nil, fmt.Errorf("could not read request body: %w", err)
}

View File

@ -486,7 +486,7 @@ func (request *Request) executeRequest(reqURL string, generatedRequest *generate
} else {
bodyReader = resp.Body
}
data, err := ioutil.ReadAll(bodyReader)
data, err := io.ReadAll(bodyReader)
if err != nil {
// Ignore body read due to server misconfiguration errors
if stringsutil.ContainsAny(err.Error(), "gzip: invalid header") {

View File

@ -4,7 +4,7 @@ import (
"bytes"
"context"
"errors"
"io/ioutil"
"io"
"net/http"
"time"
@ -89,7 +89,7 @@ func (awsSigner *AwsSigner) SignHTTP(request *http.Request, args interface{}) er
awsSigner.prepareRequest(request)
var body *bytes.Reader
if request.Body != nil {
bodyBytes, err := ioutil.ReadAll(request.Body)
bodyBytes, err := io.ReadAll(request.Body)
if err != nil {
return err
}

View File

@ -63,7 +63,7 @@ func dumpResponseWithRedirectChain(resp *http.Response, body []byte) ([]redirect
break
}
if redirectResp.Body != nil {
body, _ = ioutil.ReadAll(redirectResp.Body)
body, _ = io.ReadAll(redirectResp.Body)
}
respObj := redirectedResponse{
headers: respData,
@ -162,7 +162,7 @@ func handleDecompression(resp *http.Response, bodyOrig []byte) (bodyDec []byte,
}
defer reader.Close()
bodyDec, err = ioutil.ReadAll(reader)
bodyDec, err = io.ReadAll(reader)
if err != nil {
return bodyOrig, err
}
@ -173,7 +173,7 @@ func handleDecompression(resp *http.Response, bodyOrig []byte) (bodyDec []byte,
func decodeGBK(s []byte) ([]byte, error) {
I := bytes.NewReader(s)
O := transform.NewReader(I, simplifiedchinese.GBK.NewDecoder())
d, e := ioutil.ReadAll(O)
d, e := io.ReadAll(O)
if e != nil {
return nil, e
}

View File

@ -2,7 +2,7 @@ package offlinehttp
import (
"fmt"
"io/ioutil"
"io"
"net/http"
"net/http/httptest"
"net/http/httputil"
@ -149,7 +149,7 @@ Server: Google Frontend
resp, err := readResponseFromString(tt.data)
require.Nil(t, err, "could not read response from string")
respData, err := ioutil.ReadAll(resp.Body)
respData, err := io.ReadAll(resp.Body)
require.Nil(t, err, "could not read response body")
require.Equal(t, expectedBody, string(respData), "could not get correct parsed body")
require.Equal(t, "Google Frontend", resp.Header.Get("Server"), "could not get correct headers")
@ -190,7 +190,7 @@ Server: Google Frontend
respData, err := readResponseFromString(string(b))
require.Nil(t, err, "could not read response from string")
_, err = ioutil.ReadAll(respData.Body)
_, err = io.ReadAll(respData.Body)
require.Nil(t, err, "could not read response body")
require.Equal(t, "Google Frontend", respData.Header.Get("Server"), "could not get correct headers")

View File

@ -1,7 +1,7 @@
package offlinehttp
import (
"io/ioutil"
"io"
"net/http"
"net/http/httputil"
"os"
@ -54,7 +54,7 @@ func (request *Request) ExecuteWithResults(input string, metadata /*TODO review
return
}
buffer, err := ioutil.ReadAll(file)
buffer, err := io.ReadAll(file)
if err != nil {
gologger.Error().Msgf("Could not read file path %s: %s\n", data, err)
return
@ -79,7 +79,7 @@ func (request *Request) ExecuteWithResults(input string, metadata /*TODO review
return
}
body, err := ioutil.ReadAll(resp.Body)
body, err := io.ReadAll(resp.Body)
if err != nil {
gologger.Error().Msgf("Could not read raw http response body %s: %s\n", data, err)
return

View File

@ -3,7 +3,7 @@ package utils
import (
"crypto/tls"
"crypto/x509"
"io/ioutil"
"os"
"github.com/projectdiscovery/nuclei/v2/pkg/types"
)
@ -22,7 +22,7 @@ func AddConfiguredClientCertToRequest(tlsConfig *tls.Config, options *types.Opti
tlsConfig.Certificates = []tls.Certificate{cert}
// Load the certificate authority PEM certificate into the TLS configuration
caCert, err := ioutil.ReadFile(options.ClientCAFile)
caCert, err := os.ReadFile(options.ClientCAFile)
if err != nil {
return nil, err
}

View File

@ -6,6 +6,7 @@ import (
"encoding/base64"
"encoding/json"
"fmt"
"io"
"io/ioutil"
"net/http"
"time"
@ -108,7 +109,7 @@ func (exporter *Exporter) Export(event *output.ResultEvent) error {
return err
}
b, err = ioutil.ReadAll(res.Body)
b, err = io.ReadAll(res.Body)
if err != nil {
return errors.New(err.Error() + "error thrown by elasticsearch " + string(b))
}

View File

@ -3,7 +3,7 @@ package jira
import (
"bytes"
"fmt"
"io/ioutil"
"io"
"strings"
"github.com/andygrunwald/go-jira"
@ -101,7 +101,7 @@ func (i *Integration) CreateNewIssue(event *output.ResultEvent) error {
if err != nil {
var data string
if resp != nil && resp.Body != nil {
d, _ := ioutil.ReadAll(resp.Body)
d, _ := io.ReadAll(resp.Body)
data = string(d)
}
return fmt.Errorf("%w => %s", err, data)
@ -138,7 +138,7 @@ func (i *Integration) FindExistingIssue(event *output.ResultEvent) (string, erro
if err != nil {
var data string
if resp != nil && resp.Body != nil {
d, _ := ioutil.ReadAll(resp.Body)
d, _ := io.ReadAll(resp.Body)
data = string(d)
}
return "", fmt.Errorf("%w => %s", err, data)

View File

@ -2,7 +2,7 @@ package utils
import (
"errors"
"io/ioutil"
"io"
"net/http"
"net/url"
"os"
@ -51,7 +51,7 @@ func ReadFromPathOrURL(templatePath string) (data []byte, err error) {
return nil, err
}
defer resp.Body.Close()
data, err = ioutil.ReadAll(resp.Body)
data, err = io.ReadAll(resp.Body)
if err != nil {
return nil, err
}
@ -61,7 +61,7 @@ func ReadFromPathOrURL(templatePath string) (data []byte, err error) {
return nil, err
}
defer f.Close()
data, err = ioutil.ReadAll(f)
data, err = io.ReadAll(f)
if err != nil {
return nil, err
}