package pdcp

import (
	"bufio"
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"net/url"
	"regexp"
	"sync/atomic"
	"time"

	"github.com/projectdiscovery/gologger"
	"github.com/projectdiscovery/nuclei/v3/pkg/catalog/config"
	"github.com/projectdiscovery/nuclei/v3/pkg/output"
	"github.com/projectdiscovery/retryablehttp-go"
	pdcpauth "github.com/projectdiscovery/utils/auth/pdcp"
	"github.com/projectdiscovery/utils/env"
	errorutil "github.com/projectdiscovery/utils/errors"
	unitutils "github.com/projectdiscovery/utils/unit"
	updateutils "github.com/projectdiscovery/utils/update"
	urlutil "github.com/projectdiscovery/utils/url"
)

const (
	uploadEndpoint = "/v1/scans/import"
	appendEndpoint = "/v1/scans/%s/import"
	flushTimer     = time.Minute
	MaxChunkSize   = 4 * unitutils.Mega // 4 MB
	xidRe          = `^[a-z0-9]{20}$`
	teamIDHeader   = "X-Team-Id"
	NoneTeamID     = "none"
)

var (
	xidRegex = regexp.MustCompile(xidRe)
	// ensure UploadWriter satisfies the output.Writer interface
	_ output.Writer = &UploadWriter{}
	// TeamIDEnv is the team id read from the PDCP_TEAM_ID environment variable, if given
	TeamIDEnv = env.GetEnvOrDefault("PDCP_TEAM_ID", NoneTeamID)
)

// UploadWriter is a writer that uploads its output to the pdcp
// server to enable the web dashboard and more
type UploadWriter struct {
	*output.StandardWriter
	creds     *pdcpauth.PDCPCredentials
	uploadURL *url.URL
	client    *retryablehttp.Client
	cancel    context.CancelFunc
	done      chan struct{}
	scanID    string
	scanName  string
	counter   atomic.Int32
	TeamID    string
}

// NewUploadWriter creates a new upload writer
func NewUploadWriter(ctx context.Context, creds *pdcpauth.PDCPCredentials) (*UploadWriter, error) {
	if creds == nil {
		return nil, fmt.Errorf("no credentials provided")
	}
	u := &UploadWriter{
		creds:  creds,
		done:   make(chan struct{}, 1),
		TeamID: NoneTeamID,
	}
	var err error
	reader, writer := io.Pipe()
	// create standard writer that emits JSON lines into the pipe
	u.StandardWriter, err = output.NewWriter(
		output.WithWriter(writer),
		output.WithJson(true, true),
	)
	if err != nil {
		return nil, errorutil.NewWithErr(err).Msgf("could not create output writer")
	}
	tmp, err := urlutil.Parse(creds.Server)
	if err != nil {
		return nil, errorutil.NewWithErr(err).Msgf("could not parse server url")
	}
	tmp.Path = uploadEndpoint
	tmp.Update()
	u.uploadURL = tmp.URL

	// create http client
	opts := retryablehttp.DefaultOptionsSingle
	opts.NoAdjustTimeout = true
	opts.Timeout = 3 * time.Minute
	u.client = retryablehttp.NewClient(opts)

	// create a cancellable context so Close can stop the background uploader
	ctx, u.cancel = context.WithCancel(ctx)
	// start auto commit: upload every minute or when the buffer is full
	go u.autoCommit(ctx, reader)
	return u, nil
}

// SetScanID sets the scan id for the upload writer
func (u *UploadWriter) SetScanID(id string) error {
	if !xidRegex.MatchString(id) {
		return fmt.Errorf("invalid scan id provided")
	}
	u.scanID = id
	return nil
}

// SetScanName sets the scan name for the upload writer
func (u *UploadWriter) SetScanName(name string) {
	u.scanName = name
}

// SetTeamID sets the team id for the upload writer, falling back to NoneTeamID when empty
func (u *UploadWriter) SetTeamID(id string) {
	if id == "" {
		u.TeamID = NoneTeamID
	} else {
		u.TeamID = id
	}
}

// autoCommit continuously reads serialized results from the pipe, buffers them,
// and uploads the buffer as a chunk when it reaches MaxChunkSize, when the flush
// timer fires, or when the writer shuts down.
func (u *UploadWriter) autoCommit(ctx context.Context, r *io.PipeReader) {
	reader := bufio.NewReader(r)
	ch := make(chan string, 4)

	// continuously read from the reader and send to channel
	go func() {
		defer r.Close()
		defer close(ch)
		for {
			data, err := reader.ReadString('\n')
			if err != nil {
				return
			}
			u.counter.Add(1)
			ch <- data
		}
	}()

	// signal Close and log a summary once the loop exits
	defer func() {
		u.done <- struct{}{}
		close(u.done)
		// if no scan id was generated, no results were uploaded
		if u.scanID == "" {
			gologger.Verbose().Msgf("Scan results upload to cloud skipped, no results found to upload")
		} else {
			gologger.Info().Msgf("%v Scan results uploaded to cloud, you can view scan results at %v", u.counter.Load(), getScanDashBoardURL(u.scanID, u.TeamID))
		}
	}()
	// temporary buffer to store the results
	buff := &bytes.Buffer{}
	ticker := time.NewTicker(flushTimer)
	defer ticker.Stop()
	for {
		select {
		case <-ctx.Done():
			// flush before exit
			if buff.Len() > 0 {
				if err := u.uploadChunk(buff); err != nil {
					gologger.Error().Msgf("Failed to upload scan results on cloud: %v", err)
				}
			}
			return
		case <-ticker.C:
			// flush the buffer
			if buff.Len() > 0 {
				if err := u.uploadChunk(buff); err != nil {
					gologger.Error().Msgf("Failed to upload scan results on cloud: %v", err)
				}
			}
		case line, ok := <-ch:
			if !ok {
				// channel closed: flush whatever is left and exit
				if buff.Len() > 0 {
					if err := u.uploadChunk(buff); err != nil {
						gologger.Error().Msgf("Failed to upload scan results on cloud: %v", err)
					}
				}
				return
			}
			if buff.Len()+len(line) > MaxChunkSize {
				// flush the existing buffer first so the new line is not dropped
				if err := u.uploadChunk(buff); err != nil {
					gologger.Error().Msgf("Failed to upload scan results on cloud: %v", err)
				}
			}
			buff.WriteString(line)
		}
	}
}

// uploadChunk uploads a chunk of data to the server
func (u *UploadWriter) uploadChunk(buff *bytes.Buffer) error {
	if err := u.upload(buff.Bytes()); err != nil {
		return errorutil.NewWithErr(err).Msgf("could not upload chunk")
	}
	// if successful, reset the buffer
	buff.Reset()
	// surface the dashboard url after each uploaded chunk
	gologger.Warning().Msgf("Uploaded results chunk, you can view scan results at %v", getScanDashBoardURL(u.scanID, u.TeamID))
	return nil
}

// upload sends the chunk to the server and records the scan id returned
// on the first successful upload
func (u *UploadWriter) upload(data []byte) error {
	req, err := u.getRequest(data)
	if err != nil {
		return errorutil.NewWithErr(err).Msgf("could not create upload request")
	}
	resp, err := u.client.Do(req)
	if err != nil {
		return errorutil.NewWithErr(err).Msgf("could not upload results")
	}
	defer resp.Body.Close()
	bin, err := io.ReadAll(resp.Body)
	if err != nil {
		return errorutil.NewWithErr(err).Msgf("could not read response body")
	}
	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("could not upload results, got status code %v on %v", resp.StatusCode, resp.Request.URL.String())
	}
	var uploadResp uploadResponse
	if err := json.Unmarshal(bin, &uploadResp); err != nil {
		return errorutil.NewWithErr(err).Msgf("could not unmarshal response, got %v", string(bin))
	}
	if uploadResp.ID != "" && u.scanID == "" {
		u.scanID = uploadResp.ID
	}
	return nil
}

// getRequest returns a new request for upload:
// if scanID is not set, create a new scan by POSTing the data;
// if scanID is set, append the data to the existing scan with a PATCH
func (u *UploadWriter) getRequest(bin []byte) (*retryablehttp.Request, error) {
	var method, url string

	if u.scanID == "" {
		u.uploadURL.Path = uploadEndpoint
		method = http.MethodPost
		url = u.uploadURL.String()
	} else {
		u.uploadURL.Path = fmt.Sprintf(appendEndpoint, u.scanID)
		method = http.MethodPatch
		url = u.uploadURL.String()
	}
	req, err := retryablehttp.NewRequest(method, url, bytes.NewReader(bin))
	if err != nil {
		return nil, errorutil.NewWithErr(err).Msgf("could not create cloud upload request")
	}
	// add pdtm meta params
	req.URL.Params.Merge(updateutils.GetpdtmParams(config.Version))
	// if it is the upload endpoint, also include the scan name if it exists
	if u.scanName != "" && req.URL.Path == uploadEndpoint {
		req.URL.Params.Add("name", u.scanName)
	}
	req.URL.Update()

	req.Header.Set(pdcpauth.ApiKeyHeaderName, u.creds.APIKey)
	if u.TeamID != NoneTeamID && u.TeamID != "" {
		req.Header.Set(teamIDHeader, u.TeamID)
	}
	req.Header.Set("Content-Type", "application/octet-stream")
	req.Header.Set("Accept", "application/json")
	return req, nil
}

// Close closes the upload writer: it cancels the background uploader, waits
// for the final flush to complete, and then closes the underlying standard writer.
func (u *UploadWriter) Close() {
	u.cancel()
	<-u.done
	u.StandardWriter.Close()
}
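
// exampleUsage is an illustrative sketch (not part of the original file) of how
// the UploadWriter is typically wired up: create it with PDCP credentials, tag
// the scan, write results, then Close to flush and stop the background uploader.
// The credentials and result event are assumed to be supplied by the caller, and
// "example-scan" is a hypothetical scan name.
func exampleUsage(ctx context.Context, creds *pdcpauth.PDCPCredentials, event *output.ResultEvent) error {
	writer, err := NewUploadWriter(ctx, creds)
	if err != nil {
		return err
	}
	// Close cancels autoCommit and flushes any buffered results before returning.
	defer writer.Close()

	writer.SetScanName("example-scan") // hypothetical name, sent on the first upload
	writer.SetTeamID(TeamIDEnv)        // team id from PDCP_TEAM_ID, if set

	// Write goes through the embedded StandardWriter, which serializes the event
	// as a JSON line into the pipe consumed by autoCommit.
	return writer.Write(event)
}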