diff --git a/integration_tests/headless/headless-basic.yaml b/integration_tests/headless/headless-basic.yaml
new file mode 100644
index 000000000..cfc7dcb3c
--- /dev/null
+++ b/integration_tests/headless/headless-basic.yaml
@@ -0,0 +1,18 @@
+id: headless-basic
+info:
+ name: Headless Basic
+ author: pdteam
+ severity: info
+ tags: headless
+
+headless:
+ - steps:
+ - action: navigate
+ args:
+ url: "{{BaseURL}}/"
+
+ - action: waitload
+ matchers:
+ - type: word
+ words:
+ - ""
\ No newline at end of file
diff --git a/integration_tests/headless/headless-extract-values.yaml b/integration_tests/headless/headless-extract-values.yaml
new file mode 100644
index 000000000..e780ac32c
--- /dev/null
+++ b/integration_tests/headless/headless-extract-values.yaml
@@ -0,0 +1,31 @@
+
+id: headless-extract-values
+info:
+ name: Headless Extract Value
+ author: pdteam
+ severity: info
+ tags: headless
+
+headless:
+ - steps:
+ - action: navigate
+ args:
+ url: "{{BaseURL}}"
+ - action: waitload
+ # From headless/extract-urls.yaml
+ - action: script
+ name: extract
+ args:
+ code: |
+ '\n' + [...new Set(Array.from(document.querySelectorAll('[src], [href], [url], [action]')).map(i => i.src || i.href || i.url || i.action))].join('\r\n') + '\n'
+
+ matchers:
+ - type: word
+ words:
+ - "test.html"
+
+ extractors:
+ - type: kval
+ part: extract
+ kval:
+ - extract
\ No newline at end of file
diff --git a/integration_tests/headless/headless-header-action.yaml b/integration_tests/headless/headless-header-action.yaml
new file mode 100644
index 000000000..ca3c329d8
--- /dev/null
+++ b/integration_tests/headless/headless-header-action.yaml
@@ -0,0 +1,24 @@
+id: headless-header-action
+info:
+ name: Headless Header Action
+ author: pdteam
+ severity: info
+ tags: headless
+
+headless:
+ - steps:
+ - action: setheader
+ args:
+ part: request
+ key: Test
+ value: test value
+
+ - action: navigate
+ args:
+ url: "{{BaseURL}}/"
+
+ - action: waitload
+ matchers:
+ - type: word
+ words:
+ - "test value"
\ No newline at end of file
diff --git a/integration_tests/http/dsl-matcher-variable.yaml b/integration_tests/http/dsl-matcher-variable.yaml
new file mode 100644
index 000000000..ecbe5f9e5
--- /dev/null
+++ b/integration_tests/http/dsl-matcher-variable.yaml
@@ -0,0 +1,23 @@
+id: dsl-matcher-variable
+
+info:
+ name: dsl-matcher-variable
+ author: pd-team
+ severity: info
+
+requests:
+ -
+ path:
+ - "{{BaseURL}}"
+ payloads:
+ VALUES:
+ - This
+ - is
+ - test
+ - matcher
+ - text
+ matchers:
+ -
+ dsl:
+ - 'contains(body,"{{VALUES}}")'
+ type: dsl
\ No newline at end of file
diff --git a/integration_tests/http/get-case-insensitive.yaml b/integration_tests/http/get-case-insensitive.yaml
new file mode 100644
index 000000000..e8c4054b7
--- /dev/null
+++ b/integration_tests/http/get-case-insensitive.yaml
@@ -0,0 +1,16 @@
+id: basic-get-case-insensitive
+
+info:
+ name: Basic GET Request
+ author: pdteam
+ severity: info
+
+requests:
+ - method: GET
+ path:
+ - "{{BaseURL}}"
+ matchers:
+ - type: word
+ case-insensitive: true
+ words:
+ - "ThIS is TEsT MAtcHEr TExT"
diff --git a/integration_tests/http/get-redirects-chain-headers.yaml b/integration_tests/http/get-redirects-chain-headers.yaml
new file mode 100644
index 000000000..512073018
--- /dev/null
+++ b/integration_tests/http/get-redirects-chain-headers.yaml
@@ -0,0 +1,23 @@
+id: basic-get-redirects-chain-headers
+
+info:
+ name: Basic GET Redirects Request With Chain header
+ author: pdteam
+ severity: info
+
+requests:
+ - method: GET
+ path:
+ - "{{BaseURL}}"
+ redirects: true
+ max-redirects: 3
+ matchers-condition: and
+ matchers:
+ - type: word
+ part: header
+ words:
+ - "TestRedirectHeaderMatch"
+
+ - type: status
+ status:
+ - 302
\ No newline at end of file
diff --git a/integration_tests/http/interactsh.yaml b/integration_tests/http/interactsh.yaml
new file mode 100644
index 000000000..28d9c5606
--- /dev/null
+++ b/integration_tests/http/interactsh.yaml
@@ -0,0 +1,19 @@
+id: interactsh-integration-test
+
+info:
+ name: Interactsh Integration Test
+ author: pdteam
+ severity: info
+
+requests:
+ - method: GET
+ path:
+ - "{{BaseURL}}"
+ headers:
+ url: 'http://{{interactsh-url}}'
+
+ matchers:
+ - type: word
+ part: interactsh_protocol # Confirms the HTTP Interaction
+ words:
+ - "http"
\ No newline at end of file
diff --git a/integration_tests/http/raw-unsafe-request.yaml b/integration_tests/http/raw-unsafe-request.yaml
index 0a84b9157..e7c45c983 100644
--- a/integration_tests/http/raw-unsafe-request.yaml
+++ b/integration_tests/http/raw-unsafe-request.yaml
@@ -7,7 +7,7 @@ info:
requests:
- raw:
- - |
+ - |+
GET / HTTP/1.1
Host:
Content-Length: 4
diff --git a/integration_tests/loader/basic.yaml b/integration_tests/loader/basic.yaml
new file mode 100644
index 000000000..f49193b18
--- /dev/null
+++ b/integration_tests/loader/basic.yaml
@@ -0,0 +1,10 @@
+id: workflow-example
+
+info:
+ name: Test Workflow Template
+ author: pdteam
+ severity: info
+
+workflows:
+ - template: workflow/match-1.yaml
+ - template: workflow/match-2.yaml
\ No newline at end of file
diff --git a/integration_tests/loader/condition-matched.yaml b/integration_tests/loader/condition-matched.yaml
new file mode 100644
index 000000000..8b0a65732
--- /dev/null
+++ b/integration_tests/loader/condition-matched.yaml
@@ -0,0 +1,11 @@
+id: condition-matched-workflow
+
+info:
+ name: Condition Matched Workflow
+ author: pdteam
+ severity: info
+
+workflows:
+ - template: workflow/match-1.yaml
+ subtemplates:
+ - template: workflow/match-2.yaml
\ No newline at end of file
diff --git a/integration_tests/loader/get-headers.yaml b/integration_tests/loader/get-headers.yaml
new file mode 100644
index 000000000..bae367052
--- /dev/null
+++ b/integration_tests/loader/get-headers.yaml
@@ -0,0 +1,17 @@
+id: basic-get-headers
+
+info:
+ name: Basic GET Headers Request
+ author: pdteam
+ severity: info
+
+requests:
+ - method: GET
+ path:
+ - "{{BaseURL}}"
+ headers:
+ test: nuclei
+ matchers:
+ - type: word
+ words:
+ - "This is test headers matcher text"
\ No newline at end of file
diff --git a/integration_tests/loader/get.yaml b/integration_tests/loader/get.yaml
new file mode 100644
index 000000000..c7e07e8cf
--- /dev/null
+++ b/integration_tests/loader/get.yaml
@@ -0,0 +1,15 @@
+id: basic-get
+
+info:
+ name: Basic GET Request
+ author: pdteam
+ severity: info
+
+requests:
+ - method: GET
+ path:
+ - "{{BaseURL}}"
+ matchers:
+ - type: word
+ words:
+ - "This is test matcher text"
\ No newline at end of file
diff --git a/integration_tests/loader/template-list.yaml b/integration_tests/loader/template-list.yaml
new file mode 100644
index 000000000..fae00d6ce
--- /dev/null
+++ b/integration_tests/loader/template-list.yaml
@@ -0,0 +1,2 @@
+loader/get.yaml
+loader/get-headers.yaml
diff --git a/integration_tests/loader/workflow-list.yaml b/integration_tests/loader/workflow-list.yaml
new file mode 100644
index 000000000..3f56730e7
--- /dev/null
+++ b/integration_tests/loader/workflow-list.yaml
@@ -0,0 +1,2 @@
+loader/basic.yaml
+loader/condition-matched.yaml
diff --git a/integration_tests/test-issue-tracker-config1.yaml b/integration_tests/test-issue-tracker-config1.yaml
index b7c1f73ce..2f8b587f7 100644
--- a/integration_tests/test-issue-tracker-config1.yaml
+++ b/integration_tests/test-issue-tracker-config1.yaml
@@ -3,35 +3,35 @@ allow-list:
deny-list:
severity: low
-# github contains configuration options for github issue tracker
+# GitHub contains configuration options for GitHub issue tracker
github:
- # base-url is the optional self-hosted github application url
+ # base-url is the optional self-hosted GitHub application url
base-url: https://localhost:8443/github
- # username is the username of the github user
+ # username is the username of the GitHub user
username: test-username
- # owner is the owner name of the repository for issues.
+ # owner is the owner name of the repository for issues
owner: test-owner
- # token is the token for github account.
+ # token is the token for GitHub account
token: test-token
- # project-name is the name of the repository.
+ # project-name is the name of the repository
project-name: test-project
# issue-label is the label of the created issue type
issue-label: bug
-# gitlab contains configuration options for gitlab issue tracker
+# GitLab contains configuration options for GitLab issue tracker
gitlab:
- # base-url is the optional self-hosted gitlab application url
+ # base-url is the optional self-hosted GitLab application url
base-url: https://localhost:8443/gitlab
- # username is the username of the gitlab user
+ # username is the username of the GitLab user
username: test-username
- # token is the token for gitlab account.
+ # token is the token for GitLab account
token: test-token
- # project-id is the ID of the repository.
- project-id: 1234
+ # project-name is the name/id of the project (repository)
+ project-name: "1234"
# issue-label is the label of the created issue type
issue-label: bug
-# jira contains configuration options for jira issue tracker
+# Jira contains configuration options for Jira issue tracker
jira:
# cloud is the boolean which tells if Jira instance is running in the cloud or on-prem version is used
cloud: true
@@ -39,11 +39,11 @@ jira:
update-existing: false
# URL is the jira application url
url: https://localhost/jira
- # account-id is the account-id of the jira user or username in case of on-prem Jira
+ # account-id is the account-id of the Jira user or username in case of on-prem Jira
account-id: test-account-id
- # email is the email of the user for jira instance
+ # email is the email of the user for Jira instance
email: test@test.com
- # token is the token for jira instance or password in case of on-prem Jira
+ # token is the token for Jira instance or password in case of on-prem Jira
token: test-token
# project-name is the name of the project.
project-name: test-project-name
diff --git a/integration_tests/test-issue-tracker-config2.yaml b/integration_tests/test-issue-tracker-config2.yaml
index eeb6eaa37..f548dbfbd 100644
--- a/integration_tests/test-issue-tracker-config2.yaml
+++ b/integration_tests/test-issue-tracker-config2.yaml
@@ -5,47 +5,47 @@ allow-list:
deny-list:
severity: low
-# github contains configuration options for github issue tracker
-github:
- # base-url is the optional self-hosted github application url
- base-url: https://localhost:8443/github
- # username is the username of the github user
+# GitHub contains configuration options for GitHub issue tracker
+GitHub:
+ # base-url is the optional self-hosted GitHub application url
+ base-url: https://localhost:8443/GitHub
+ # username is the username of the GitHub user
username: test-username
# owner is the owner name of the repository for issues.
owner: test-owner
- # token is the token for github account.
+ # token is the token for GitHub account.
token: test-token
# project-name is the name of the repository.
project-name: test-project
# issue-label is the label of the created issue type
issue-label: bug
-# gitlab contains configuration options for gitlab issue tracker
-gitlab:
- # base-url is the optional self-hosted gitlab application url
- base-url: https://localhost:8443/gitlab
- # username is the username of the gitlab user
+# GitLab contains configuration options for GitLab issue tracker
+GitLab:
+ # base-url is the optional self-hosted GitLab application url
+ base-url: https://localhost:8443/GitLab
+ # username is the username of the GitLab user
username: test-username
- # token is the token for gitlab account.
+ # token is the token for GitLab account.
token: test-token
- # project-id is the ID of the repository.
- project-id: 1234
+ # project-name is the name/id of the project (repository).
+ project-name: "1234"
# issue-label is the label of the created issue type
issue-label: bug
-# jira contains configuration options for jira issue tracker
-jira:
+# Jira contains configuration options for Jira issue tracker
+Jira:
# cloud is the boolean which tells if Jira instance is running in the cloud or on-prem version is used
cloud: true
# update-existing is the boolean which tells if the existing, opened issue should be updated or new one should be created
update-existing: false
- # URL is the jira application url
- url: https://localhost/jira
- # account-id is the account-id of the jira user or username in case of on-prem Jira
+ # URL is the Jira application url
+ url: https://localhost/Jira
+ # account-id is the account-id of the Jira user or username in case of on-prem Jira
account-id: test-account-id
- # email is the email of the user for jira instance
+ # email is the email of the user for Jira instance
email: test@test.com
- # token is the token for jira instance or password in case of on-prem Jira
+ # token is the token for Jira instance or password in case of on-prem Jira
token: test-token
# project-name is the name of the project.
project-name: test-project-name
diff --git a/integration_tests/websocket/basic.yaml b/integration_tests/websocket/basic.yaml
new file mode 100644
index 000000000..c09378301
--- /dev/null
+++ b/integration_tests/websocket/basic.yaml
@@ -0,0 +1,16 @@
+id: basic-request
+
+info:
+ name: Basic Request
+ author: pdteam
+ severity: info
+
+websocket:
+ - address: '{{Scheme}}://{{Hostname}}'
+ inputs:
+ - data: hello
+ matchers:
+ - type: word
+ words:
+ - world
+ part: response
\ No newline at end of file
diff --git a/integration_tests/websocket/cswsh.yaml b/integration_tests/websocket/cswsh.yaml
new file mode 100644
index 000000000..80f75d7ac
--- /dev/null
+++ b/integration_tests/websocket/cswsh.yaml
@@ -0,0 +1,16 @@
+id: basic-cswsh-request
+
+info:
+ name: Basic cswsh Request
+ author: pdteam
+ severity: info
+
+websocket:
+ - address: '{{Scheme}}://{{Hostname}}'
+ headers:
+ Origin: 'http://evil.com'
+ matchers:
+ - type: word
+ words:
+ - true
+ part: success
\ No newline at end of file
diff --git a/integration_tests/websocket/no-cswsh.yaml b/integration_tests/websocket/no-cswsh.yaml
new file mode 100644
index 000000000..6833d804e
--- /dev/null
+++ b/integration_tests/websocket/no-cswsh.yaml
@@ -0,0 +1,16 @@
+id: basic-nocswsh-request
+
+info:
+ name: Basic Non-Vulnerable cswsh Request
+ author: pdteam
+ severity: info
+
+websocket:
+ - address: '{{Scheme}}://{{Hostname}}'
+ headers:
+ Origin: 'http://evil.com'
+ matchers:
+ - type: word
+ words:
+ - true
+ part: success
\ No newline at end of file
diff --git a/integration_tests/websocket/path.yaml b/integration_tests/websocket/path.yaml
new file mode 100644
index 000000000..d3607b30a
--- /dev/null
+++ b/integration_tests/websocket/path.yaml
@@ -0,0 +1,16 @@
+id: basic-request-path
+
+info:
+ name: Basic Request Path
+ author: pdteam
+ severity: info
+
+websocket:
+ - address: '{{Scheme}}://{{Hostname}}'
+ inputs:
+ - data: hello
+ matchers:
+ - type: word
+ words:
+ - world
+ part: response
\ No newline at end of file
diff --git a/nuclei-jsonschema.json b/nuclei-jsonschema.json
index 56253b7ed..156090895 100755
--- a/nuclei-jsonschema.json
+++ b/nuclei-jsonschema.json
@@ -130,15 +130,8 @@
"description": "Name of the extractor"
},
"type": {
- "enum": [
- "regex",
- "kval",
- "json",
- "xpath"
- ],
- "type": "string",
- "title": "type of the extractor",
- "description": "Type of the extractor"
+ "$schema": "http://json-schema.org/draft-04/schema#",
+ "$ref": "#/definitions/extractors.ExtractorTypeHolder"
},
"regex": {
"items": {
@@ -191,26 +184,35 @@
"type": "boolean",
"title": "mark extracted value for internal variable use",
"description": "Internal when set to true will allow using the value extracted in the next request for some protocols"
+ },
+ "case-insensitive": {
+ "type": "boolean",
+ "title": "use case insensitive extract",
+ "description": "use case insensitive extract"
}
},
"additionalProperties": false,
"type": "object"
},
+ "extractors.ExtractorTypeHolder": {
+ "enum": [
+ "regex",
+ "kval",
+ "xpath",
+ "json"
+ ],
+ "type": "string",
+ "title": "type of the extractor",
+ "description": "Type of the extractor"
+ },
"matchers.Matcher": {
"required": [
"type"
],
"properties": {
"type": {
- "enum": [
- "status",
- "size",
- "word",
- "regex",
- "binary",
- "dsl"
- ],
- "type": "string",
+ "$schema": "http://json-schema.org/draft-04/schema#",
+ "$ref": "#/definitions/matchers.MatcherTypeHolder",
"title": "type of matcher",
"description": "Type of the matcher"
},
@@ -293,11 +295,55 @@
"type": "string",
"title": "encoding for word field",
"description": "Optional encoding for the word fields"
+ },
+ "case-insensitive": {
+ "type": "boolean",
+ "title": "use case insensitive match",
+ "description": "use case insensitive match"
}
},
"additionalProperties": false,
"type": "object"
},
+ "matchers.MatcherTypeHolder": {
+ "enum": [
+ "word",
+ "regex",
+ "binary",
+ "status",
+ "size",
+ "dsl"
+ ],
+ "type": "string",
+ "title": "type of the matcher",
+ "description": "Type of the matcher,enum=status,enum=size,enum=word,enum=regex,enum=binary,enum=dsl"
+ },
+ "generators.AttackTypeHolder": {
+ "enum": [
+ "batteringram",
+ "pitchfork",
+ "clusterbomb"
+ ],
+ "type": "string",
+ "title": "type of the attack",
+ "description": "Type of the attack"
+ },
+ "dns.DNSRequestTypeHolder": {
+ "enum": [
+ "A",
+ "NS",
+ "DS",
+ "CNAME",
+ "SOA",
+ "PTR",
+ "MX",
+ "TXT",
+ "AAAA"
+ ],
+ "type": "string",
+ "title": "type of DNS request to make",
+ "description": "Type is the type of DNS request to make,enum=A,enum=NS,enum=DS,enum=CNAME,enum=SOA,enum=PTR,enum=MX,enum=TXT,enum=AAAA"
+ },
"dns.Request": {
"properties": {
"matchers": {
@@ -336,18 +382,8 @@
"description": "Name is the Hostname to make DNS request for"
},
"type": {
- "enum": [
- "A",
- "NS",
- "DS",
- "CNAME",
- "SOA",
- "PTR",
- "MX",
- "TXT",
- "AAAA"
- ],
- "type": "string",
+ "$schema": "http://json-schema.org/draft-04/schema#",
+ "$ref": "#/definitions/dns.DNSRequestTypeHolder",
"title": "type of dns request to make",
"description": "Type is the type of DNS request to make"
},
@@ -369,6 +405,16 @@
"title": "retries for dns request",
"description": "Retries is the number of retries for the DNS request"
},
+ "trace": {
+ "type": "boolean",
+ "title": "trace operation",
+ "description": "Trace performs a trace operation for the target."
+ },
+ "trace-max-recursion": {
+ "type": "integer",
+ "title": "trace-max-recursion level for dns request",
+ "description": "TraceMaxRecursion is the number of max recursion allowed for trace operations"
+ },
"recursion": {
"type": "boolean",
"title": "recurse all servers",
@@ -519,30 +565,8 @@
"description": "Description of the headless action"
},
"action": {
- "enum": [
- "navigate",
- "script",
- "click",
- "rightclick",
- "text",
- "screenshot",
- "time",
- "select",
- "files",
- "waitload",
- "getresource",
- "extract",
- "setmethod",
- "addheader",
- "setheader",
- "deleteheader",
- "setbody",
- "waitevent",
- "keyboard",
- "debug",
- "sleep"
- ],
- "type": "string",
+ "$schema": "http://json-schema.org/draft-04/schema#",
+ "$ref": "#/definitions/engine.ActionTypeHolder",
"title": "action to perform",
"description": "Type of actions to perform"
}
@@ -550,6 +574,52 @@
"additionalProperties": false,
"type": "object"
},
+ "engine.ActionTypeHolder": {
+ "enum": [
+ "navigate",
+ "script",
+ "click",
+ "rightclick",
+ "text",
+ "screenshot",
+ "time",
+ "select",
+ "files",
+ "waitload",
+ "getresource",
+ "extract",
+ "set-method",
+ "addheader",
+ "setheader",
+ "deleteheader",
+ "setbody",
+ "waitevent",
+ "keyboard",
+ "debug",
+ "sleep",
+ "waitvisible"
+ ],
+ "type": "string",
+ "title": "action to perform",
+ "description": "Type of actions to perform,enum=navigate,enum=script,enum=click,enum=rightclick,enum=text,enum=screenshot,enum=time,enum=select,enum=files,enum=waitload,enum=getresource,enum=extract,enum=setmethod,enum=addheader,enum=setheader,enum=deleteheader,enum=setbody,enum=waitevent,enum=keyboard,enum=debug,enum=sleep"
+ },
+ "http.HTTPMethodTypeHolder": {
+ "enum": [
+ "GET",
+ "HEAD",
+ "POST",
+ "PUT",
+ "DELETE",
+ "CONNECT",
+ "OPTIONS",
+ "TRACE",
+ "PATCH",
+ "PURGE"
+ ],
+ "type": "string",
+ "title": "method is the HTTP request method",
+ "description": "Method is the HTTP Request Method,enum=GET,enum=HEAD,enum=POST,enum=PUT,enum=DELETE,enum=CONNECT,enum=OPTIONS,enum=TRACE,enum=PATCH,enum=PURGE"
+ },
"http.Request": {
"properties": {
"matchers": {
@@ -605,29 +675,14 @@
"description": "Optional name for the HTTP Request"
},
"attack": {
- "enum": [
- "batteringram",
- "pitchfork",
- "clusterbomb"
- ],
- "type": "string",
+ "$schema": "http://json-schema.org/draft-04/schema#",
+ "$ref": "#/definitions/generators.AttackTypeHolder",
"title": "attack is the payload combination",
"description": "Attack is the type of payload combinations to perform"
},
"method": {
- "enum": [
- "GET",
- "HEAD",
- "POST",
- "PUT",
- "DELETE",
- "CONNECT",
- "OPTIONS",
- "TRACE",
- "PATCH",
- "PURGE"
- ],
- "type": "string",
+ "$schema": "http://json-schema.org/draft-04/schema#",
+ "$ref": "#/definitions/http.HTTPMethodTypeHolder",
"title": "method is the http request method",
"description": "Method is the HTTP Request Method"
},
@@ -725,6 +780,11 @@
"type": "boolean",
"title": "skip variable checks",
"description": "Skips the check for unresolved variables in request"
+ },
+ "iterate-all": {
+ "type": "boolean",
+ "title": "iterate all the values",
+ "description": "Iterates all the values extracted from internal extractors"
}
},
"additionalProperties": false,
@@ -738,11 +798,8 @@
"description": "Data is the data to send as the input"
},
"type": {
- "enum": [
- "hex",
- "text"
- ],
- "type": "string",
+ "$schema": "http://json-schema.org/draft-04/schema#",
+ "$ref": "#/definitions/network.NetworkInputTypeHolder",
"title": "type is the type of input data",
"description": "Type of input specified in data field"
},
@@ -760,6 +817,15 @@
"additionalProperties": false,
"type": "object"
},
+ "network.NetworkInputTypeHolder": {
+ "enum": [
+ "hex",
+ "text"
+ ],
+ "type": "string",
+ "title": "type is the type of input data",
+ "description": "description=Type of input specified in data field,enum=hex,enum=text"
+ },
"network.Request": {
"properties": {
"id": {
@@ -776,12 +842,7 @@
"description": "Host to send network requests to"
},
"attack": {
- "enum": [
- "batteringram",
- "pitchfork",
- "clusterbomb"
- ],
- "type": "string",
+ "$ref": "#/definitions/generators.AttackTypeHolder",
"title": "attack is the payload combination",
"description": "Attack is the type of payload combinations to perform"
},
@@ -809,6 +870,11 @@
"title": "size of network response to read",
"description": "Size of response to read at the end. Default is 1024 bytes"
},
+ "read-all": {
+ "type": "boolean",
+ "title": "read all response stream",
+ "description": "Read all response stream till the server stops sending"
+ },
"matchers": {
"items": {
"$ref": "#/definitions/matchers.Matcher"
@@ -838,6 +904,128 @@
"additionalProperties": false,
"type": "object"
},
+ "ssl.Request": {
+ "properties": {
+ "matchers": {
+ "items": {
+ "$ref": "#/definitions/matchers.Matcher"
+ },
+ "type": "array",
+ "title": "matchers to run on response",
+ "description": "Detection mechanism to identify whether the request was successful by doing pattern matching"
+ },
+ "extractors": {
+ "items": {
+ "$ref": "#/definitions/extractors.Extractor"
+ },
+ "type": "array",
+ "title": "extractors to run on response",
+ "description": "Extractors contains the extraction mechanism for the request to identify and extract parts of the response"
+ },
+ "matchers-condition": {
+ "enum": [
+ "and",
+ "or"
+ ],
+ "type": "string",
+ "title": "condition between the matchers",
+ "description": "Conditions between the matchers"
+ },
+ "address": {
+ "type": "string",
+ "title": "address for the ssl request",
+ "description": "Address contains address for the request"
+ }
+ },
+ "additionalProperties": false,
+ "type": "object"
+ },
+ "websocket.Input": {
+ "properties": {
+ "data": {
+ "type": "string",
+ "title": "data to send as input",
+ "description": "Data is the data to send as the input"
+ },
+ "name": {
+ "type": "string",
+ "title": "optional name for data read",
+ "description": "Optional name of the data read to provide matching on"
+ }
+ },
+ "additionalProperties": false,
+ "type": "object"
+ },
+ "websocket.Request": {
+ "properties": {
+ "matchers": {
+ "items": {
+ "$ref": "#/definitions/matchers.Matcher"
+ },
+ "type": "array",
+ "title": "matchers to run on response",
+ "description": "Detection mechanism to identify whether the request was successful by doing pattern matching"
+ },
+ "extractors": {
+ "items": {
+ "$ref": "#/definitions/extractors.Extractor"
+ },
+ "type": "array",
+ "title": "extractors to run on response",
+ "description": "Extractors contains the extraction mechanism for the request to identify and extract parts of the response"
+ },
+ "matchers-condition": {
+ "enum": [
+ "and",
+ "or"
+ ],
+ "type": "string",
+ "title": "condition between the matchers",
+ "description": "Conditions between the matchers"
+ },
+ "address": {
+ "type": "string",
+ "title": "address for the websocket request",
+ "description": "Address contains address for the request"
+ },
+ "inputs": {
+ "items": {
+ "$schema": "http://json-schema.org/draft-04/schema#",
+ "$ref": "#/definitions/websocket.Input"
+ },
+ "type": "array",
+ "title": "inputs for the websocket request",
+ "description": "Inputs contains any input/output for the current request"
+ },
+ "headers": {
+ "patternProperties": {
+ ".*": {
+ "type": "string"
+ }
+ },
+ "type": "object",
+ "title": "headers contains the request headers",
+ "description": "Headers contains headers for the request"
+ },
+ "attack": {
+ "$ref": "#/definitions/generators.AttackTypeHolder",
+ "title": "attack is the payload combination",
+ "description": "Attack is the type of payload combinations to perform"
+ },
+ "payloads": {
+ "patternProperties": {
+ ".*": {
+ "additionalProperties": true
+ }
+ },
+ "type": "object",
+ "title": "payloads for the webosocket request",
+ "description": "Payloads contains any payloads for the current request"
+ }
+ },
+ "additionalProperties": false,
+ "type": "object"
+ },
"templates.Template": {
"required": [
"id",
@@ -845,6 +1033,7 @@
],
"properties": {
"id": {
+ "pattern": "^([a-zA-Z0-9]+[-_])*[a-zA-Z0-9]+$",
"type": "string",
"title": "id of the template",
"description": "The Unique ID for the template",
@@ -903,6 +1092,24 @@
"title": "headless requests to make",
"description": "Headless requests to make for the template"
},
+ "ssl": {
+ "items": {
+ "$schema": "http://json-schema.org/draft-04/schema#",
+ "$ref": "#/definitions/ssl.Request"
+ },
+ "type": "array",
+ "title": "ssl requests to make",
+ "description": "SSL requests to make for the template"
+ },
+ "websocket": {
+ "items": {
+ "$schema": "http://json-schema.org/draft-04/schema#",
+ "$ref": "#/definitions/websocket.Request"
+ },
+ "type": "array",
+ "title": "websocket requests to make",
+ "description": "Websocket requests to make for the template"
+ },
"workflows": {
"items": {
"$schema": "http://json-schema.org/draft-04/schema#",
@@ -916,6 +1123,11 @@
"type": "boolean",
"title": "mark requests as self-contained",
"description": "Mark Requests for the template as self-contained"
+ },
+ "stop-at-first-match": {
+ "type": "boolean",
+ "title": "stop at first match",
+ "description": "Stop at first match for the template"
}
},
"additionalProperties": false,
diff --git a/v2/Makefile b/v2/Makefile
index a24c1194f..cfcd3847f 100644
--- a/v2/Makefile
+++ b/v2/Makefile
@@ -18,5 +18,9 @@ docs:
./cmd/docgen/docgen docs.md nuclei-jsonschema.json
test:
$(GOTEST) -v ./...
+integration:
+ bash ../integration_tests/run.sh
+functional:
+ bash cmd/functional-test/run.sh
tidy:
$(GOMOD) tidy
\ No newline at end of file
diff --git a/v2/cmd/cve-annotate/main.go b/v2/cmd/cve-annotate/main.go
index 4786ad245..c2ccaa765 100644
--- a/v2/cmd/cve-annotate/main.go
+++ b/v2/cmd/cve-annotate/main.go
@@ -10,6 +10,7 @@ import (
"strings"
"github.com/Ice3man543/nvd"
+
"github.com/projectdiscovery/nuclei/v2/pkg/catalog"
)
@@ -116,26 +117,26 @@ func getCVEData(client *nvd.Client, filePath, data string) {
}
if !strings.Contains(infoBlockClean, "classification") && (cvssScore != 0 && cvssMetrics != "") {
changed = true
- newInfoBlock = newInfoBlock + fmt.Sprintf("\n classification:\n cvss-metrics: %s\n cvss-score: %.2f\n cve-id: %s", cvssMetrics, cvssScore, cveName)
+ newInfoBlock += fmt.Sprintf("\n classification:\n cvss-metrics: %s\n cvss-score: %.2f\n cve-id: %s", cvssMetrics, cvssScore, cveName)
if len(cweID) > 0 && (cweID[0] != "NVD-CWE-Other" && cweID[0] != "NVD-CWE-noinfo") {
- newInfoBlock = newInfoBlock + fmt.Sprintf("\n cwe-id: %s", strings.Join(cweID, ","))
+ newInfoBlock += fmt.Sprintf("\n cwe-id: %s", strings.Join(cweID, ","))
}
}
// If there is no description field, fill the description from CVE information
if !strings.Contains(infoBlockClean, "description:") && len(cveItem.CVE.Description.DescriptionData) > 0 {
changed = true
- newInfoBlock = newInfoBlock + fmt.Sprintf("\n description: %s", fmt.Sprintf("%q", cveItem.CVE.Description.DescriptionData[0].Value))
+ newInfoBlock += fmt.Sprintf("\n description: %s", fmt.Sprintf("%q", cveItem.CVE.Description.DescriptionData[0].Value))
}
if !strings.Contains(infoBlockClean, "reference:") && len(cveItem.CVE.References.ReferenceData) > 0 {
changed = true
- newInfoBlock = newInfoBlock + "\n reference:"
+ newInfoBlock += "\n reference:"
for _, reference := range cveItem.CVE.References.ReferenceData {
- newInfoBlock = newInfoBlock + fmt.Sprintf("\n - %s", reference.URL)
+ newInfoBlock += fmt.Sprintf("\n - %s", reference.URL)
}
}
newTemplate := strings.ReplaceAll(data, infoBlockClean, newInfoBlock)
if changed {
- _ = ioutil.WriteFile(filePath, []byte(newTemplate), 0777)
+ _ = ioutil.WriteFile(filePath, []byte(newTemplate), 0644)
fmt.Printf("Wrote updated template to %s\n", filePath)
}
}
diff --git a/v2/cmd/docgen/docgen.go b/v2/cmd/docgen/docgen.go
index 5cd359c94..7cfdaf275 100644
--- a/v2/cmd/docgen/docgen.go
+++ b/v2/cmd/docgen/docgen.go
@@ -10,10 +10,11 @@ import (
"strings"
"github.com/alecthomas/jsonschema"
+
"github.com/projectdiscovery/nuclei/v2/pkg/templates"
)
-var pathRegex = regexp.MustCompile(`github.com/projectdiscovery/nuclei/v2/(?:internal|pkg)/(?:.*/)?([A-Za-z\.]+)`)
+var pathRegex = regexp.MustCompile(`github\.com/projectdiscovery/nuclei/v2/(?:internal|pkg)/(?:.*/)?([A-Za-z.]+)`)
func main() {
// Generate yaml syntax documentation
@@ -21,7 +22,7 @@ func main() {
if err != nil {
log.Fatalf("Could not encode docs: %s\n", err)
}
- err = ioutil.WriteFile(os.Args[1], data, 0777)
+ err = ioutil.WriteFile(os.Args[1], data, 0644)
if err != nil {
log.Fatalf("Could not write docs: %s\n", err)
}
@@ -43,7 +44,7 @@ func main() {
for _, match := range pathRegex.FindAllStringSubmatch(schema, -1) {
schema = strings.ReplaceAll(schema, match[0], match[1])
}
- err = ioutil.WriteFile(os.Args[2], []byte(schema), 0777)
+ err = ioutil.WriteFile(os.Args[2], []byte(schema), 0644)
if err != nil {
log.Fatalf("Could not write jsonschema: %s\n", err)
}
diff --git a/v2/cmd/functional-test/main.go b/v2/cmd/functional-test/main.go
index fc96fde80..7f05014c1 100644
--- a/v2/cmd/functional-test/main.go
+++ b/v2/cmd/functional-test/main.go
@@ -11,7 +11,7 @@ import (
"github.com/logrusorgru/aurora"
"github.com/pkg/errors"
- "github.com/projectdiscovery/nuclei/v2/internal/testutils"
+ "github.com/projectdiscovery/nuclei/v2/pkg/testutils"
)
var (
diff --git a/v2/cmd/integration-test/dns.go b/v2/cmd/integration-test/dns.go
index 3e1ae8146..5bb2ed2c4 100644
--- a/v2/cmd/integration-test/dns.go
+++ b/v2/cmd/integration-test/dns.go
@@ -1,7 +1,7 @@
package main
import (
- "github.com/projectdiscovery/nuclei/v2/internal/testutils"
+ "github.com/projectdiscovery/nuclei/v2/pkg/testutils"
)
var dnsTestCases = map[string]testutils.TestCase{
diff --git a/v2/cmd/integration-test/headless.go b/v2/cmd/integration-test/headless.go
new file mode 100644
index 000000000..6039fdb64
--- /dev/null
+++ b/v2/cmd/integration-test/headless.go
@@ -0,0 +1,81 @@
+package main
+
+import (
+ "net/http"
+ "net/http/httptest"
+
+ "github.com/julienschmidt/httprouter"
+
+ "github.com/projectdiscovery/nuclei/v2/pkg/testutils"
+)
+
+var headlessTestcases = map[string]testutils.TestCase{
+ "headless/headless-basic.yaml": &headlessBasic{},
+ "headless/headless-header-action.yaml": &headlessHeaderActions{},
+ "headless/headless-extract-values.yaml": &headlessExtractValues{},
+}
+
+type headlessBasic struct{}
+
+// Execute executes a test case and returns an error if occurred
+func (h *headlessBasic) Execute(filePath string) error {
+ router := httprouter.New()
+ router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
+ _, _ = w.Write([]byte(""))
+ })
+ ts := httptest.NewServer(router)
+ defer ts.Close()
+
+ results, err := testutils.RunNucleiTemplateAndGetResults(filePath, ts.URL, debug, "-headless")
+ if err != nil {
+ return err
+ }
+ if len(results) != 1 {
+ return errIncorrectResultsCount(results)
+ }
+ return nil
+}
+
+type headlessHeaderActions struct{}
+
+// Execute executes a test case and returns an error if occurred
+func (h *headlessHeaderActions) Execute(filePath string) error {
+ router := httprouter.New()
+ router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
+ testValue := r.Header.Get("test")
+ if r.Header.Get("test") != "" {
+ _, _ = w.Write([]byte("" + testValue + ""))
+ }
+ })
+ ts := httptest.NewServer(router)
+ defer ts.Close()
+
+ results, err := testutils.RunNucleiTemplateAndGetResults(filePath, ts.URL, debug, "-headless")
+ if err != nil {
+ return err
+ }
+ if len(results) != 1 {
+ return errIncorrectResultsCount(results)
+ }
+ return nil
+}
+
+type headlessExtractValues struct{}
+
+// Execute executes a test case and returns an error if occurred
+func (h *headlessExtractValues) Execute(filePath string) error {
+ router := httprouter.New()
+ router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
+ _, _ = w.Write([]byte("
test"))
+ })
+ ts := httptest.NewServer(router)
+ defer ts.Close()
+ results, err := testutils.RunNucleiTemplateAndGetResults(filePath, ts.URL, debug, "-headless")
+ if err != nil {
+ return err
+ }
+ if len(results) != 3 {
+ return errIncorrectResultsCount(results)
+ }
+ return nil
+}
diff --git a/v2/cmd/integration-test/http.go b/v2/cmd/integration-test/http.go
index c3e23a7a0..ebf245317 100644
--- a/v2/cmd/integration-test/http.go
+++ b/v2/cmd/integration-test/http.go
@@ -11,27 +11,58 @@ import (
"github.com/julienschmidt/httprouter"
- "github.com/projectdiscovery/nuclei/v2/internal/testutils"
+ "github.com/projectdiscovery/nuclei/v2/pkg/testutils"
)
var httpTestcases = map[string]testutils.TestCase{
- "http/get-headers.yaml": &httpGetHeaders{},
- "http/get-query-string.yaml": &httpGetQueryString{},
- "http/get-redirects.yaml": &httpGetRedirects{},
- "http/get.yaml": &httpGet{},
- "http/post-body.yaml": &httpPostBody{},
- "http/post-json-body.yaml": &httpPostJSONBody{},
- "http/post-multipart-body.yaml": &httpPostMultipartBody{},
- "http/raw-cookie-reuse.yaml": &httpRawCookieReuse{},
- "http/raw-dynamic-extractor.yaml": &httpRawDynamicExtractor{},
- "http/raw-get-query.yaml": &httpRawGetQuery{},
- "http/raw-get.yaml": &httpRawGet{},
- "http/raw-payload.yaml": &httpRawPayload{},
- "http/raw-post-body.yaml": &httpRawPostBody{},
- "http/raw-unsafe-request.yaml": &httpRawUnsafeRequest{},
- "http/request-condition.yaml": &httpRequestCondition{},
- "http/request-condition-new.yaml": &httpRequestCondition{},
- "http/self-contained.yaml": &httpRequestSelContained{},
+ "http/get-headers.yaml": &httpGetHeaders{},
+ "http/get-query-string.yaml": &httpGetQueryString{},
+ "http/get-redirects.yaml": &httpGetRedirects{},
+ "http/get.yaml": &httpGet{},
+ "http/post-body.yaml": &httpPostBody{},
+ "http/post-json-body.yaml": &httpPostJSONBody{},
+ "http/post-multipart-body.yaml": &httpPostMultipartBody{},
+ "http/raw-cookie-reuse.yaml": &httpRawCookieReuse{},
+ "http/raw-dynamic-extractor.yaml": &httpRawDynamicExtractor{},
+ "http/raw-get-query.yaml": &httpRawGetQuery{},
+ "http/raw-get.yaml": &httpRawGet{},
+ "http/raw-payload.yaml": &httpRawPayload{},
+ "http/raw-post-body.yaml": &httpRawPostBody{},
+ "http/raw-unsafe-request.yaml": &httpRawUnsafeRequest{},
+ "http/request-condition.yaml": &httpRequestCondition{},
+ "http/request-condition-new.yaml": &httpRequestCondition{},
+ "http/interactsh.yaml": &httpInteractshRequest{},
+ "http/self-contained.yaml": &httpRequestSelContained{},
+ "http/get-case-insensitive.yaml": &httpGetCaseInsensitive{},
+ "http/get.yaml,http/get-case-insensitive.yaml": &httpGetCaseInsensitiveCluster{},
+ "http/get-redirects-chain-headers.yaml": &httpGetRedirectsChainHeaders{},
+ "http/dsl-matcher-variable.yaml": &httpDSLVariable{},
+}
+
+type httpInteractshRequest struct{}
+
+// Execute executes a test case and returns an error if occurred
+func (h *httpInteractshRequest) Execute(filePath string) error {
+ router := httprouter.New()
+ router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
+ value := r.Header.Get("url")
+ if value != "" {
+ if resp, _ := http.DefaultClient.Get(value); resp != nil {
+ resp.Body.Close()
+ }
+ }
+ })
+ ts := httptest.NewServer(router)
+ defer ts.Close()
+
+ results, err := testutils.RunNucleiTemplateAndGetResults(filePath, ts.URL, debug)
+ if err != nil {
+ return err
+ }
+ if len(results) != 1 {
+ return errIncorrectResultsCount(results)
+ }
+ return nil
}
type httpGetHeaders struct{}
@@ -125,6 +156,27 @@ func (h *httpGet) Execute(filePath string) error {
return nil
}
+type httpDSLVariable struct{}
+
+// Execute executes a test case and returns an error if occurred
+func (h *httpDSLVariable) Execute(filePath string) error {
+ router := httprouter.New()
+ router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
+ fmt.Fprintf(w, "This is test matcher text")
+ })
+ ts := httptest.NewServer(router)
+ defer ts.Close()
+
+ results, err := testutils.RunNucleiTemplateAndGetResults(filePath, ts.URL, debug)
+ if err != nil {
+ return err
+ }
+ if len(results) != 5 {
+ return errIncorrectResultsCount(results)
+ }
+ return nil
+}
+
type httpPostBody struct{}
// Execute executes a test case and returns an error if occurred
@@ -526,3 +578,75 @@ func (h *httpRequestSelContained) Execute(filePath string) error {
}
return nil
}
+
+type httpGetCaseInsensitive struct{}
+
+// Execute executes a test case and returns an error if occurred
+func (h *httpGetCaseInsensitive) Execute(filePath string) error {
+ router := httprouter.New()
+ router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
+ fmt.Fprintf(w, "THIS IS TEST MATCHER TEXT")
+ })
+ ts := httptest.NewServer(router)
+ defer ts.Close()
+
+ results, err := testutils.RunNucleiTemplateAndGetResults(filePath, ts.URL, debug)
+ if err != nil {
+ return err
+ }
+ if len(results) != 1 {
+ return errIncorrectResultsCount(results)
+ }
+ return nil
+}
+
+type httpGetCaseInsensitiveCluster struct{}
+
+// Execute executes a test case and returns an error if occurred
+func (h *httpGetCaseInsensitiveCluster) Execute(filesPath string) error {
+ router := httprouter.New()
+ router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
+ fmt.Fprintf(w, "This is test matcher text")
+ })
+ ts := httptest.NewServer(router)
+ defer ts.Close()
+
+ files := strings.Split(filesPath, ",")
+
+ results, err := testutils.RunNucleiTemplateAndGetResults(files[0], ts.URL, debug, "-t", files[1])
+ if err != nil {
+ return err
+ }
+ if len(results) != 2 {
+ return errIncorrectResultsCount(results)
+ }
+ return nil
+}
+
+type httpGetRedirectsChainHeaders struct{}
+
+// Execute executes a test case and returns an error if occurred
+func (h *httpGetRedirectsChainHeaders) Execute(filePath string) error {
+ router := httprouter.New()
+ router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
+ http.Redirect(w, r, "/redirected", http.StatusFound)
+ })
+ router.GET("/redirected", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
+ w.Header().Set("Secret", "TestRedirectHeaderMatch")
+ http.Redirect(w, r, "/final", http.StatusFound)
+ })
+ router.GET("/final", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
+ _, _ = w.Write([]byte("ok"))
+ })
+ ts := httptest.NewServer(router)
+ defer ts.Close()
+
+ results, err := testutils.RunNucleiTemplateAndGetResults(filePath, ts.URL, debug)
+ if err != nil {
+ return err
+ }
+ if len(results) != 1 {
+ return errIncorrectResultsCount(results)
+ }
+ return nil
+}
diff --git a/v2/cmd/integration-test/integration-test.go b/v2/cmd/integration-test/integration-test.go
index baa743e8d..70bb193ac 100644
--- a/v2/cmd/integration-test/integration-test.go
+++ b/v2/cmd/integration-test/integration-test.go
@@ -6,7 +6,8 @@ import (
"strings"
"github.com/logrusorgru/aurora"
- "github.com/projectdiscovery/nuclei/v2/internal/testutils"
+
+ "github.com/projectdiscovery/nuclei/v2/pkg/testutils"
)
var (
@@ -22,10 +23,13 @@ func main() {
failed := aurora.Red("[✘]").String()
protocolTests := map[string]map[string]testutils.TestCase{
- "http": httpTestcases,
- "network": networkTestcases,
- "dns": dnsTestCases,
- "workflow": workflowTestcases,
+ "http": httpTestcases,
+ "network": networkTestcases,
+ "dns": dnsTestCases,
+ "workflow": workflowTestcases,
+ "loader": loaderTestcases,
+ "websocket": websocketTestCases,
+ "headless": headlessTestcases,
}
for proto, tests := range protocolTests {
if protocol == "" || protocol == proto {
@@ -50,5 +54,5 @@ func main() {
}
func errIncorrectResultsCount(results []string) error {
- return fmt.Errorf("incorrect number of results %s", strings.Join(results, "\n\t"))
+ return fmt.Errorf("incorrect number of results \n\t%s", strings.Join(results, "\n\t"))
}
diff --git a/v2/cmd/integration-test/loader.go b/v2/cmd/integration-test/loader.go
new file mode 100644
index 000000000..3507b8a31
--- /dev/null
+++ b/v2/cmd/integration-test/loader.go
@@ -0,0 +1,124 @@
+package main
+
+import (
+ "fmt"
+ "net/http"
+ "net/http/httptest"
+ "os"
+ "strings"
+
+ "github.com/julienschmidt/httprouter"
+
+ "github.com/projectdiscovery/nuclei/v2/pkg/testutils"
+)
+
+var loaderTestcases = map[string]testutils.TestCase{
+ "loader/template-list.yaml": &remoteTemplateList{},
+ "loader/workflow-list.yaml": &remoteWorkflowList{},
+ "loader/nonexistent-template-list.yaml": &nonExistentTemplateList{},
+ "loader/nonexistent-workflow-list.yaml": &nonExistentWorkflowList{},
+}
+
+type remoteTemplateList struct{}
+
+// Execute executes a test case and returns an error if occurred
+func (h *remoteTemplateList) Execute(templateList string) error {
+ router := httprouter.New()
+
+ router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
+ fmt.Fprintf(w, "This is test matcher text")
+ if strings.EqualFold(r.Header.Get("test"), "nuclei") {
+ fmt.Fprintf(w, "This is test headers matcher text")
+ }
+ })
+
+ router.GET("/template_list", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
+ file, err := os.ReadFile(templateList)
+ if err != nil {
+ w.WriteHeader(500)
+ }
+ _, err = w.Write(file)
+ if err != nil {
+ w.WriteHeader(500)
+ }
+ })
+ ts := httptest.NewServer(router)
+ defer ts.Close()
+
+ results, err := testutils.RunNucleiBareArgsAndGetResults(debug, "-target", ts.URL, "-tu", ts.URL+"/template_list")
+ if err != nil {
+ return err
+ }
+ if len(results) != 2 {
+ return errIncorrectResultsCount(results)
+ }
+ return nil
+}
+
+type remoteWorkflowList struct{}
+
+// Execute executes a test case and returns an error if occurred
+func (h *remoteWorkflowList) Execute(workflowList string) error {
+ router := httprouter.New()
+
+ router.GET("/", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
+ fmt.Fprintf(w, "This is test matcher text")
+ if strings.EqualFold(r.Header.Get("test"), "nuclei") {
+ fmt.Fprintf(w, "This is test headers matcher text")
+ }
+ })
+
+ router.GET("/workflow_list", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
+ file, err := os.ReadFile(workflowList)
+ if err != nil {
+ w.WriteHeader(500)
+ }
+ _, err = w.Write(file)
+ if err != nil {
+ w.WriteHeader(500)
+ }
+ })
+ ts := httptest.NewServer(router)
+ defer ts.Close()
+
+ results, err := testutils.RunNucleiBareArgsAndGetResults(debug, "-target", ts.URL, "-wu", ts.URL+"/workflow_list")
+ if err != nil {
+ return err
+ }
+ if len(results) != 3 {
+ return errIncorrectResultsCount(results)
+ }
+ return nil
+}
+
+type nonExistentTemplateList struct{}
+
+// Execute executes a test case and returns an error if occurred
+func (h *nonExistentTemplateList) Execute(nonExistingTemplateList string) error {
+ router := httprouter.New()
+ ts := httptest.NewServer(router)
+ defer ts.Close()
+
+ _, err := testutils.RunNucleiBareArgsAndGetResults(debug, "-target", ts.URL, "-tu", ts.URL+"/404")
+ if err == nil {
+ return fmt.Errorf("expected error for nonexisting workflow url")
+ }
+
+ return nil
+}
+
+type nonExistentWorkflowList struct{}
+
+// Execute executes a test case and returns an error if occurred
+func (h *nonExistentWorkflowList) Execute(nonExistingWorkflowList string) error {
+ router := httprouter.New()
+ ts := httptest.NewServer(router)
+ defer ts.Close()
+
+ _, err := testutils.RunNucleiBareArgsAndGetResults(debug, "-target", ts.URL, "-wu", ts.URL+"/404")
+ if err == nil {
+ return fmt.Errorf("expected error for nonexistent workflow url")
+ }
+
+ return nil
+}
diff --git a/v2/cmd/integration-test/network.go b/v2/cmd/integration-test/network.go
index e170b07b9..ac34c5fd5 100644
--- a/v2/cmd/integration-test/network.go
+++ b/v2/cmd/integration-test/network.go
@@ -3,7 +3,7 @@ package main
import (
"net"
- "github.com/projectdiscovery/nuclei/v2/internal/testutils"
+ "github.com/projectdiscovery/nuclei/v2/pkg/testutils"
)
var networkTestcases = map[string]testutils.TestCase{
diff --git a/v2/cmd/integration-test/websocket.go b/v2/cmd/integration-test/websocket.go
new file mode 100644
index 000000000..af6d07451
--- /dev/null
+++ b/v2/cmd/integration-test/websocket.go
@@ -0,0 +1,115 @@
+package main
+
+import (
+ "net"
+ "strings"
+
+ "github.com/gobwas/ws/wsutil"
+
+ "github.com/projectdiscovery/nuclei/v2/pkg/testutils"
+)
+
+var websocketTestCases = map[string]testutils.TestCase{
+ "websocket/basic.yaml": &websocketBasic{},
+ "websocket/cswsh.yaml": &websocketCswsh{},
+ "websocket/no-cswsh.yaml": &websocketNoCswsh{},
+ "websocket/path.yaml": &websocketWithPath{},
+}
+
+type websocketBasic struct{}
+
+// Execute executes a test case and returns an error if occurred
+func (h *websocketBasic) Execute(filePath string) error {
+ connHandler := func(conn net.Conn) {
+ for {
+ msg, op, _ := wsutil.ReadClientData(conn)
+ if string(msg) != "hello" {
+ return
+ }
+ _ = wsutil.WriteServerMessage(conn, op, []byte("world"))
+ }
+ }
+ originValidate := func(origin string) bool {
+ return true
+ }
+ ts := testutils.NewWebsocketServer("", connHandler, originValidate)
+ defer ts.Close()
+
+ results, err := testutils.RunNucleiTemplateAndGetResults(filePath, strings.ReplaceAll(ts.URL, "http", "ws"), debug)
+ if err != nil {
+ return err
+ }
+ if len(results) != 1 {
+ return errIncorrectResultsCount(results)
+ }
+ return nil
+}
+
+type websocketCswsh struct{}
+
+// Execute executes a test case and returns an error if occurred
+func (h *websocketCswsh) Execute(filePath string) error {
+ connHandler := func(conn net.Conn) {
+
+ }
+ originValidate := func(origin string) bool {
+ return true
+ }
+ ts := testutils.NewWebsocketServer("", connHandler, originValidate)
+ defer ts.Close()
+
+ results, err := testutils.RunNucleiTemplateAndGetResults(filePath, strings.ReplaceAll(ts.URL, "http", "ws"), debug)
+ if err != nil {
+ return err
+ }
+ if len(results) != 1 {
+ return errIncorrectResultsCount(results)
+ }
+ return nil
+}
+
+type websocketNoCswsh struct{}
+
+// Execute executes a test case and returns an error if occurred
+func (h *websocketNoCswsh) Execute(filePath string) error {
+ connHandler := func(conn net.Conn) {
+
+ }
+ originValidate := func(origin string) bool {
+ return origin == "https://google.com"
+ }
+ ts := testutils.NewWebsocketServer("", connHandler, originValidate)
+ defer ts.Close()
+
+ results, err := testutils.RunNucleiTemplateAndGetResults(filePath, strings.ReplaceAll(ts.URL, "http", "ws"), debug)
+ if err != nil {
+ return err
+ }
+ if len(results) != 0 {
+ return errIncorrectResultsCount(results)
+ }
+ return nil
+}
+
+type websocketWithPath struct{}
+
+// Execute executes a test case and returns an error if occurred
+func (h *websocketWithPath) Execute(filePath string) error {
+ connHandler := func(conn net.Conn) {
+
+ }
+ originValidate := func(origin string) bool {
+ return origin == "https://google.com"
+ }
+ ts := testutils.NewWebsocketServer("/test", connHandler, originValidate)
+ defer ts.Close()
+
+ results, err := testutils.RunNucleiTemplateAndGetResults(filePath, strings.ReplaceAll(ts.URL, "http", "ws"), debug)
+ if err != nil {
+ return err
+ }
+ if len(results) != 0 {
+ return errIncorrectResultsCount(results)
+ }
+ return nil
+}
diff --git a/v2/cmd/integration-test/workflow.go b/v2/cmd/integration-test/workflow.go
index 5f39b4ebf..31202d090 100644
--- a/v2/cmd/integration-test/workflow.go
+++ b/v2/cmd/integration-test/workflow.go
@@ -7,7 +7,7 @@ import (
"github.com/julienschmidt/httprouter"
- "github.com/projectdiscovery/nuclei/v2/internal/testutils"
+ "github.com/projectdiscovery/nuclei/v2/pkg/testutils"
)
var workflowTestcases = map[string]testutils.TestCase{
diff --git a/v2/cmd/nuclei/issue-tracker-config.yaml b/v2/cmd/nuclei/issue-tracker-config.yaml
index 5364db5f5..508446243 100644
--- a/v2/cmd/nuclei/issue-tracker-config.yaml
+++ b/v2/cmd/nuclei/issue-tracker-config.yaml
@@ -5,51 +5,51 @@
#deny-list:
# severity: info, low, medium
-# github contains configuration options for github issue tracker
-#github:
-# # base-url (optional) is the self-hosted github application url
+# GitHub contains configuration options for GitHub issue tracker
+#GitHub:
+# # base-url (optional) is the self-hosted GitHub application url
# base-url: ""
-# # username is the username of the github user
+# # username is the username of the GitHub user
# username: ""
# # owner is the owner name of the repository for issues.
# owner: ""
-# # token is the token for github account.
+# # token is the token for GitHub account.
# token: ""
# # project-name is the name of the repository.
# project-name: ""
# # issue-label (optional) is the label of the created issue type
# issue-label: ""
-# # severity-as-label (optional) sets the sevetiry as the label of the created issue type
+# # severity-as-label (optional) sets the severity as the label of the created issue type
# severity-as-label: false
-# gitlab contains configuration options for gitlab issue tracker
-#gitlab:
-# # base-url (optional) is the self-hosted gitlab application url
+# GitLab contains configuration options for GitLab issue tracker
+#GitLab:
+# # base-url (optional) is the self-hosted GitLab application url
# base-url: ""
-# # username is the username of the gitlab user
+# # username is the username of the GitLab user
# username: ""
-# # token is the token for gitlab account.
+# # token is the token for GitLab account.
# token: ""
# # project-id is the ID of the repository.
# project-id: ""
# # issue-label (optional) is the label of the created issue type
# issue-label: ""
-# # severity-as-label (optional) sets the sevetiry as the label of the created issue type
+# # severity-as-label (optional) sets the severity as the label of the created issue type
# severity-as-label: false
-# jira contains configuration options for jira issue tracker
-#jira:
+# Jira contains configuration options for Jira issue tracker
+#Jira:
# # cloud (optional) is the boolean which tells if Jira instance is running in the cloud or on-prem version is used
# cloud: true
# # update-existing (optional) is the boolean which tells if the existing, opened issue should be updated or new one should be created
# update-existing: false
-# # URL is the jira application url
+# # URL is the Jira application URL
# url: ""
-# # account-id is the account-id of the jira user or username in case of on-prem Jira
+# # account-id is the account-id of the Jira user or username in case of on-prem Jira
# account-id: ""
-# # email is the email of the user for jira instance
+# # email is the email of the user for Jira instance
# email: ""
-# # token is the token for jira instance or password in case of on-prem Jira
+# # token is the token for Jira instance or password in case of on-prem Jira
# token: ""
# # project-name is the name of the project.
# project-name: ""
diff --git a/v2/cmd/nuclei/main.go b/v2/cmd/nuclei/main.go
index d2f8ebb9f..bd015fede 100644
--- a/v2/cmd/nuclei/main.go
+++ b/v2/cmd/nuclei/main.go
@@ -9,6 +9,7 @@ import (
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v2/internal/runner"
"github.com/projectdiscovery/nuclei/v2/pkg/model/types/severity"
+ templateTypes "github.com/projectdiscovery/nuclei/v2/pkg/templates/types"
"github.com/projectdiscovery/nuclei/v2/pkg/types"
)
@@ -54,8 +55,10 @@ on extensive configurability, massive extensibility and ease of use.`)
createGroup(flagSet, "templates", "Templates",
flagSet.StringSliceVarP(&options.Templates, "templates", "t", []string{}, "template or template directory paths to include in the scan"),
+ flagSet.StringSliceVarP(&options.TemplateURLs, "template-url", "tu", []string{}, "URL containing list of templates to run"),
flagSet.BoolVarP(&options.NewTemplates, "new-templates", "nt", false, "run only new templates added in latest nuclei-templates release"),
flagSet.StringSliceVarP(&options.Workflows, "workflows", "w", []string{}, "workflow or workflow directory paths to include in the scan"),
+ flagSet.StringSliceVarP(&options.WorkflowURLs, "workflow-url", "wu", []string{}, "URL containing list of workflows to run"),
flagSet.BoolVar(&options.Validate, "validate", false, "validate the passed templates to nuclei"),
flagSet.BoolVar(&options.TemplateList, "tl", false, "list all available templates"),
)
@@ -68,7 +71,9 @@ on extensive configurability, massive extensibility and ease of use.`)
flagSet.StringSliceVarP(&options.ExcludedTemplates, "exclude-templates", "et", []string{}, "template or template directory paths to exclude"),
flagSet.VarP(&options.Severities, "severity", "s", fmt.Sprintf("Templates to run based on severity. Possible values: %s", severity.GetSupportedSeverities().String())),
flagSet.VarP(&options.ExcludeSeverities, "exclude-severity", "es", fmt.Sprintf("Templates to exclude based on severity. Possible values: %s", severity.GetSupportedSeverities().String())),
- flagSet.NormalizedStringSliceVarP(&options.Author, "author", "a", []string{}, "execute templates that are (co-)created by the specified authors"),
+ flagSet.VarP(&options.Protocols, "type", "pt", fmt.Sprintf("protocol types to be executed. Possible values: %s", templateTypes.GetSupportedProtocolTypes())),
+ flagSet.VarP(&options.ExcludeProtocols, "exclude-type", "ept", fmt.Sprintf("protocol types to not be executed. Possible values: %s", templateTypes.GetSupportedProtocolTypes())),
+ flagSet.NormalizedStringSliceVarP(&options.Authors, "author", "a", []string{}, "execute templates that are (co-)created by the specified authors"),
)
createGroup(flagSet, "output", "Output",
@@ -80,6 +85,7 @@ on extensive configurability, massive extensibility and ease of use.`)
flagSet.BoolVarP(&options.NoMeta, "no-meta", "nm", false, "don't display match metadata"),
flagSet.BoolVarP(&options.NoTimestamp, "no-timestamp", "nts", false, "don't display timestamp metadata in CLI output"),
flagSet.StringVarP(&options.ReportingDB, "report-db", "rdb", "", "local nuclei reporting database (always use this to persist report data)"),
+ flagSet.BoolVarP(&options.MatcherStatus, "matcher-status", "ms", false, "show optional match failure status"),
flagSet.StringVarP(&options.MarkdownExportDirectory, "markdown-export", "me", "", "directory to export results in markdown format"),
flagSet.StringVarP(&options.SarifExport, "sarif-export", "se", "", "file to export results in SARIF format"),
)
@@ -93,6 +99,9 @@ on extensive configurability, massive extensibility and ease of use.`)
flagSet.BoolVarP(&options.SystemResolvers, "system-resolvers", "sr", false, "use system DNS resolving as error fallback"),
flagSet.BoolVar(&options.OfflineHTTP, "passive", false, "enable passive HTTP response processing mode"),
flagSet.BoolVarP(&options.EnvironmentVariables, "env-vars", "ev", false, "enable environment variables to be used in template"),
+ flagSet.StringVarP(&options.ClientCertFile, "client-cert", "cc", "", "client certificate file (PEM-encoded) used for authenticating against scanned hosts"),
+ flagSet.StringVarP(&options.ClientKeyFile, "client-key", "ck", "", "client key file (PEM-encoded) used for authenticating against scanned hosts"),
+ flagSet.StringVarP(&options.ClientCAFile, "client-ca", "ca", "", "client certificate authority file (PEM-encoded) used for authenticating against scanned hosts"),
)
createGroup(flagSet, "interactsh", "interactsh",
@@ -101,7 +110,7 @@ on extensive configurability, massive extensibility and ease of use.`)
flagSet.IntVar(&options.InteractionsCacheSize, "interactions-cache-size", 5000, "number of requests to keep in the interactions cache"),
flagSet.IntVar(&options.InteractionsEviction, "interactions-eviction", 60, "number of seconds to wait before evicting requests from cache"),
flagSet.IntVar(&options.InteractionsPollDuration, "interactions-poll-duration", 5, "number of seconds to wait before each interaction poll request"),
- flagSet.IntVar(&options.InteractionsColldownPeriod, "interactions-cooldown-period", 5, "extra time for interaction polling before exiting"),
+ flagSet.IntVar(&options.InteractionsCoolDownPeriod, "interactions-cooldown-period", 5, "extra time for interaction polling before exiting"),
flagSet.BoolVarP(&options.NoInteractsh, "no-interactsh", "ni", false, "disable interactsh server for OAST testing, exclude OAST based templates"),
)
@@ -110,6 +119,8 @@ on extensive configurability, massive extensibility and ease of use.`)
flagSet.IntVarP(&options.RateLimitMinute, "rate-limit-minute", "rlm", 0, "maximum number of requests to send per minute"),
flagSet.IntVarP(&options.BulkSize, "bulk-size", "bs", 25, "maximum number of hosts to be analyzed in parallel per template"),
flagSet.IntVarP(&options.TemplateThreads, "concurrency", "c", 25, "maximum number of templates to be executed in parallel"),
+ flagSet.IntVarP(&options.HeadlessBulkSize, "headless-bulk-size", "hbs", 10, "maximum number of headless hosts to be analyzed in parallel per template"),
+ flagSet.IntVarP(&options.HeadlessTemplateThreads, "headless-concurrency", "hc", 10, "maximum number of headless templates to be executed in parallel"),
)
createGroup(flagSet, "optimization", "Optimizations",
@@ -123,7 +134,7 @@ on extensive configurability, massive extensibility and ease of use.`)
)
createGroup(flagSet, "headless", "Headless",
- flagSet.BoolVar(&options.Headless, "headless", false, "enable templates that require headless browser support"),
+ flagSet.BoolVar(&options.Headless, "headless", false, "enable templates that require headless browser support (running as root on Linux disables the sandbox)"),
flagSet.IntVar(&options.PageTimeout, "page-timeout", 20, "seconds to wait for each page in headless mode"),
flagSet.BoolVarP(&options.ShowBrowser, "show-browser", "sb", false, "show the browser on the screen when running templates with headless mode"),
flagSet.BoolVarP(&options.UseInstalledChrome, "system-chrome", "sc", false, "Use local installed chrome browser instead of nuclei installed"),
@@ -133,12 +144,9 @@ on extensive configurability, massive extensibility and ease of use.`)
flagSet.BoolVar(&options.Debug, "debug", false, "show all requests and responses"),
flagSet.BoolVar(&options.DebugRequests, "debug-req", false, "show all sent requests"),
flagSet.BoolVar(&options.DebugResponse, "debug-resp", false, "show all received responses"),
-
- /* TODO why the separation? http://proxy:port vs socks5://proxy:port etc
- TODO should auto-set the HTTP_PROXY variable for the process? */
- flagSet.StringVarP(&options.ProxyURL, "proxy-url", "proxy", "", "URL of the HTTP proxy server"),
- flagSet.StringVar(&options.ProxySocksURL, "proxy-socks-url", "", "URL of the SOCKS proxy server"),
+ flagSet.NormalizedStringSliceVarP(&options.Proxy, "proxy", "p", []string{}, "list of HTTP(s)/SOCKS5 proxies to use (comma-separated or file input)"),
flagSet.StringVarP(&options.TraceLogFile, "trace-log", "tlog", "", "file to write sent requests trace log"),
+ flagSet.StringVarP(&options.ErrorLogFile, "error-log", "elog", "", "file to write sent requests error log"),
flagSet.BoolVar(&options.Version, "version", false, "show nuclei version"),
flagSet.BoolVarP(&options.Verbose, "verbose", "v", false, "show verbose output"),
flagSet.BoolVar(&options.VerboseVerbose, "vv", false, "display templates loaded for scan"),
@@ -175,10 +183,3 @@ func createGroup(flagSet *goflags.FlagSet, groupName, description string, flags
currentFlag.Group(groupName)
}
}
-
-/*
-HacktoberFest update: Below, you can find our ticket recommendations. Tasks with the "good first issue" label are suitable for first time contributors. If you have other ideas, or need help with getting started, join our Discord channel or reach out to @forgedhallpass.
-
-https://github.com/issues?q=is%3Aopen+is%3Aissue+user%3Aprojectdiscovery+label%3AHacktoberfest
-
-*/
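
Note: the new client-cert, client-key, and client-ca flags above all take PEM-encoded files and, per the validation added in options.go later in this diff, must be supplied together. The snippet below is a minimal, self-contained sketch of how such inputs are conventionally combined into a client TLS configuration in Go; the file names and the helper are illustrative only and do not reflect nuclei's internal wiring, which is outside this diff.

```go
package main

import (
	"crypto/tls"
	"crypto/x509"
	"fmt"
	"os"
)

// buildClientTLSConfig shows the conventional way the three PEM inputs are
// combined; the actual nuclei plumbing is not part of this diff.
func buildClientTLSConfig(certFile, keyFile, caFile string) (*tls.Config, error) {
	cert, err := tls.LoadX509KeyPair(certFile, keyFile)
	if err != nil {
		return nil, fmt.Errorf("could not load client key pair: %w", err)
	}
	caData, err := os.ReadFile(caFile)
	if err != nil {
		return nil, fmt.Errorf("could not read CA file: %w", err)
	}
	pool := x509.NewCertPool()
	if !pool.AppendCertsFromPEM(caData) {
		return nil, fmt.Errorf("could not parse CA certificate")
	}
	return &tls.Config{Certificates: []tls.Certificate{cert}, RootCAs: pool}, nil
}

func main() {
	// Hypothetical file names, for illustration only.
	cfg, err := buildClientTLSConfig("client.pem", "client-key.pem", "ca.pem")
	if err != nil {
		fmt.Println("tls config error:", err)
		return
	}
	fmt.Println("client certificates loaded:", len(cfg.Certificates))
}
```
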
diff --git a/v2/go.mod b/v2/go.mod
index e5e74498f..490908476 100644
--- a/v2/go.mod
+++ b/v2/go.mod
@@ -5,79 +5,79 @@ go 1.17
require (
github.com/Ice3man543/nvd v1.0.8
github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible
- github.com/akrylysov/pogreb v0.10.1 // indirect
- github.com/alecthomas/jsonschema v0.0.0-20210818095345-1014919a589c
+ github.com/alecthomas/jsonschema v0.0.0-20211022214203-8b29eab41725
github.com/andygrunwald/go-jira v1.14.0
- github.com/antchfx/htmlquery v1.2.3
+ github.com/antchfx/htmlquery v1.2.4
github.com/apex/log v1.9.0
github.com/blang/semver v3.5.1+incompatible
github.com/bluele/gcache v0.0.2
- github.com/c4milo/unpackit v0.1.0 // indirect
github.com/corpix/uarand v0.1.1
- github.com/go-rod/rod v0.101.7
+ github.com/go-playground/validator/v10 v10.9.0
+ github.com/go-rod/rod v0.101.8
+ github.com/gobwas/ws v1.1.0
github.com/google/go-github v17.0.0+incompatible
- github.com/gosuri/uilive v0.0.4 // indirect
- github.com/gosuri/uiprogress v0.0.1 // indirect
- github.com/itchyny/gojq v0.12.4
+ github.com/itchyny/gojq v0.12.5
github.com/json-iterator/go v1.1.12
github.com/julienschmidt/httprouter v1.3.0
github.com/karlseguin/ccache v2.0.3+incompatible
github.com/karrick/godirwalk v1.16.1
github.com/logrusorgru/aurora v2.0.3+incompatible
- github.com/mattn/go-runewidth v0.0.13 // indirect
github.com/miekg/dns v1.1.43
github.com/olekukonko/tablewriter v0.0.5
github.com/owenrumney/go-sarif v1.0.11
github.com/pkg/errors v0.9.1
github.com/projectdiscovery/clistats v0.0.8
- github.com/projectdiscovery/fastdialer v0.0.13-0.20210917073912-cad93d88e69e
+ github.com/projectdiscovery/cryptoutil v0.0.0-20210805184155-b5d2512f9345
+ github.com/projectdiscovery/fastdialer v0.0.13
github.com/projectdiscovery/filekv v0.0.0-20210915124239-3467ef45dd08
github.com/projectdiscovery/fileutil v0.0.0-20210928100737-cab279c5d4b5
- github.com/projectdiscovery/goflags v0.0.8-0.20211007103353-9b9229e8a240
+ github.com/projectdiscovery/goflags v0.0.8-0.20211028121123-edf02bc05b1a
github.com/projectdiscovery/gologger v1.1.4
github.com/projectdiscovery/hmap v0.0.2-0.20210917080408-0fd7bd286bfa
github.com/projectdiscovery/interactsh v0.0.6
- github.com/projectdiscovery/nuclei-updatecheck-api v0.0.0-20210914222811-0a072d262f77
+ github.com/projectdiscovery/nuclei-updatecheck-api v0.0.0-20211006155443-c0a8d610a4df
github.com/projectdiscovery/rawhttp v0.0.7
- github.com/projectdiscovery/retryabledns v1.0.13-0.20210916165024-76c5b76fd59a
+ github.com/projectdiscovery/retryabledns v1.0.13-0.20211109182249-43d38df59660
github.com/projectdiscovery/retryablehttp-go v1.0.2
- github.com/projectdiscovery/stringsutil v0.0.0-20211013053023-e7b2e104d80d
- github.com/projectdiscovery/yamldoc-go v1.0.2
+ github.com/projectdiscovery/stringsutil v0.0.0-20210830151154-f567170afdd9
+ github.com/projectdiscovery/yamldoc-go v1.0.3-0.20211126104922-00d2c6bb43b6
github.com/remeh/sizedwaitgroup v1.0.0
github.com/rs/xid v1.3.0
github.com/segmentio/ksuid v1.0.4
- github.com/shirou/gopsutil/v3 v3.21.7
+ github.com/shirou/gopsutil/v3 v3.21.9
github.com/spaolacci/murmur3 v1.1.0
github.com/spf13/cast v1.4.1
github.com/stretchr/testify v1.7.0
github.com/syndtr/goleveldb v1.0.0
github.com/tj/go-update v2.2.5-0.20200519121640-62b4b798fd68+incompatible
github.com/valyala/fasttemplate v1.2.1
- github.com/xanzy/go-gitlab v0.50.3
+ github.com/weppos/publicsuffix-go v0.15.1-0.20210928183822-5ee35905bd95
+ github.com/xanzy/go-gitlab v0.51.1
github.com/ysmood/gson v0.6.4 // indirect
github.com/ysmood/leakless v0.7.0 // indirect
go.uber.org/atomic v1.9.0
go.uber.org/multierr v1.7.0
go.uber.org/ratelimit v0.2.0
- golang.org/x/net v0.0.0-20210916014120-12bc252f5db8
- golang.org/x/oauth2 v0.0.0-20210817223510-7df4dd6e12ab
- golang.org/x/sys v0.0.0-20210915083310-ed5796bab164 // indirect
+ golang.org/x/net v0.0.0-20211020060615-d418f374d309
+ golang.org/x/oauth2 v0.0.0-20211005180243-6b3c2da341f1
golang.org/x/text v0.3.7
- google.golang.org/appengine v1.6.7 // indirect
gopkg.in/yaml.v2 v2.4.0
moul.io/http2curl v1.0.0
)
+require github.com/projectdiscovery/folderutil v0.0.0-20211203091551-e81604e6940e
+
require (
git.mills.io/prologic/smtpd v0.0.0-20210710122116-a525b76c287a // indirect
github.com/PuerkitoBio/goquery v1.6.0 // indirect
github.com/StackExchange/wmi v1.2.1 // indirect
+ github.com/akrylysov/pogreb v0.10.1 // indirect
github.com/andres-erbsen/clock v0.0.0-20160526145045-9e14626cd129 // indirect
github.com/andybalholm/cascadia v1.1.0 // indirect
- github.com/antchfx/xpath v1.1.6 // indirect
- github.com/aymerick/douceur v0.2.0 // indirect
+ github.com/antchfx/xpath v1.2.0 // indirect
github.com/bits-and-blooms/bitset v1.2.0 // indirect
github.com/bits-and-blooms/bloom/v3 v3.0.1 // indirect
+ github.com/c4milo/unpackit v0.1.0 // indirect
github.com/cnf/structhash v0.0.0-20201127153200-e1b16c1ebc08 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/dimchansky/utfbom v1.1.1 // indirect
@@ -85,13 +85,18 @@ require (
github.com/eggsampler/acme/v3 v3.2.1 // indirect
github.com/fatih/structs v1.1.0 // indirect
github.com/go-ole/go-ole v1.2.5 // indirect
+ github.com/go-playground/locales v0.14.0 // indirect
+ github.com/go-playground/universal-translator v0.18.0 // indirect
+ github.com/gobwas/httphead v0.1.0 // indirect
+ github.com/gobwas/pool v0.2.1 // indirect
github.com/golang-jwt/jwt v3.2.1+incompatible // indirect
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e // indirect
github.com/golang/protobuf v1.5.2 // indirect
github.com/golang/snappy v0.0.4 // indirect
github.com/google/go-querystring v1.0.0 // indirect
github.com/google/uuid v1.3.0 // indirect
- github.com/gorilla/css v1.0.0 // indirect
+ github.com/gosuri/uilive v0.0.4 // indirect
+ github.com/gosuri/uiprogress v0.0.1 // indirect
github.com/hashicorp/go-cleanhttp v0.5.1 // indirect
github.com/hashicorp/go-retryablehttp v0.6.8 // indirect
github.com/iancoleman/orderedmap v0.0.0-20190318233801-ac98e3ecb4b0 // indirect
@@ -100,20 +105,19 @@ require (
github.com/karlseguin/ccache/v2 v2.0.8 // indirect
github.com/klauspost/compress v1.13.6 // indirect
github.com/klauspost/pgzip v1.2.5 // indirect
+ github.com/leodido/go-urn v1.2.1 // indirect
github.com/mattn/go-isatty v0.0.13 // indirect
- github.com/microcosm-cc/bluemonday v1.0.15 // indirect
+ github.com/mattn/go-runewidth v0.0.13 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/projectdiscovery/blackrock v0.0.0-20210415162320-b38689ae3a2e // indirect
- github.com/projectdiscovery/cryptoutil v0.0.0-20210805184155-b5d2512f9345 // indirect
github.com/projectdiscovery/iputil v0.0.0-20210804143329-3a30fcde43f3 // indirect
github.com/projectdiscovery/mapcidr v0.0.8 // indirect
github.com/projectdiscovery/networkpolicy v0.0.1 // indirect
github.com/rivo/uniseg v0.2.0 // indirect
- github.com/saintfish/chardet v0.0.0-20120816061221-3af4cd4741ca // indirect
- github.com/tklauser/go-sysconf v0.3.7 // indirect
- github.com/tklauser/numcpus v0.2.3 // indirect
+ github.com/tklauser/go-sysconf v0.3.9 // indirect
+ github.com/tklauser/numcpus v0.3.0 // indirect
github.com/trivago/tgo v1.0.7 // indirect
github.com/ulikunitz/xz v0.5.10 // indirect
github.com/valyala/bytebufferpool v1.0.0 // indirect
@@ -121,7 +125,10 @@ require (
github.com/ysmood/goob v0.3.0 // indirect
github.com/zclconf/go-cty v1.8.4 // indirect
go.etcd.io/bbolt v1.3.6 // indirect
+ golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97 // indirect
+ golang.org/x/sys v0.0.0-20210915083310-ed5796bab164 // indirect
golang.org/x/time v0.0.0-20191024005414-555d28b269f0 // indirect
+ google.golang.org/appengine v1.6.7 // indirect
google.golang.org/protobuf v1.27.1 // indirect
gopkg.in/corvus-ch/zbase32.v1 v1.0.0 // indirect
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect
diff --git a/v2/go.sum b/v2/go.sum
index b95089c7e..bd73d8ced 100644
--- a/v2/go.sum
+++ b/v2/go.sum
@@ -67,8 +67,9 @@ github.com/ajg/form v1.5.1/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY
github.com/akrylysov/pogreb v0.10.0/go.mod h1:pNs6QmpQ1UlTJKDezuRWmaqkgUE2TuU0YTWyqJZ7+lI=
github.com/akrylysov/pogreb v0.10.1 h1:FqlR8VR7uCbJdfUob916tPM+idpKgeESDXOA1K0DK4w=
github.com/akrylysov/pogreb v0.10.1/go.mod h1:pNs6QmpQ1UlTJKDezuRWmaqkgUE2TuU0YTWyqJZ7+lI=
-github.com/alecthomas/jsonschema v0.0.0-20210818095345-1014919a589c h1:oJsq4z4xKgZWWOhrSZuLZ5KyYfRFytddLL1E5+psfIY=
github.com/alecthomas/jsonschema v0.0.0-20210818095345-1014919a589c/go.mod h1:/n6+1/DWPltRLWL/VKyUxg6tzsl5kHUCcraimt4vr60=
+github.com/alecthomas/jsonschema v0.0.0-20211022214203-8b29eab41725 h1:NjwIgLQlD46o79bheVG4SCdRnnOz4XtgUN1WABX5DLA=
+github.com/alecthomas/jsonschema v0.0.0-20211022214203-8b29eab41725/go.mod h1:/n6+1/DWPltRLWL/VKyUxg6tzsl5kHUCcraimt4vr60=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
@@ -79,10 +80,12 @@ github.com/andybalholm/cascadia v1.1.0 h1:BuuO6sSfQNFRu1LppgbD25Hr2vLYW25JvxHs5z
github.com/andybalholm/cascadia v1.1.0/go.mod h1:GsXiBklL0woXo1j/WYWtSYYC4ouU9PqHO0sqidkEA4Y=
github.com/andygrunwald/go-jira v1.14.0 h1:7GT/3qhar2dGJ0kq8w0d63liNyHOnxZsUZ9Pe4+AKBI=
github.com/andygrunwald/go-jira v1.14.0/go.mod h1:KMo2f4DgMZA1C9FdImuLc04x4WQhn5derQpnsuBFgqE=
-github.com/antchfx/htmlquery v1.2.3 h1:sP3NFDneHx2stfNXCKbhHFo8XgNjCACnU/4AO5gWz6M=
github.com/antchfx/htmlquery v1.2.3/go.mod h1:B0ABL+F5irhhMWg54ymEZinzMSi0Kt3I2if0BLYa3V0=
-github.com/antchfx/xpath v1.1.6 h1:6sVh6hB5T6phw1pFpHRQ+C4bd8sNI+O58flqtg7h0R0=
+github.com/antchfx/htmlquery v1.2.4 h1:qLteofCMe/KGovBI6SQgmou2QNyedFUW+pE+BpeZ494=
+github.com/antchfx/htmlquery v1.2.4/go.mod h1:2xO6iu3EVWs7R2JYqBbp8YzG50gj/ofqs5/0VZoDZLc=
github.com/antchfx/xpath v1.1.6/go.mod h1:Yee4kTMuNiPYJ7nSNorELQMr1J33uOpXDMByNYhvtNk=
+github.com/antchfx/xpath v1.2.0 h1:mbwv7co+x0RwgeGAOHdrKy89GvHaGvxxBtPK0uF9Zr8=
+github.com/antchfx/xpath v1.2.0/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs=
github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
github.com/apache/thrift v0.13.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
github.com/apex/log v1.9.0 h1:FHtw/xuaM8AgmvDDTI9fiwoAL25Sq2cxojnZICUU8l0=
@@ -96,14 +99,11 @@ github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5
github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY=
github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
github.com/aryann/difflib v0.0.0-20170710044230-e206f873d14a/go.mod h1:DAHtR1m6lCRdSC2Tm3DSWRPvIPr6xNKyeHdqDQSQT+A=
-github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw=
github.com/aws/aws-lambda-go v1.13.3/go.mod h1:4UKl9IzQMoD+QF79YdCuzCwp8VbmG4VAQwij/eHl5CU=
github.com/aws/aws-sdk-go v1.20.6/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
github.com/aws/aws-sdk-go v1.27.0/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
github.com/aws/aws-sdk-go-v2 v0.18.0/go.mod h1:JWVYvqSMppoMJC0x5wdwiImzgXTI9FuZwxzkQq9wy+g=
github.com/aybabtme/rgbterm v0.0.0-20170906152045-cc83f3b3ce59/go.mod h1:q/89r3U2H7sSsE2t6Kca0lfwTK8JdoNGS/yzM/4iH5I=
-github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk=
-github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4=
github.com/aymerick/raymond v2.0.3-0.20180322193309-b565731e1464+incompatible/go.mod h1:osfaiScAUVup+UC9Nfq76eWqDhXlp+4UYaA8uhTBO6g=
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
@@ -230,16 +230,30 @@ github.com/go-logr/logr v0.4.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTg
github.com/go-martini/martini v0.0.0-20170121215854-22fa46961aab/go.mod h1:/P9AEU963A2AYjv4d1V5eVL1CQbEJq6aCNHDDjibzu8=
github.com/go-ole/go-ole v1.2.5 h1:t4MGB5xEDZvXI+0rMjjsfBsD7yAgp/s9ZDkL1JndXwY=
github.com/go-ole/go-ole v1.2.5/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0=
+github.com/go-playground/assert/v2 v2.0.1 h1:MsBgLAaY856+nPRTKrp3/OZK38U/wa0CcBYNjji3q3A=
+github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
+github.com/go-playground/locales v0.14.0 h1:u50s323jtVGugKlcYeyzC0etD1HifMjqmJqb8WugfUU=
+github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs=
+github.com/go-playground/universal-translator v0.18.0 h1:82dyy6p4OuJq4/CByFNOn/jYrnRPArHwAcmLoJZxyho=
+github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl+lu/H90nyDXpg0fqeB/AQUGNTVA=
+github.com/go-playground/validator/v10 v10.9.0 h1:NgTtmN58D0m8+UuxtYmGztBJB7VnPgjj221I1QHci2A=
+github.com/go-playground/validator/v10 v10.9.0/go.mod h1:74x4gJWsvQexRdW8Pn3dXSGrTK4nAUsbPlLADvpJkos=
github.com/go-redis/redis v6.15.5+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA=
github.com/go-rod/rod v0.91.1/go.mod h1:/W4lcZiCALPD603MnJGIvhtywP3R6yRB9EDfFfsHiiI=
-github.com/go-rod/rod v0.101.7 h1:kbI5CNvcRhf7feybBln4xDutsM0mbsF0ENNZfKcF6WA=
-github.com/go-rod/rod v0.101.7/go.mod h1:N/zlT53CfSpq74nb6rOR0K8UF0SPUPBmzBnArrms+mY=
+github.com/go-rod/rod v0.101.8 h1:oV0O97uwjkCVyAP0hD6K6bBE8FUMIjs0dtF7l6kEBsU=
+github.com/go-rod/rod v0.101.8/go.mod h1:N/zlT53CfSpq74nb6rOR0K8UF0SPUPBmzBnArrms+mY=
github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE=
github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo=
+github.com/gobwas/httphead v0.1.0 h1:exrUm0f4YX0L7EBwZHuCF4GDp8aJfVeBrlLQrs6NqWU=
+github.com/gobwas/httphead v0.1.0/go.mod h1:O/RXo79gxV8G+RqlR/otEwx4Q36zl9rqC5u12GKvMCM=
github.com/gobwas/pool v0.2.0/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw=
+github.com/gobwas/pool v0.2.1 h1:xfeeEhW7pwmX8nuLVlqbzVc7udMDrwetjEv+TZIz1og=
+github.com/gobwas/pool v0.2.1/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw=
github.com/gobwas/ws v1.0.2/go.mod h1:szmBTxLgaFppYjEmNtny/v3w89xOydFnnZMcgRRu/EM=
+github.com/gobwas/ws v1.1.0 h1:7RFti/xnNkMJnrK7D1yQ/iCIB5OrrY/54/H930kIbHA=
+github.com/gobwas/ws v1.1.0/go.mod h1:nzvNcVha5eUziGrbxFCo6qFIojQHjJV5cLYIbezhfL0=
github.com/gogo/googleapis v0.0.0-20180223154316-0cd9801be74a/go.mod h1:gf4bu3Q80BeJ6H1S1vYPm8/ELATdvryBaNFGgqEef3s=
github.com/gogo/googleapis v1.1.0/go.mod h1:gf4bu3Q80BeJ6H1S1vYPm8/ELATdvryBaNFGgqEef3s=
github.com/gogo/googleapis v1.4.1/go.mod h1:2lpHqI5OcWCtVElxXnPt+s8oJvMpySlOyM6xDCrzib4=
@@ -328,8 +342,6 @@ github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5m
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg=
-github.com/gorilla/css v1.0.0 h1:BQqNyPTi50JCFMTw/b67hByjMVXZRwGha6wxVGkeihY=
-github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c=
github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
github.com/gorilla/mux v1.7.3/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
github.com/gorilla/websocket v0.0.0-20170926233335-4201258b820c/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
@@ -385,8 +397,9 @@ github.com/iris-contrib/go.uuid v2.0.0+incompatible/go.mod h1:iz2lgM/1UnEf1kP0L/
github.com/iris-contrib/i18n v0.0.0-20171121225848-987a633949d0/go.mod h1:pMCz62A0xJL6I+umB2YTlFRwWXaDFA0jy+5HzGiJjqI=
github.com/iris-contrib/schema v0.0.1/go.mod h1:urYA3uvUNG1TIIjOSCzHr9/LmbQo8LrOcOqfqxa4hXw=
github.com/itchyny/go-flags v1.5.0/go.mod h1:lenkYuCobuxLBAd/HGFE4LRoW8D3B6iXRQfWYJ+MNbA=
-github.com/itchyny/gojq v0.12.4 h1:8zgOZWMejEWCLjbF/1mWY7hY7QEARm7dtuhC6Bp4R8o=
github.com/itchyny/gojq v0.12.4/go.mod h1:EQUSKgW/YaOxmXpAwGiowFDO4i2Rmtk5+9dFyeiymAg=
+github.com/itchyny/gojq v0.12.5 h1:6SJ1BQ1VAwJAlIvLSIZmqHP/RUEq3qfVWvsRxrqhsD0=
+github.com/itchyny/gojq v0.12.5/go.mod h1:3e1hZXv+Kwvdp6V9HXpVrvddiHVApi5EDZwS+zLFeiE=
github.com/itchyny/timefmt-go v0.1.3 h1:7M3LGVDsqcd0VZH2U+x393obrzZisp7C0uEe921iRkU=
github.com/itchyny/timefmt-go v0.1.3/go.mod h1:0osSSCQSASBJMsIZnhAaF1C2fCBTJZXrnj37mG8/c+A=
github.com/jasonlvhit/gocron v0.0.1 h1:qTt5qF3b3srDjeOIR4Le1LfeyvoYzJlYpqvG7tJX5YU=
@@ -451,6 +464,8 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/labstack/echo/v4 v4.1.11/go.mod h1:i541M3Fj6f76NZtHSj7TXnyM8n2gaodfvfxNnFqi74g=
github.com/labstack/gommon v0.3.0/go.mod h1:MULnywXg0yavhxWKc+lOruYdAhDwPK9wf0OL7NoOu+k=
+github.com/leodido/go-urn v1.2.1 h1:BqpAaACuzVSgi/VLzGZIobT2z4v53pjosyNd9Yv6n/w=
+github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY=
github.com/lightstep/lightstep-tracer-common/golang/gogo v0.0.0-20190605223551-bc2310a04743/go.mod h1:qklhhLq1aX+mtWk9cPHPzaBjWImj5ULL6C7HFJtXQMM=
github.com/lightstep/lightstep-tracer-go v0.18.1/go.mod h1:jlF1pusYV4pidLvZ+XD0UBX0ZE6WURAspgAczcDHrL4=
github.com/logrusorgru/aurora v0.0.0-20200102142835-e9ef32dff381/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4=
@@ -479,8 +494,6 @@ github.com/mediocregopher/mediocre-go-lib v0.0.0-20181029021733-cb65787f37ed/go.
github.com/mediocregopher/radix/v3 v3.3.0/go.mod h1:EmfVyvspXz1uZEyPBMyGK+kjWiKQGvsUt6O3Pj+LDCQ=
github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
github.com/microcosm-cc/bluemonday v1.0.2/go.mod h1:iVP4YcDBq+n/5fb23BhYFvIMq/leAFZyRl6bYmGDlGc=
-github.com/microcosm-cc/bluemonday v1.0.15 h1:J4uN+qPng9rvkBZBoBb8YGR+ijuklIMpSOZZLjYpbeY=
-github.com/microcosm-cc/bluemonday v1.0.15/go.mod h1:ZLvAzeakRwrGnzQEvstVzVt3ZpqOF2+sdFr0Om+ce30=
github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
github.com/miekg/dns v1.1.29/go.mod h1:KNUDUusw/aVsxyTYZM1oqvCicbwhgbNgztCETuNZ7xM=
github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI=
@@ -573,17 +586,22 @@ github.com/projectdiscovery/cryptoutil v0.0.0-20210805184155-b5d2512f9345 h1:jT6
github.com/projectdiscovery/cryptoutil v0.0.0-20210805184155-b5d2512f9345/go.mod h1:clhQmPnt35ziJW1AhJRKyu8aygXCSoyWj6dtmZBRjjc=
github.com/projectdiscovery/fastdialer v0.0.12/go.mod h1:RkRbxqDCcCFhfNUbkzBIz/ieD4uda2JuUA4WJ+RLee0=
github.com/projectdiscovery/fastdialer v0.0.13-0.20210824195254-0113c1406542/go.mod h1:TuapmLiqtunJOxpM7g0tpTy/TUF/0S+XFyx0B0Wx0DQ=
-github.com/projectdiscovery/fastdialer v0.0.13-0.20210917073912-cad93d88e69e h1:xMAFYJgRxopAwKrj7HDwMBKJGCGDbHqopS8f959xges=
-github.com/projectdiscovery/fastdialer v0.0.13-0.20210917073912-cad93d88e69e/go.mod h1:O1l6+vAQy1QRo9FqyuyJ57W3CwpIXXg7oGo14Le6ZYQ=
+github.com/projectdiscovery/fastdialer v0.0.13 h1:BCe7JsFxRk1kAUQcy4X+9lqEuT7Y6LRSlHXfia03XOo=
+github.com/projectdiscovery/fastdialer v0.0.13/go.mod h1:Mex24omi3RxrmhA8Ote7rw+6LWMiaBvbJq8CNp0ksII=
github.com/projectdiscovery/filekv v0.0.0-20210915124239-3467ef45dd08 h1:NwD1R/du1dqrRKN3SJl9kT6tN3K9puuWFXEvYF2ihew=
github.com/projectdiscovery/filekv v0.0.0-20210915124239-3467ef45dd08/go.mod h1:paLCnwV8sL7ppqIwVQodQrk3F6mnWafwTDwRd7ywZwQ=
github.com/projectdiscovery/fileutil v0.0.0-20210804142714-ebba15fa53ca/go.mod h1:U+QCpQnX8o2N2w0VUGyAzjM3yBAe4BKedVElxiImsx0=
github.com/projectdiscovery/fileutil v0.0.0-20210914153648-31f843feaad4/go.mod h1:U+QCpQnX8o2N2w0VUGyAzjM3yBAe4BKedVElxiImsx0=
+github.com/projectdiscovery/fileutil v0.0.0-20210926202739-6050d0acf73c/go.mod h1:U+QCpQnX8o2N2w0VUGyAzjM3yBAe4BKedVElxiImsx0=
github.com/projectdiscovery/fileutil v0.0.0-20210928100737-cab279c5d4b5 h1:2dbm7UhrAKnccZttr78CAmG768sSCd+MBn4ayLVDeqA=
github.com/projectdiscovery/fileutil v0.0.0-20210928100737-cab279c5d4b5/go.mod h1:U+QCpQnX8o2N2w0VUGyAzjM3yBAe4BKedVElxiImsx0=
+github.com/projectdiscovery/folderutil v0.0.0-20210804143510-68474319fd84 h1:+VqGxv8ywpIHwGGSCOcGn/q5kkuB6F1AZtY42I8VnXc=
+github.com/projectdiscovery/folderutil v0.0.0-20210804143510-68474319fd84/go.mod h1:BMqXH4jNGByVdE2iLtKvc/6XStaiZRuCIaKv1vw9PnI=
+github.com/projectdiscovery/folderutil v0.0.0-20211203091551-e81604e6940e h1:ozfSeEc5j1f7NCEZAiAskP/KYfBD/TzPmFTIfh+CEwE=
+github.com/projectdiscovery/folderutil v0.0.0-20211203091551-e81604e6940e/go.mod h1:BMqXH4jNGByVdE2iLtKvc/6XStaiZRuCIaKv1vw9PnI=
github.com/projectdiscovery/goflags v0.0.7/go.mod h1:Jjwsf4eEBPXDSQI2Y+6fd3dBumJv/J1U0nmpM+hy2YY=
-github.com/projectdiscovery/goflags v0.0.8-0.20211007103353-9b9229e8a240 h1:b7zDUSsgN5f4/IlhKF6RVGsp/NkHIuty0o1YjzAMKUs=
-github.com/projectdiscovery/goflags v0.0.8-0.20211007103353-9b9229e8a240/go.mod h1:Jjwsf4eEBPXDSQI2Y+6fd3dBumJv/J1U0nmpM+hy2YY=
+github.com/projectdiscovery/goflags v0.0.8-0.20211028121123-edf02bc05b1a h1:EzwVm8i4zmzqZX55vrDtyfogwHh8AAZ3cWCJe4fEduk=
+github.com/projectdiscovery/goflags v0.0.8-0.20211028121123-edf02bc05b1a/go.mod h1:Jjwsf4eEBPXDSQI2Y+6fd3dBumJv/J1U0nmpM+hy2YY=
github.com/projectdiscovery/gologger v1.0.1/go.mod h1:Ok+axMqK53bWNwDSU1nTNwITLYMXMdZtRc8/y1c7sWE=
github.com/projectdiscovery/gologger v1.1.4 h1:qWxGUq7ukHWT849uGPkagPKF3yBPYAsTtMKunQ8O2VI=
github.com/projectdiscovery/gologger v1.1.4/go.mod h1:Bhb6Bdx2PV1nMaFLoXNBmHIU85iROS9y1tBuv7T5pMY=
@@ -591,7 +609,6 @@ github.com/projectdiscovery/hmap v0.0.1/go.mod h1:VDEfgzkKQdq7iGTKz8Ooul0NuYHQ8q
github.com/projectdiscovery/hmap v0.0.2-0.20210616215655-7b78e7f33d1f/go.mod h1:FH+MS/WNKTXJQtdRn+/Zg5WlKCiMN0Z1QUedUIuM5n8=
github.com/projectdiscovery/hmap v0.0.2-0.20210727180307-d63d35146e97/go.mod h1:FH+MS/WNKTXJQtdRn+/Zg5WlKCiMN0Z1QUedUIuM5n8=
github.com/projectdiscovery/hmap v0.0.2-0.20210825180603-fca7166c158f/go.mod h1:RLM8b1z2HEq74u5AXN1Lbvfq+1BZWpnTQJcwLnMLA54=
-github.com/projectdiscovery/hmap v0.0.2-0.20210917073634-bfb0e9c03800/go.mod h1:FH+MS/WNKTXJQtdRn+/Zg5WlKCiMN0Z1QUedUIuM5n8=
github.com/projectdiscovery/hmap v0.0.2-0.20210917080408-0fd7bd286bfa h1:9sZWFUAshIa/ea0RKjGRuuZiS5PzYXAFjTRUnSbezr0=
github.com/projectdiscovery/hmap v0.0.2-0.20210917080408-0fd7bd286bfa/go.mod h1:lV5f/PNPmCCjCN/dR317/chN9s7VG5h/xcbFfXOz8Fo=
github.com/projectdiscovery/interactsh v0.0.4/go.mod h1:PtJrddeBW1/LeOVgTvvnjUl3Hu/17jTkoIi8rXeEODE=
@@ -609,25 +626,26 @@ github.com/projectdiscovery/mapcidr v0.0.8 h1:16U05F2x3o/jSTsxSCY2hCuCs9xOSwVxjo
github.com/projectdiscovery/mapcidr v0.0.8/go.mod h1:7CzdUdjuLVI0s33dQ33lWgjg3vPuLFw2rQzZ0RxkT00=
github.com/projectdiscovery/networkpolicy v0.0.1 h1:RGRuPlxE8WLFF9tdKSjTsYiTIKHNHW20Kl0nGGiRb1I=
github.com/projectdiscovery/networkpolicy v0.0.1/go.mod h1:asvdg5wMy3LPVMGALatebKeOYH5n5fV5RCTv6DbxpIs=
-github.com/projectdiscovery/nuclei-updatecheck-api v0.0.0-20210914222811-0a072d262f77 h1:SNtAiRRrJtDJJDroaa/bFXt/Tix2LA6+rHRib0ORlJQ=
-github.com/projectdiscovery/nuclei-updatecheck-api v0.0.0-20210914222811-0a072d262f77/go.mod h1:pxWVDgq88t9dWv4+J2AIaWgY+EqOE1AyfHS0Tn23w4M=
+github.com/projectdiscovery/nuclei-updatecheck-api v0.0.0-20211006155443-c0a8d610a4df h1:CvTNAUD5JbLMqpMFoGNgfk2gOcN0NC57ICu0+oK84vs=
+github.com/projectdiscovery/nuclei-updatecheck-api v0.0.0-20211006155443-c0a8d610a4df/go.mod h1:pxWVDgq88t9dWv4+J2AIaWgY+EqOE1AyfHS0Tn23w4M=
github.com/projectdiscovery/nuclei/v2 v2.5.1/go.mod h1:sU2qcY0MQFS0CqP1BgkR8ZnUyFhqK0BdnY6bvTKNjXY=
github.com/projectdiscovery/rawhttp v0.0.7 h1:5m4peVgjbl7gqDcRYMTVEuX+Xs/nh76ohTkkvufucLg=
github.com/projectdiscovery/rawhttp v0.0.7/go.mod h1:PQERZAhAv7yxI/hR6hdDPgK1WTU56l204BweXrBec+0=
github.com/projectdiscovery/retryabledns v1.0.11/go.mod h1:4sMC8HZyF01HXukRleSQYwz4870bwgb4+hTSXTMrkf4=
github.com/projectdiscovery/retryabledns v1.0.12/go.mod h1:4sMC8HZyF01HXukRleSQYwz4870bwgb4+hTSXTMrkf4=
-github.com/projectdiscovery/retryabledns v1.0.13-0.20210916165024-76c5b76fd59a h1:WJQjr9qi/VjWhdNiGyNqcFi0967Gp0W3I769bCpHOJE=
github.com/projectdiscovery/retryabledns v1.0.13-0.20210916165024-76c5b76fd59a/go.mod h1:tXaLDs4n3pRZHwfa8mdXpUWe/AYDNK3HlWDjldhRbjI=
+github.com/projectdiscovery/retryabledns v1.0.13-0.20211109182249-43d38df59660 h1:Ooa5htghPkdyfpzy6Y5KLdyv4w8ePZWmfzFSPQlJStQ=
+github.com/projectdiscovery/retryabledns v1.0.13-0.20211109182249-43d38df59660/go.mod h1:UfszkO3x+GLKVOpXB7boddJKbwNCr+tMPSkfgCSNhl4=
github.com/projectdiscovery/retryablehttp-go v1.0.1/go.mod h1:SrN6iLZilNG1X4neq1D+SBxoqfAF4nyzvmevkTkWsek=
github.com/projectdiscovery/retryablehttp-go v1.0.2 h1:LV1/KAQU+yeWhNVlvveaYFsjBYRwXlNEq0PvrezMV0U=
github.com/projectdiscovery/retryablehttp-go v1.0.2/go.mod h1:dx//aY9V247qHdsRf0vdWHTBZuBQ2vm6Dq5dagxrDYI=
github.com/projectdiscovery/stringsutil v0.0.0-20210804142656-fd3c28dbaafe/go.mod h1:oTRc18WBv9t6BpaN9XBY+QmG28PUpsyDzRht56Qf49I=
github.com/projectdiscovery/stringsutil v0.0.0-20210823090203-2f5f137e8e1d/go.mod h1:oTRc18WBv9t6BpaN9XBY+QmG28PUpsyDzRht56Qf49I=
+github.com/projectdiscovery/stringsutil v0.0.0-20210830151154-f567170afdd9 h1:xbL1/7h0k6HE3RzPdYk9W/8pUxESrGWewTaZdIB5Pes=
github.com/projectdiscovery/stringsutil v0.0.0-20210830151154-f567170afdd9/go.mod h1:oTRc18WBv9t6BpaN9XBY+QmG28PUpsyDzRht56Qf49I=
-github.com/projectdiscovery/stringsutil v0.0.0-20211013053023-e7b2e104d80d h1:YBYwsm8MrSp9t7mLehyqGwUKZWB08fG+YRePQRo5iFw=
-github.com/projectdiscovery/stringsutil v0.0.0-20211013053023-e7b2e104d80d/go.mod h1:JK4F9ACNPgO+Lbm80khX2q1ABInBMbwIOmbsEE61Sn4=
-github.com/projectdiscovery/yamldoc-go v1.0.2 h1:SKb7PHgSOXm27Zci05ba0FxpyQiu6bGEiVMEcjCK1rQ=
github.com/projectdiscovery/yamldoc-go v1.0.2/go.mod h1:7uSxfMXaBmzvw8m5EhOEjB6nhz0rK/H9sUjq1ciZu24=
+github.com/projectdiscovery/yamldoc-go v1.0.3-0.20211126104922-00d2c6bb43b6 h1:DvWRQpw7Ib2CRL3ogYm/BWM+X0UGPfz1n9Ix9YKgFM8=
+github.com/projectdiscovery/yamldoc-go v1.0.3-0.20211126104922-00d2c6bb43b6/go.mod h1:8OfZj8p/axkUM/TJoS/O9LDjj/S8u17rxRbqluE9CU4=
github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
github.com/prometheus/client_golang v0.9.3-0.20190127221311-3c4408c8b829/go.mod h1:p2iRAGwDERtqlqzRXnrOVns+ignqQo//hLXqYxZYVNs=
github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo=
@@ -664,8 +682,6 @@ github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
github.com/ryanuber/columnize v2.1.0+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
-github.com/saintfish/chardet v0.0.0-20120816061221-3af4cd4741ca h1:NugYot0LIVPxTvN8n+Kvkn6TrbMyxQiuvKdEwFdR9vI=
-github.com/saintfish/chardet v0.0.0-20120816061221-3af4cd4741ca/go.mod h1:uugorj2VCxiV1x+LzaIdVa9b4S4qGAcH6cbhh4qVxOU=
github.com/samuel/go-zookeeper v0.0.0-20190923202752-2cc03de413da/go.mod h1:gi+0XIa01GRL2eRQVjQkKGqKF3SF9vZR/HnPullcV2E=
github.com/sclevine/agouti v3.0.0+incompatible/go.mod h1:b4WX9W9L1sfQKXeJf1mUTLZKJ48R1S7H23Ji7oFO5Bw=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
@@ -674,8 +690,9 @@ github.com/segmentio/ksuid v1.0.4 h1:sBo2BdShXjmcugAMwjugoGUdUV0pcxY5mW4xKRn3v4c
github.com/segmentio/ksuid v1.0.4/go.mod h1:/XUiZBD3kVx5SmUOl55voK5yeAbBNNIed+2O73XgrPE=
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
-github.com/shirou/gopsutil/v3 v3.21.7 h1:PnTqQamUjwEDSgn+nBGu0qSDV/CfvyiR/gwTH3i7HTU=
github.com/shirou/gopsutil/v3 v3.21.7/go.mod h1:RGl11Y7XMTQPmHh8F0ayC6haKNBgH4PXMJuTAcMOlz4=
+github.com/shirou/gopsutil/v3 v3.21.9 h1:Vn4MUz2uXhqLSiCbGFRc0DILbMVLAY92DSkT8bsYrHg=
+github.com/shirou/gopsutil/v3 v3.21.9/go.mod h1:YWp/H8Qs5fVmf17v7JNZzA0mPJ+mS2e9JdiUF9LlKzQ=
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
@@ -726,10 +743,12 @@ github.com/tj/go-kinesis v0.0.0-20171128231115-08b17f58cb1b/go.mod h1:/yhzCV0xPf
github.com/tj/go-spin v1.1.0/go.mod h1:Mg1mzmePZm4dva8Qz60H2lHwmJ2loum4VIrLgVnKwh4=
github.com/tj/go-update v2.2.5-0.20200519121640-62b4b798fd68+incompatible h1:guTq1YxwB8XSILkI9q4IrOmrCOS6Hc1L3hmOhi4Swcs=
github.com/tj/go-update v2.2.5-0.20200519121640-62b4b798fd68+incompatible/go.mod h1:waFwwyiAhGey2e+dNoYQ/iLhIcFqhCW7zL/+vDU1WLo=
-github.com/tklauser/go-sysconf v0.3.7 h1:HT7h4+536gjqeq1ZIJPgOl1rg1XFatQGVZWp7Py53eg=
github.com/tklauser/go-sysconf v0.3.7/go.mod h1:JZIdXh4RmBvZDBZ41ld2bGxRV3n4daiiqA3skYhAoQ4=
-github.com/tklauser/numcpus v0.2.3 h1:nQ0QYpiritP6ViFhrKYsiv6VVxOpum2Gks5GhnJbS/8=
+github.com/tklauser/go-sysconf v0.3.9 h1:JeUVdAOWhhxVcU6Eqr/ATFHgXk/mmiItdKeJPev3vTo=
+github.com/tklauser/go-sysconf v0.3.9/go.mod h1:11DU/5sG7UexIrp/O6g35hrWzu0JxlwQ3LSFUzyeuhs=
github.com/tklauser/numcpus v0.2.3/go.mod h1:vpEPS/JC+oZGGQ/My/vJnNsvMDQL6PwOqt8dsCw5j+E=
+github.com/tklauser/numcpus v0.3.0 h1:ILuRUQBtssgnxw0XXIjKUC56fgnOrFoQQ/4+DeU2biQ=
+github.com/tklauser/numcpus v0.3.0/go.mod h1:yFGUr7TUHQRAhyqBcEg0Ge34zDBAsIvJJcyE6boqnA8=
github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
github.com/trivago/tgo v1.0.7 h1:uaWH/XIy9aWYWpjm2CU3RpcqZXmX2ysQ9/Go+d9gyrM=
github.com/trivago/tgo v1.0.7/go.mod h1:w4dpD+3tzNIIiIfkWWa85w5/B77tlvdZckQ+6PkFnhc=
@@ -750,10 +769,13 @@ github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+
github.com/valyala/tcplisten v0.0.0-20161114210144-ceec8f93295a/go.mod h1:v3UYOV9WzVtRmSR+PDvWpU/qWl4Wa5LApYYX4ZtKbio=
github.com/vmihailenco/msgpack/v4 v4.3.12/go.mod h1:gborTTJjAo/GWTqqRjrLCn9pgNN+NXzzngzBKDPIqw4=
github.com/vmihailenco/tagparser v0.1.1/go.mod h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgqMEUPoW2WPyhdI=
+github.com/weppos/publicsuffix-go v0.15.1-0.20210928183822-5ee35905bd95 h1:DyAZOw3JsVd6LJHqhl4MpKQdYQEmat0C6pPPwom39Ow=
+github.com/weppos/publicsuffix-go v0.15.1-0.20210928183822-5ee35905bd95/go.mod h1:HYux0V0Zi04bHNwOHy4cXJVz/TQjYonnF6aoYhj+3QE=
github.com/wsxiaoys/terminal v0.0.0-20160513160801-0940f3fc43a0 h1:3UeQBvD0TFrlVjOeLOBz+CPAI8dnbqNSVwUwRrkp7vQ=
github.com/wsxiaoys/terminal v0.0.0-20160513160801-0940f3fc43a0/go.mod h1:IXCdmsXIht47RaVFLEdVnh1t+pgYtTAhQGj73kz+2DM=
-github.com/xanzy/go-gitlab v0.50.3 h1:M7ncgNhCN4jaFNyXxarJhCLa9Qi6fdmCxFFhMTQPZiY=
github.com/xanzy/go-gitlab v0.50.3/go.mod h1:Q+hQhV508bDPoBijv7YjK/Lvlb4PhVhJdKqXVQrUoAE=
+github.com/xanzy/go-gitlab v0.51.1 h1:wWKLalwx4omxFoHh3PLs9zDgAD4GXDP/uoxwMRCSiWM=
+github.com/xanzy/go-gitlab v0.51.1/go.mod h1:Q+hQhV508bDPoBijv7YjK/Lvlb4PhVhJdKqXVQrUoAE=
github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y=
@@ -834,6 +856,8 @@ golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8U
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20201112155050-0c6587e931a9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
+golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97 h1:/UOmuWzQfxxo9UtlXMwuQU8CMgg1eZXqTRwkSQJWKOI=
+golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@@ -922,16 +946,18 @@ golang.org/x/net v0.0.0-20210521195947-fe42d452be8f/go.mod h1:9nx3DQGgdP8bBQD5qx
golang.org/x/net v0.0.0-20210614182718-04defd469f4e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20210825183410-e898025ed96a/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
-golang.org/x/net v0.0.0-20210916014120-12bc252f5db8 h1:/6y1LfuqNuQdHAm0jjtPtgRcxIxjVZgm5OTu8/QhZvk=
golang.org/x/net v0.0.0-20210916014120-12bc252f5db8/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
+golang.org/x/net v0.0.0-20211020060615-d418f374d309 h1:A0lJIi+hcTR6aajJH4YqKWwohY4aW9RO7oRMcdv+HKI=
+golang.org/x/net v0.0.0-20211020060615-d418f374d309/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20181106182150-f42d05182288/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
-golang.org/x/oauth2 v0.0.0-20210817223510-7df4dd6e12ab h1:llrcWN/wOwO+6gAyfBzxb5hZ+c3mriU/0+KNgYu6adA=
golang.org/x/oauth2 v0.0.0-20210817223510-7df4dd6e12ab/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
+golang.org/x/oauth2 v0.0.0-20211005180243-6b3c2da341f1 h1:B333XXssMuKQeBwiNODx4TupZy7bf4sxFZnN2ZOcvUE=
+golang.org/x/oauth2 v0.0.0-20211005180243-6b3c2da341f1/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -995,6 +1021,7 @@ golang.org/x/sys v0.0.0-20200923182605-d9f96fdee20d/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201113233024-12cec1faf1ba/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20201207223542-d4d67f95c62d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@@ -1004,9 +1031,13 @@ golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20210426230700-d19ff857e887/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210601080250-7ecdf8ef093b/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210816074244-15123e1e1f71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210831042530-f4d43177bf5e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210915083310-ed5796bab164 h1:7ZDGnxgHAMw7thfC5bEos0RDAccZKxioiWBhfIe+tvw=
golang.org/x/sys v0.0.0-20210915083310-ed5796bab164/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
diff --git a/v2/internal/runner/banner.go b/v2/internal/runner/banner.go
index b75f09c33..56bf4ea4b 100644
--- a/v2/internal/runner/banner.go
+++ b/v2/internal/runner/banner.go
@@ -20,6 +20,6 @@ func showBanner() {
gologger.Print().Msgf("%s\n", banner)
gologger.Print().Msgf("\t\tprojectdiscovery.io\n\n")
- gologger.Error().Label("WRN").Msgf("Use with caution. You are responsible for your actions.\n")
- gologger.Error().Label("WRN").Msgf("Developers assume no liability and are not responsible for any misuse or damage.\n")
+ gologger.Print().Label("WRN").Msgf("Use with caution. You are responsible for your actions.\n")
+ gologger.Print().Label("WRN").Msgf("Developers assume no liability and are not responsible for any misuse or damage.\n")
}
diff --git a/v2/internal/runner/options.go b/v2/internal/runner/options.go
index 59791fe63..54a79ac8d 100644
--- a/v2/internal/runner/options.go
+++ b/v2/internal/runner/options.go
@@ -2,12 +2,14 @@ package runner
import (
"bufio"
- "errors"
- "net/url"
"os"
"path/filepath"
"strings"
+ "github.com/pkg/errors"
+
+ "github.com/go-playground/validator/v10"
+
"github.com/projectdiscovery/fileutil"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/gologger/formatter"
@@ -24,7 +26,6 @@ func ParseOptions(options *types.Options) {
// Read the inputs and configure the logging
configureOutput(options)
-
// Show the user the banner
showBanner()
@@ -47,13 +48,6 @@ func ParseOptions(options *types.Options) {
gologger.Fatal().Msgf("Program exiting: %s\n", err)
}
- // Auto adjust rate limits when using headless mode if the user
- // hasn't specified any custom limits.
- if options.Headless && options.BulkSize == 25 && options.TemplateThreads == 10 {
- options.BulkSize = 2
- options.TemplateThreads = 2
- }
-
// Load the resolvers if user asked for them
loadResolvers(options)
@@ -73,56 +67,56 @@ func ParseOptions(options *types.Options) {
// hasStdin returns true if we have stdin input
func hasStdin() bool {
- stat, err := os.Stdin.Stat()
+ fi, err := os.Stdin.Stat()
if err != nil {
return false
}
-
- isPipedFromChrDev := (stat.Mode() & os.ModeCharDevice) == 0
- isPipedFromFIFO := (stat.Mode() & os.ModeNamedPipe) != 0
-
- return isPipedFromChrDev || isPipedFromFIFO
+ if fi.Mode()&os.ModeNamedPipe == 0 {
+ return false
+ }
+ return true
}
// validateOptions validates the configuration options passed
func validateOptions(options *types.Options) error {
+ validate := validator.New()
+ if err := validate.Struct(options); err != nil {
+ if _, ok := err.(*validator.InvalidValidationError); ok {
+ return err
+ }
+ errs := []string{}
+ for _, err := range err.(validator.ValidationErrors) {
+ errs = append(errs, err.Namespace()+": "+err.Tag())
+ }
+ return errors.Wrap(errors.New(strings.Join(errs, ", ")), "validation failed for these fields")
+ }
if options.Verbose && options.Silent {
return errors.New("both verbose and silent mode specified")
}
-
- if err := validateProxyURL(options.ProxyURL, "invalid http proxy format (It should be http://username:password@host:port)"); err != nil {
+ // load the proxy server list from file or CLI input and test connectivity
+ if err := loadProxyServers(options); err != nil {
return err
}
-
- if err := validateProxyURL(options.ProxySocksURL, "invalid socks proxy format (It should be socks5://username:password@host:port)"); err != nil {
- return err
- }
-
if options.Validate {
options.Headless = true // required for correct validation of headless templates
validateTemplatePaths(options.TemplatesDirectory, options.Templates, options.Workflows)
}
- return nil
-}
-
-func validateProxyURL(proxyURL, message string) error {
- if proxyURL != "" && !isValidURL(proxyURL) {
- return errors.New(message)
+ // Verify whether any of the client certificate options were set, since all three are required to work properly
+ if len(options.ClientCertFile) > 0 || len(options.ClientKeyFile) > 0 || len(options.ClientCAFile) > 0 {
+ if len(options.ClientCertFile) == 0 || len(options.ClientKeyFile) == 0 || len(options.ClientCAFile) == 0 {
+ return errors.New("if a client certification option is provided, then all three must be provided")
+ }
+ validateCertificatePaths([]string{options.ClientCertFile, options.ClientKeyFile, options.ClientCAFile})
}
return nil
}
-func isValidURL(urlString string) bool {
- _, err := url.Parse(urlString)
- return err == nil
-}
-
// configureOutput configures the output logging levels to be displayed on the screen
func configureOutput(options *types.Options) {
// If the user desires verbose output, show verbose output
- if options.Verbose {
+ if options.Verbose || options.Validate {
gologger.DefaultLogger.SetMaxLevel(levels.LevelVerbose)
}
if options.Debug {
@@ -164,7 +158,6 @@ func loadResolvers(options *types.Options) {
func validateTemplatePaths(templatesDirectory string, templatePaths, workflowPaths []string) {
allGivenTemplatePaths := append(templatePaths, workflowPaths...)
-
for _, templatePath := range allGivenTemplatePaths {
if templatesDirectory != templatePath && filepath.IsAbs(templatePath) {
fileInfo, err := os.Stat(templatePath)
@@ -179,3 +172,14 @@ func validateTemplatePaths(templatesDirectory string, templatePaths, workflowPat
}
}
}
+
+func validateCertificatePaths(certificatePaths []string) {
+ for _, certificatePath := range certificatePaths {
+ if _, err := os.Stat(certificatePath); os.IsNotExist(err) {
+ // The provided path to the PEM certificate for client authentication does not exist. Since all
+ // certificate files are required for successful authentication, log a fatal error and exit
+ gologger.Fatal().Msgf("The given path (%s) to the certificate does not exist!", certificatePath)
+ break
+ }
+ }
+}
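
The rewritten validateOptions above delegates field-level checks to go-playground/validator struct tags defined on types.Options (not shown in this diff). As a rough sketch of that behavior, the standalone example below uses a hypothetical struct with illustrative tags and aggregates failures the same way validateOptions does:

```go
package main

import (
	"fmt"
	"strings"

	"github.com/go-playground/validator/v10"
)

// exampleOptions is a hypothetical stand-in; the real tags live on types.Options.
type exampleOptions struct {
	BulkSize        int `validate:"gte=1"`
	TemplateThreads int `validate:"gte=1"`
}

func main() {
	validate := validator.New()
	opts := &exampleOptions{BulkSize: 0, TemplateThreads: 25}

	if err := validate.Struct(opts); err != nil {
		if _, ok := err.(*validator.InvalidValidationError); ok {
			fmt.Println("invalid validation:", err)
			return
		}
		var errs []string
		for _, fieldErr := range err.(validator.ValidationErrors) {
			errs = append(errs, fieldErr.Namespace()+": "+fieldErr.Tag())
		}
		// Mirrors the error aggregation performed in validateOptions.
		fmt.Println("validation failed for these fields:", strings.Join(errs, ", "))
	}
}
```
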
diff --git a/v2/internal/runner/processor.go b/v2/internal/runner/processor.go
deleted file mode 100644
index 6b14b5f91..000000000
--- a/v2/internal/runner/processor.go
+++ /dev/null
@@ -1,81 +0,0 @@
-package runner
-
-import (
- "github.com/projectdiscovery/gologger"
- "github.com/projectdiscovery/nuclei/v2/pkg/templates"
- "github.com/remeh/sizedwaitgroup"
- "go.uber.org/atomic"
-)
-
-// processSelfContainedTemplates execute a self-contained template.
-func (r *Runner) processSelfContainedTemplates(template *templates.Template) bool {
- match, err := template.Executer.Execute("")
- if err != nil {
- gologger.Warning().Msgf("[%s] Could not execute step: %s\n", r.colorizer.BrightBlue(template.ID), err)
- }
- return match
-}
-
-// processTemplateWithList execute a template against the list of user provided targets
-func (r *Runner) processTemplateWithList(template *templates.Template) bool {
- results := &atomic.Bool{}
- wg := sizedwaitgroup.New(r.options.BulkSize)
- processItem := func(k, _ []byte) error {
- URL := string(k)
-
- // Skip if the host has had errors
- if r.hostErrors != nil && r.hostErrors.Check(URL) {
- return nil
- }
- wg.Add()
- go func(URL string) {
- defer wg.Done()
-
- match, err := template.Executer.Execute(URL)
- if err != nil {
- gologger.Warning().Msgf("[%s] Could not execute step: %s\n", r.colorizer.BrightBlue(template.ID), err)
- }
- results.CAS(false, match)
- }(URL)
- return nil
- }
- if r.options.Stream {
- _ = r.hostMapStream.Scan(processItem)
- } else {
- r.hostMap.Scan(processItem)
- }
-
- wg.Wait()
- return results.Load()
-}
-
-// processTemplateWithList process a template on the URL list
-func (r *Runner) processWorkflowWithList(template *templates.Template) bool {
- results := &atomic.Bool{}
- wg := sizedwaitgroup.New(r.options.BulkSize)
-
- processItem := func(k, _ []byte) error {
- URL := string(k)
-
- // Skip if the host has had errors
- if r.hostErrors != nil && r.hostErrors.Check(URL) {
- return nil
- }
- wg.Add()
- go func(URL string) {
- defer wg.Done()
- match := template.CompiledWorkflow.RunWorkflow(URL)
- results.CAS(false, match)
- }(URL)
- return nil
- }
-
- if r.options.Stream {
- _ = r.hostMapStream.Scan(processItem)
- } else {
- r.hostMap.Scan(processItem)
- }
-
- wg.Wait()
- return results.Load()
-}
diff --git a/v2/internal/runner/proxy.go b/v2/internal/runner/proxy.go
new file mode 100644
index 000000000..6aca4be6a
--- /dev/null
+++ b/v2/internal/runner/proxy.go
@@ -0,0 +1,123 @@
+package runner
+
+import (
+ "bufio"
+ "errors"
+ "fmt"
+ "net"
+ "net/url"
+ "os"
+ "strings"
+ "time"
+
+ "github.com/projectdiscovery/fileutil"
+ "github.com/projectdiscovery/gologger"
+ "github.com/projectdiscovery/nuclei/v2/pkg/types"
+)
+
+var proxyURLList []url.URL
+
+// loadProxyServers loads the list of proxy servers from a file or comma-separated input
+func loadProxyServers(options *types.Options) error {
+ if len(options.Proxy) == 0 {
+ return nil
+ }
+ for _, p := range options.Proxy {
+ if proxyURL, err := validateProxyURL(p); err == nil {
+ proxyURLList = append(proxyURLList, proxyURL)
+ } else if fileutil.FileExists(p) {
+ file, err := os.Open(p)
+ if err != nil {
+ return fmt.Errorf("could not open proxy file: %w", err)
+ }
+ defer file.Close()
+ scanner := bufio.NewScanner(file)
+ for scanner.Scan() {
+ proxy := scanner.Text()
+ if strings.TrimSpace(proxy) == "" {
+ continue
+ }
+ if proxyURL, err := validateProxyURL(proxy); err != nil {
+ return err
+ } else {
+ proxyURLList = append(proxyURLList, proxyURL)
+ }
+ }
+ } else {
+ return fmt.Errorf("invalid proxy file or URL provided for %s", p)
+ }
+ }
+ return processProxyList(options)
+}
+
+func processProxyList(options *types.Options) error {
+ if len(proxyURLList) == 0 {
+ return fmt.Errorf("could not find any valid proxy")
+ } else {
+ done := make(chan bool)
+ exitCounter := make(chan bool)
+ counter := 0
+ for _, url := range proxyURLList {
+ go runProxyConnectivity(url, options, done, exitCounter)
+ }
+ for {
+ select {
+ case <-done:
+ {
+ close(done)
+ return nil
+ }
+ case <-exitCounter:
+ {
+ if counter += 1; counter == len(proxyURLList) {
+ return errors.New("no reachable proxy found")
+ }
+ }
+ }
+ }
+ }
+}
+
+func runProxyConnectivity(proxyURL url.URL, options *types.Options, done chan bool, exitCounter chan bool) {
+ if err := testProxyConnection(proxyURL, options.Timeout); err == nil {
+ if types.ProxyURL == "" && types.ProxySocksURL == "" {
+ assignProxyURL(proxyURL, options)
+ done <- true
+ }
+ }
+ exitCounter <- true
+}
+
+func testProxyConnection(proxyURL url.URL, timeoutDelay int) error {
+ timeout := time.Duration(timeoutDelay) * time.Second
+ _, err := net.DialTimeout("tcp", fmt.Sprintf("%s:%s", proxyURL.Hostname(), proxyURL.Port()), timeout)
+ if err != nil {
+ return err
+ }
+ return nil
+}
+
+func assignProxyURL(proxyURL url.URL, options *types.Options) {
+ os.Setenv(types.HTTP_PROXY_ENV, proxyURL.String())
+ if proxyURL.Scheme == types.HTTP || proxyURL.Scheme == types.HTTPS {
+ types.ProxyURL = proxyURL.String()
+ types.ProxySocksURL = ""
+ gologger.Verbose().Msgf("Using %s as proxy server", proxyURL.String())
+ } else if proxyURL.Scheme == types.SOCKS5 {
+ types.ProxyURL = ""
+ types.ProxySocksURL = proxyURL.String()
+ gologger.Verbose().Msgf("Using %s as socket proxy server", proxyURL.String())
+ }
+}
+
+func validateProxyURL(proxy string) (url.URL, error) {
+ if url, err := url.Parse(proxy); err == nil && isSupportedProtocol(url.Scheme) {
+ return *url, nil
+ }
+ return url.URL{}, errors.New("invalid proxy format (It should be http[s]/socks5://[username:password@]host:port)")
+}
+
+// isSupportedProtocol checks whether the given proxy scheme is supported
+func isSupportedProtocol(value string) bool {
+ return value == types.HTTP || value == types.HTTPS || value == types.SOCKS5
+}
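
For reference, here is a standalone sketch of the acceptance rule enforced by validateProxyURL/isSupportedProtocol above: a proxy entry must parse as a URL with an http, https, or socks5 scheme, otherwise loadProxyServers falls back to treating the entry as a file of proxy URLs. The helper below is illustrative and not part of the package:

```go
package main

import (
	"fmt"
	"net/url"
)

// supportedScheme mirrors isSupportedProtocol above (http, https, socks5).
func supportedScheme(scheme string) bool {
	return scheme == "http" || scheme == "https" || scheme == "socks5"
}

func main() {
	candidates := []string{
		"http://127.0.0.1:8080",
		"socks5://user:pass@127.0.0.1:1080",
		"ftp://127.0.0.1:21", // rejected: unsupported scheme
	}
	for _, candidate := range candidates {
		parsed, err := url.Parse(candidate)
		if err != nil || !supportedScheme(parsed.Scheme) {
			fmt.Printf("%s -> rejected\n", candidate)
			continue
		}
		fmt.Printf("%s -> accepted (host %s, port %s)\n", candidate, parsed.Hostname(), parsed.Port())
	}
}
```
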
diff --git a/v2/internal/runner/runner.go b/v2/internal/runner/runner.go
index b3f806952..b71587267 100644
--- a/v2/internal/runner/runner.go
+++ b/v2/internal/runner/runner.go
@@ -2,7 +2,6 @@ package runner
import (
"bufio"
- "fmt"
"os"
"path/filepath"
"strings"
@@ -10,27 +9,21 @@ import (
"github.com/logrusorgru/aurora"
"github.com/pkg/errors"
- "github.com/remeh/sizedwaitgroup"
- "github.com/rs/xid"
- "go.uber.org/atomic"
"go.uber.org/ratelimit"
- "gopkg.in/yaml.v2"
- "github.com/projectdiscovery/filekv"
- "github.com/projectdiscovery/fileutil"
"github.com/projectdiscovery/gologger"
- "github.com/projectdiscovery/hmap/store/hybrid"
"github.com/projectdiscovery/nuclei/v2/internal/colorizer"
"github.com/projectdiscovery/nuclei/v2/pkg/catalog"
"github.com/projectdiscovery/nuclei/v2/pkg/catalog/config"
"github.com/projectdiscovery/nuclei/v2/pkg/catalog/loader"
+ "github.com/projectdiscovery/nuclei/v2/pkg/core"
+ "github.com/projectdiscovery/nuclei/v2/pkg/core/inputs/hybrid"
"github.com/projectdiscovery/nuclei/v2/pkg/model/types/severity"
"github.com/projectdiscovery/nuclei/v2/pkg/output"
"github.com/projectdiscovery/nuclei/v2/pkg/parsers"
"github.com/projectdiscovery/nuclei/v2/pkg/progress"
"github.com/projectdiscovery/nuclei/v2/pkg/projectfile"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols"
- "github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/clusterer"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/hosterrorscache"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/interactsh"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/protocolinit"
@@ -42,26 +35,25 @@ import (
"github.com/projectdiscovery/nuclei/v2/pkg/types"
"github.com/projectdiscovery/nuclei/v2/pkg/utils"
"github.com/projectdiscovery/nuclei/v2/pkg/utils/stats"
+ yamlwrapper "github.com/projectdiscovery/nuclei/v2/pkg/utils/yaml"
)
// Runner is a client for running the enumeration process.
type Runner struct {
- hostMap *hybrid.HybridMap
- hostMapStream *filekv.FileDB
- output output.Writer
- interactsh *interactsh.Client
- inputCount int64
- templatesConfig *config.Config
- options *types.Options
- projectFile *projectfile.ProjectFile
- catalog *catalog.Catalog
- progress progress.Progress
- colorizer aurora.Aurora
- issuesClient *reporting.Client
- addColor func(severity.Severity) string
- browser *engine.Browser
- ratelimiter ratelimit.Limiter
- hostErrors *hosterrorscache.Cache
+ output output.Writer
+ interactsh *interactsh.Client
+ templatesConfig *config.Config
+ options *types.Options
+ projectFile *projectfile.ProjectFile
+ catalog *catalog.Catalog
+ progress progress.Progress
+ colorizer aurora.Aurora
+ issuesClient *reporting.Client
+ addColor func(severity.Severity) string
+ hmapInputProvider *hybrid.Input
+ browser *engine.Browser
+ ratelimiter ratelimit.Limiter
+ hostErrors *hosterrorscache.Cache
}
// New creates a new client for running enumeration process.
@@ -77,11 +69,16 @@ func New(options *types.Options) (*Runner, error) {
}
if options.Validate {
parsers.ShouldValidate = true
+ // Do not update the templates when the validate flag is used
+ options.NoUpdateTemplates = true
}
if err := runner.updateTemplates(); err != nil {
gologger.Warning().Msgf("Could not update templates: %s\n", err)
}
if options.Headless {
+ if engine.MustDisableSandbox() {
+ gologger.Warning().Msgf("The current platform and privileged user will run the browser without sandbox\n")
+ }
browser, err := engine.New(options)
if err != nil {
return nil, err
@@ -116,106 +113,16 @@ func New(options *types.Options) (*Runner, error) {
if (len(options.Templates) == 0 || !options.NewTemplates || (options.TargetsFilePath == "" && !options.Stdin && len(options.Targets) == 0)) && options.UpdateTemplates {
os.Exit(0)
}
- hm, err := hybrid.New(hybrid.DefaultDiskOptions)
+
+ // Initialize the input source
+ hmapInput, err := hybrid.New(options)
if err != nil {
- return nil, errors.Wrap(err, "could not create temporary input file")
- }
- runner.hostMap = hm
-
- if options.Stream {
- fkvOptions := filekv.DefaultOptions
- if tmpFileName, err := fileutil.GetTempFileName(); err != nil {
- return nil, errors.Wrap(err, "could not create temporary input file")
- } else {
- fkvOptions.Path = tmpFileName
- }
- fkv, err := filekv.Open(fkvOptions)
- if err != nil {
- return nil, errors.Wrap(err, "could not create temporary unsorted input file")
- }
- runner.hostMapStream = fkv
- }
-
- runner.inputCount = 0
- dupeCount := 0
-
- // Handle multiple targets
- if len(options.Targets) != 0 {
- for _, target := range options.Targets {
- url := strings.TrimSpace(target)
- if url == "" {
- continue
- }
-
- if _, ok := runner.hostMap.Get(url); ok {
- dupeCount++
- continue
- }
-
- runner.inputCount++
- // nolint:errcheck // ignoring error
- runner.hostMap.Set(url, nil)
- if options.Stream {
- _ = runner.hostMapStream.Set([]byte(url), nil)
- }
- }
- }
-
- // Handle stdin
- if options.Stdin {
- scanner := bufio.NewScanner(os.Stdin)
- for scanner.Scan() {
- url := strings.TrimSpace(scanner.Text())
- if url == "" {
- continue
- }
-
- if _, ok := runner.hostMap.Get(url); ok {
- dupeCount++
- continue
- }
-
- runner.inputCount++
- // nolint:errcheck // ignoring error
- runner.hostMap.Set(url, nil)
- if options.Stream {
- _ = runner.hostMapStream.Set([]byte(url), nil)
- }
- }
- }
-
- // Handle target file
- if options.TargetsFilePath != "" {
- input, inputErr := os.Open(options.TargetsFilePath)
- if inputErr != nil {
- return nil, errors.Wrap(inputErr, "could not open targets file")
- }
- scanner := bufio.NewScanner(input)
- for scanner.Scan() {
- url := strings.TrimSpace(scanner.Text())
- if url == "" {
- continue
- }
- if _, ok := runner.hostMap.Get(url); ok {
- dupeCount++
- continue
- }
- runner.inputCount++
- // nolint:errcheck // ignoring error
- runner.hostMap.Set(url, nil)
- if options.Stream {
- _ = runner.hostMapStream.Set([]byte(url), nil)
- }
- }
- input.Close()
- }
-
- if dupeCount > 0 {
- gologger.Info().Msgf("Supplied input was automatically deduplicated (%d removed).", dupeCount)
+ return nil, errors.Wrap(err, "could not create input provider")
}
+ runner.hmapInputProvider = hmapInput
// Create the output file if asked
- outputWriter, err := output.NewStandardWriter(!options.NoColor, options.NoMeta, options.NoTimestamp, options.JSON, options.JSONRequests, options.Output, options.TraceLogFile)
+ outputWriter, err := output.NewStandardWriter(!options.NoColor, options.NoMeta, options.NoTimestamp, options.JSON, options.JSONRequests, options.MatcherStatus, options.Output, options.TraceLogFile, options.ErrorLogFile)
if err != nil {
return nil, errors.Wrap(err, "could not create output file")
}
@@ -243,24 +150,21 @@ func New(options *types.Options) (*Runner, error) {
}
}
- if !options.NoInteractsh {
- interactshClient, err := interactsh.New(&interactsh.Options{
- ServerURL: options.InteractshURL,
- Authorization: options.InteractshToken,
- CacheSize: int64(options.InteractionsCacheSize),
- Eviction: time.Duration(options.InteractionsEviction) * time.Second,
- ColldownPeriod: time.Duration(options.InteractionsColldownPeriod) * time.Second,
- PollDuration: time.Duration(options.InteractionsPollDuration) * time.Second,
- Output: runner.output,
- IssuesClient: runner.issuesClient,
- Progress: runner.progress,
- Debug: runner.options.Debug,
- })
- if err != nil {
- gologger.Error().Msgf("Could not create interactsh client: %s", err)
- } else {
- runner.interactsh = interactshClient
- }
+ opts := interactsh.NewDefaultOptions(runner.output, runner.issuesClient, runner.progress)
+ opts.Debug = runner.options.Debug
+ opts.ServerURL = options.InteractshURL
+ opts.Authorization = options.InteractshToken
+ opts.CacheSize = int64(options.InteractionsCacheSize)
+ opts.Eviction = time.Duration(options.InteractionsEviction) * time.Second
+ opts.ColldownPeriod = time.Duration(options.InteractionsCoolDownPeriod) * time.Second
+ opts.PollDuration = time.Duration(options.InteractionsPollDuration) * time.Second
+ opts.NoInteractsh = runner.options.NoInteractsh
+
+ interactshClient, err := interactsh.New(opts)
+ if err != nil {
+ gologger.Error().Msgf("Could not create interactsh client: %s", err)
+ } else {
+ runner.interactsh = interactshClient
}
if options.RateLimitMinute > 0 {
@@ -282,9 +186,9 @@ func createReportingOptions(options *types.Options) (*reporting.Options, error)
}
reportingOptions = &reporting.Options{}
- if parseErr := yaml.NewDecoder(file).Decode(reportingOptions); parseErr != nil {
+ if err := yamlwrapper.DecodeAndValidate(file, reportingOptions); err != nil {
file.Close()
- return nil, errors.Wrap(parseErr, "could not parse reporting config file")
+ return nil, errors.Wrap(err, "could not parse reporting config file")
}
file.Close()
}
@@ -312,13 +216,10 @@ func (r *Runner) Close() {
if r.output != nil {
r.output.Close()
}
- r.hostMap.Close()
if r.projectFile != nil {
r.projectFile.Close()
}
- if r.options.Stream {
- r.hostMapStream.Close()
- }
+ r.hmapInputProvider.Close()
protocolinit.Close()
}
@@ -335,15 +236,20 @@ func (r *Runner) RunEnumeration() error {
}
r.options.Templates = append(r.options.Templates, templatesLoaded...)
}
- ignoreFile := config.ReadIgnoreFile()
- r.options.ExcludeTags = append(r.options.ExcludeTags, ignoreFile.Tags...)
- r.options.ExcludedTemplates = append(r.options.ExcludedTemplates, ignoreFile.Files...)
-
+ // Do not apply the ignore file when templates are only being validated
+ if !r.options.Validate {
+ ignoreFile := config.ReadIgnoreFile()
+ r.options.ExcludeTags = append(r.options.ExcludeTags, ignoreFile.Tags...)
+ r.options.ExcludedTemplates = append(r.options.ExcludedTemplates, ignoreFile.Files...)
+ }
var cache *hosterrorscache.Cache
if r.options.MaxHostError > 0 {
cache = hosterrorscache.New(r.options.MaxHostError, hosterrorscache.DefaultMaxHostsCount).SetVerbose(r.options.Verbose)
}
r.hostErrors = cache
+
+ // Create the executer options which will be used throughout the execution
+ // stage by the nuclei engine modules.
executerOpts := protocols.ExecuterOptions{
Output: r.output,
Options: r.options,
@@ -355,31 +261,18 @@ func (r *Runner) RunEnumeration() error {
ProjectFile: r.projectFile,
Browser: r.browser,
HostErrorsCache: cache,
+ Colorizer: r.colorizer,
}
+ engine := core.New(r.options)
+ engine.SetExecuterOptions(executerOpts)
workflowLoader, err := parsers.NewLoader(&executerOpts)
if err != nil {
return errors.Wrap(err, "Could not create loader.")
}
-
executerOpts.WorkflowLoader = workflowLoader
- loaderConfig := loader.Config{
- Templates: r.options.Templates,
- Workflows: r.options.Workflows,
- ExcludeTemplates: r.options.ExcludedTemplates,
- Tags: r.options.Tags,
- ExcludeTags: r.options.ExcludeTags,
- IncludeTemplates: r.options.IncludeTemplates,
- Authors: r.options.Author,
- Severities: r.options.Severities,
- ExcludeSeverities: r.options.ExcludeSeverities,
- IncludeTags: r.options.IncludeTags,
- TemplatesDirectory: r.options.TemplatesDirectory,
- Catalog: r.catalog,
- ExecutorOptions: executerOpts,
- }
- store, err := loader.New(&loaderConfig)
+ store, err := loader.New(loader.NewConfig(r.options, r.catalog, executerOpts))
if err != nil {
return errors.Wrap(err, "could not load templates from config")
}
@@ -389,7 +282,7 @@ func (r *Runner) RunEnumeration() error {
if err := store.ValidateTemplates(r.options.Templates, r.options.Workflows); err != nil {
return err
}
- if stats.GetValue(parsers.SyntaxErrorStats) == 0 && stats.GetValue(parsers.SyntaxWarningStats) == 0 {
+ if stats.GetValue(parsers.SyntaxErrorStats) == 0 && stats.GetValue(parsers.SyntaxWarningStats) == 0 && stats.GetValue(parsers.RuntimeWarningsStats) == 0 {
gologger.Info().Msgf("All templates validated successfully\n")
} else {
return errors.New("encountered errors while performing template validation")
@@ -397,9 +290,82 @@ func (r *Runner) RunEnumeration() error {
return nil // exit
}
+ r.displayExecutionInfo(store)
+
+ var unclusteredRequests int64
+ for _, template := range store.Templates() {
+ // workflows will dynamically adjust the totals while running, as
+ // it can't be known in advance which requests will be called
+ if len(template.Workflows) > 0 {
+ continue
+ }
+ unclusteredRequests += int64(template.TotalRequests) * r.hmapInputProvider.Count()
+ }
+
+ if r.options.VerboseVerbose {
+ for _, template := range store.Templates() {
+ r.logAvailableTemplate(template.Path)
+ }
+ for _, template := range store.Workflows() {
+ r.logAvailableTemplate(template.Path)
+ }
+ }
+
+ // Cluster the templates first because we want to know how many
+ // templates were clustered, so the count can be shown to the user in the CLI
+ originalTemplatesCount := len(store.Templates())
+ finalTemplates, clusterCount := templates.ClusterTemplates(store.Templates(), engine.ExecuterOptions())
+ finalTemplates = append(finalTemplates, store.Workflows()...)
+
+ var totalRequests int64
+ for _, t := range finalTemplates {
+ if len(t.Workflows) > 0 {
+ continue
+ }
+ totalRequests += int64(t.TotalRequests) * r.hmapInputProvider.Count()
+ }
+ if totalRequests < unclusteredRequests {
+ gologger.Info().Msgf("Templates clustered: %d (Reduced %d HTTP Requests)", clusterCount, unclusteredRequests-totalRequests)
+ }
+ workflowCount := len(store.Workflows())
+ templateCount := originalTemplatesCount + workflowCount
+
+ // 0 matches means no templates were found in directory
+ if templateCount == 0 {
+ return errors.New("no valid templates were found")
+ }
+
+ // tracks global progress and captures stdout/stderr until p.Wait finishes
+ r.progress.Init(r.hmapInputProvider.Count(), templateCount, totalRequests)
+
+ results := engine.ExecuteWithOpts(finalTemplates, r.hmapInputProvider, true)
+
+ if r.interactsh != nil {
+ matched := r.interactsh.Close()
+ if matched {
+ results.CAS(false, true)
+ }
+ }
+ r.progress.Stop()
+
+ if r.issuesClient != nil {
+ r.issuesClient.Close()
+ }
+ if !results.Load() {
+ gologger.Info().Msgf("No results found. Better luck next time!")
+ }
+ if r.browser != nil {
+ r.browser.Close()
+ }
+ return nil
+}
+
+// displayExecutionInfo displays misc info about the nuclei engine execution
+func (r *Runner) displayExecutionInfo(store *loader.Store) {
// Display stats for any loaded templates' syntax warnings or errors
stats.Display(parsers.SyntaxWarningStats)
stats.Display(parsers.SyntaxErrorStats)
+ stats.Display(parsers.RuntimeWarningsStats)
builder := &strings.Builder{}
if r.templatesConfig != nil && r.templatesConfig.NucleiLatestVersion != "" {
@@ -445,128 +411,6 @@ func (r *Runner) RunEnumeration() error {
if len(store.Workflows()) > 0 {
gologger.Info().Msgf("Workflows loaded for scan: %d", len(store.Workflows()))
}
-
- // pre-parse all the templates, apply filters
- finalTemplates := []*templates.Template{}
-
- var unclusteredRequests int64
- for _, template := range store.Templates() {
- // workflows will dynamically adjust the totals while running, as
- // it can't be known in advance which requests will be called
- if len(template.Workflows) > 0 {
- continue
- }
- unclusteredRequests += int64(template.TotalRequests) * r.inputCount
- }
-
- if r.options.VerboseVerbose {
- for _, template := range store.Templates() {
- r.logAvailableTemplate(template.Path)
- }
- for _, template := range store.Workflows() {
- r.logAvailableTemplate(template.Path)
- }
- }
- templatesMap := make(map[string]*templates.Template)
- for _, v := range store.Templates() {
- templatesMap[v.Path] = v
- }
- originalTemplatesCount := len(store.Templates())
- clusterCount := 0
- clusters := clusterer.Cluster(templatesMap)
- for _, cluster := range clusters {
- if len(cluster) > 1 && !r.options.OfflineHTTP {
- executerOpts := protocols.ExecuterOptions{
- Output: r.output,
- Options: r.options,
- Progress: r.progress,
- Catalog: r.catalog,
- RateLimiter: r.ratelimiter,
- IssuesClient: r.issuesClient,
- Browser: r.browser,
- ProjectFile: r.projectFile,
- Interactsh: r.interactsh,
- HostErrorsCache: cache,
- }
- clusterID := fmt.Sprintf("cluster-%s", xid.New().String())
-
- finalTemplates = append(finalTemplates, &templates.Template{
- ID: clusterID,
- RequestsHTTP: cluster[0].RequestsHTTP,
- Executer: clusterer.NewExecuter(cluster, &executerOpts),
- TotalRequests: len(cluster[0].RequestsHTTP),
- })
- clusterCount += len(cluster)
- } else {
- finalTemplates = append(finalTemplates, cluster...)
- }
- }
-
- finalTemplates = append(finalTemplates, store.Workflows()...)
-
- var totalRequests int64
- for _, t := range finalTemplates {
- if len(t.Workflows) > 0 {
- continue
- }
- totalRequests += int64(t.TotalRequests) * r.inputCount
- }
- if totalRequests < unclusteredRequests {
- gologger.Info().Msgf("Templates clustered: %d (Reduced %d HTTP Requests)", clusterCount, unclusteredRequests-totalRequests)
- }
- workflowCount := len(store.Workflows())
- templateCount := originalTemplatesCount + workflowCount
-
- // 0 matches means no templates were found in directory
- if templateCount == 0 {
- return errors.New("no valid templates were found")
- }
-
- /*
- TODO does it make sense to run the logic below if there are no targets specified?
- Can we safely assume the user is just experimenting with the template/workflow filters before running them?
- */
-
- results := &atomic.Bool{}
- wgtemplates := sizedwaitgroup.New(r.options.TemplateThreads)
-
- // tracks global progress and captures stdout/stderr until p.Wait finishes
- r.progress.Init(r.inputCount, templateCount, totalRequests)
-
- for _, t := range finalTemplates {
- wgtemplates.Add()
- go func(template *templates.Template) {
- defer wgtemplates.Done()
-
- if template.SelfContained {
- results.CAS(false, r.processSelfContainedTemplates(template))
- } else if len(template.Workflows) > 0 {
- results.CAS(false, r.processWorkflowWithList(template))
- } else {
- results.CAS(false, r.processTemplateWithList(template))
- }
- }(t)
- }
- wgtemplates.Wait()
-
- if r.interactsh != nil {
- matched := r.interactsh.Close()
- if matched {
- results.CAS(false, true)
- }
- }
- r.progress.Stop()
-
- if r.issuesClient != nil {
- r.issuesClient.Close()
- }
- if !results.Load() {
- gologger.Info().Msgf("No results found. Better luck next time!")
- }
- if r.browser != nil {
- r.browser.Close()
- }
- return nil
}
// readNewTemplatesFile reads newly added templates from directory if it exists
diff --git a/v2/internal/runner/update.go b/v2/internal/runner/update.go
index 217f34008..99d94cc88 100644
--- a/v2/internal/runner/update.go
+++ b/v2/internal/runner/update.go
@@ -54,7 +54,7 @@ func (r *Runner) updateTemplates() error { // TODO this method does more than ju
return err
}
configDir := filepath.Join(home, ".config", "nuclei")
- _ = os.MkdirAll(configDir, os.ModePerm)
+ _ = os.MkdirAll(configDir, 0755)
if err := r.readInternalConfigurationFile(home, configDir); err != nil {
return errors.Wrap(err, "could not read configuration file")
@@ -242,12 +242,12 @@ func (r *Runner) getLatestReleaseFromGithub(latestTag string) (*github.Repositor
func (r *Runner) downloadReleaseAndUnzip(ctx context.Context, version, downloadURL string) (*templateUpdateResults, error) {
req, err := http.NewRequestWithContext(ctx, http.MethodGet, downloadURL, nil)
if err != nil {
- return nil, fmt.Errorf("failed to create HTTP request to %s: %s", downloadURL, err)
+ return nil, fmt.Errorf("failed to create HTTP request to %s: %w", downloadURL, err)
}
res, err := http.DefaultClient.Do(req)
if err != nil {
- return nil, fmt.Errorf("failed to download a release file from %s: %s", downloadURL, err)
+ return nil, fmt.Errorf("failed to download a release file from %s: %w", downloadURL, err)
}
defer res.Body.Close()
if res.StatusCode != http.StatusOK {
@@ -256,23 +256,23 @@ func (r *Runner) downloadReleaseAndUnzip(ctx context.Context, version, downloadU
buf, err := ioutil.ReadAll(res.Body)
if err != nil {
- return nil, fmt.Errorf("failed to create buffer for zip file: %s", err)
+ return nil, fmt.Errorf("failed to create buffer for zip file: %w", err)
}
reader := bytes.NewReader(buf)
zipReader, err := zip.NewReader(reader, reader.Size())
if err != nil {
- return nil, fmt.Errorf("failed to uncompress zip file: %s", err)
+ return nil, fmt.Errorf("failed to uncompress zip file: %w", err)
}
// Create the template folder if it doesn't exist
- if err := os.MkdirAll(r.templatesConfig.TemplatesDirectory, os.ModePerm); err != nil {
- return nil, fmt.Errorf("failed to create template base folder: %s", err)
+ if err := os.MkdirAll(r.templatesConfig.TemplatesDirectory, 0755); err != nil {
+ return nil, fmt.Errorf("failed to create template base folder: %w", err)
}
results, err := r.compareAndWriteTemplates(zipReader)
if err != nil {
- return nil, fmt.Errorf("failed to write templates: %s", err)
+ return nil, fmt.Errorf("failed to write templates: %w", err)
}
if r.options.Verbose {
@@ -291,7 +291,7 @@ func (r *Runner) downloadReleaseAndUnzip(ctx context.Context, version, downloadU
buffer.WriteString("\n")
}
- if err := ioutil.WriteFile(additionsFile, buffer.Bytes(), os.ModePerm); err != nil {
+ if err := ioutil.WriteFile(additionsFile, buffer.Bytes(), 0644); err != nil {
return nil, errors.Wrap(err, "could not write new additions file")
}
return results, err
@@ -316,60 +316,42 @@ func (r *Runner) compareAndWriteTemplates(zipReader *zip.Reader) (*templateUpdat
// If the path isn't found in new update after being read from the previous checksum,
// it is removed. This allows us fine-grained control over the download process
// as well as solves a long problem with nuclei-template updates.
- checksumFile := filepath.Join(r.templatesConfig.TemplatesDirectory, ".checksum")
+ configuredTemplateDirectory := r.templatesConfig.TemplatesDirectory
+ checksumFile := filepath.Join(configuredTemplateDirectory, ".checksum")
templateChecksumsMap, _ := createTemplateChecksumsMap(checksumFile)
for _, zipTemplateFile := range zipReader.File {
- directory, name := filepath.Split(zipTemplateFile.Name)
- if name == "" {
+ templateAbsolutePath, skipFile, err := calculateTemplateAbsolutePath(zipTemplateFile.Name, configuredTemplateDirectory)
+ if err != nil {
+ return nil, err
+ }
+ if skipFile {
continue
}
- paths := strings.Split(directory, string(os.PathSeparator))
- finalPath := filepath.Join(paths[1:]...)
-
- if strings.HasPrefix(name, ".") || strings.HasPrefix(finalPath, ".") || strings.EqualFold(name, "README.md") {
- continue
- }
- results.totalCount++
- templateDirectory := filepath.Join(r.templatesConfig.TemplatesDirectory, finalPath)
- if err := os.MkdirAll(templateDirectory, os.ModePerm); err != nil {
- return nil, fmt.Errorf("failed to create template folder %s : %s", templateDirectory, err)
- }
-
- templatePath := filepath.Join(templateDirectory, name)
isAddition := false
- if _, statErr := os.Stat(templatePath); os.IsNotExist(statErr) {
+ if _, statErr := os.Stat(templateAbsolutePath); os.IsNotExist(statErr) {
isAddition = true
}
- templateFile, err := os.OpenFile(templatePath, os.O_TRUNC|os.O_CREATE|os.O_WRONLY, 0777)
+
+ newTemplateChecksum, err := writeUnZippedTemplateFile(templateAbsolutePath, zipTemplateFile)
if err != nil {
- templateFile.Close()
- return nil, fmt.Errorf("could not create uncompressed file: %s", err)
+ return nil, err
}
- zipTemplateFileReader, err := zipTemplateFile.Open()
+ oldTemplateChecksum, checksumOk := templateChecksumsMap[templateAbsolutePath]
+
+ relativeTemplatePath, err := filepath.Rel(configuredTemplateDirectory, templateAbsolutePath)
if err != nil {
- templateFile.Close()
- return nil, fmt.Errorf("could not open archive to extract file: %s", err)
+ return nil, fmt.Errorf("could not calculate relative path for template: %s. %w", templateAbsolutePath, err)
}
- hasher := md5.New()
- // Save file and also read into hasher for md5
- if _, err := io.Copy(templateFile, io.TeeReader(zipTemplateFileReader, hasher)); err != nil {
- templateFile.Close()
- return nil, fmt.Errorf("could not write template file: %s", err)
- }
- templateFile.Close()
-
- oldChecksum, checksumOK := templateChecksumsMap[templatePath]
-
- checksum := hex.EncodeToString(hasher.Sum(nil))
if isAddition {
- results.additions = append(results.additions, filepath.Join(finalPath, name))
- } else if checksumOK && oldChecksum[0] != checksum {
- results.modifications = append(results.modifications, filepath.Join(finalPath, name))
+ results.additions = append(results.additions, relativeTemplatePath)
+ } else if checksumOk && oldTemplateChecksum[0] != newTemplateChecksum {
+ results.modifications = append(results.modifications, relativeTemplatePath)
}
- results.checksums[templatePath] = checksum
+ results.checksums[templateAbsolutePath] = newTemplateChecksum
+ results.totalCount++
}
// If we don't find the previous file in the newly downloaded list,
@@ -378,12 +360,63 @@ func (r *Runner) compareAndWriteTemplates(zipReader *zip.Reader) (*templateUpdat
_, ok := results.checksums[templatePath]
if !ok && templateChecksums[0] == templateChecksums[1] {
_ = os.Remove(templatePath)
- results.deletions = append(results.deletions, strings.TrimPrefix(strings.TrimPrefix(templatePath, r.templatesConfig.TemplatesDirectory), string(os.PathSeparator)))
+ results.deletions = append(results.deletions, strings.TrimPrefix(strings.TrimPrefix(templatePath, configuredTemplateDirectory), string(os.PathSeparator)))
}
}
return results, nil
}
+func writeUnZippedTemplateFile(templateAbsolutePath string, zipTemplateFile *zip.File) (string, error) {
+ templateFile, err := os.OpenFile(templateAbsolutePath, os.O_TRUNC|os.O_CREATE|os.O_WRONLY, 0644)
+ if err != nil {
+ return "", fmt.Errorf("could not create template file: %w", err)
+ }
+
+ zipTemplateFileReader, err := zipTemplateFile.Open()
+ if err != nil {
+ _ = templateFile.Close()
+ return "", fmt.Errorf("could not open archive to extract file: %w", err)
+ }
+
+ md5Hash := md5.New()
+
+ // Save file and also read into hash.Hash for md5
+ if _, err := io.Copy(templateFile, io.TeeReader(zipTemplateFileReader, md5Hash)); err != nil {
+ _ = templateFile.Close()
+ return "", fmt.Errorf("could not write template file: %w", err)
+ }
+
+ if err := templateFile.Close(); err != nil {
+ return "", fmt.Errorf("could not close file newly created template file: %w", err)
+ }
+
+ checksum := hex.EncodeToString(md5Hash.Sum(nil))
+ return checksum, nil
+}
+
+func calculateTemplateAbsolutePath(zipFilePath, configuredTemplateDirectory string) (string, bool, error) {
+ directory, fileName := filepath.Split(zipFilePath)
+
+ if strings.TrimSpace(fileName) == "" || strings.HasPrefix(fileName, ".") || strings.EqualFold(fileName, "README.md") {
+ return "", true, nil
+ }
+
+ directoryPathChunks := strings.Split(directory, string(os.PathSeparator))
+ relativeDirectoryPathWithoutZipRoot := filepath.Join(directoryPathChunks[1:]...)
+
+ if strings.HasPrefix(relativeDirectoryPathWithoutZipRoot, ".") {
+ return "", true, nil
+ }
+
+ templateDirectory := filepath.Join(configuredTemplateDirectory, relativeDirectoryPathWithoutZipRoot)
+
+ if err := os.MkdirAll(templateDirectory, 0755); err != nil {
+ return "", false, fmt.Errorf("failed to create template folder: %s. %w", templateDirectory, err)
+ }
+
+ return filepath.Join(templateDirectory, fileName), false, nil
+}
+
// createTemplateChecksumsMap reads the previous checksum file from the disk.
// Creates a map of template paths and their previous and currently calculated checksums as values.
func createTemplateChecksumsMap(checksumsFilePath string) (map[string][2]string, error) {
diff --git a/v2/internal/runner/update_test.go b/v2/internal/runner/update_test.go
index dc57140c2..e8153b42e 100644
--- a/v2/internal/runner/update_test.go
+++ b/v2/internal/runner/update_test.go
@@ -12,10 +12,11 @@ import (
"strings"
"testing"
- "github.com/projectdiscovery/gologger"
- "github.com/projectdiscovery/nuclei/v2/internal/testutils"
- "github.com/projectdiscovery/nuclei/v2/pkg/catalog/config"
"github.com/stretchr/testify/require"
+
+ "github.com/projectdiscovery/gologger"
+ "github.com/projectdiscovery/nuclei/v2/pkg/catalog/config"
+ "github.com/projectdiscovery/nuclei/v2/pkg/testutils"
)
func TestDownloadReleaseAndUnzipAddition(t *testing.T) {
@@ -25,7 +26,7 @@ func TestDownloadReleaseAndUnzipAddition(t *testing.T) {
require.Nil(t, err, "could not create temp directory")
defer os.RemoveAll(baseTemplates)
- err = ioutil.WriteFile(filepath.Join(baseTemplates, "base.yaml"), []byte("id: test"), 0777)
+ err = ioutil.WriteFile(filepath.Join(baseTemplates, "base.yaml"), []byte("id: test"), os.ModePerm)
require.Nil(t, err, "could not create write base file")
err = zipFromDirectory("base.zip", baseTemplates)
@@ -50,9 +51,9 @@ func TestDownloadReleaseAndUnzipAddition(t *testing.T) {
require.Nil(t, err, "could not create temp directory")
defer os.RemoveAll(newTempDir)
- err = ioutil.WriteFile(filepath.Join(newTempDir, "base.yaml"), []byte("id: test"), 0777)
+ err = ioutil.WriteFile(filepath.Join(newTempDir, "base.yaml"), []byte("id: test"), os.ModePerm)
require.Nil(t, err, "could not create base file")
- err = ioutil.WriteFile(filepath.Join(newTempDir, "new.yaml"), []byte("id: test"), 0777)
+ err = ioutil.WriteFile(filepath.Join(newTempDir, "new.yaml"), []byte("id: test"), os.ModePerm)
require.Nil(t, err, "could not create new file")
err = zipFromDirectory("new.zip", newTempDir)
@@ -77,7 +78,7 @@ func TestDownloadReleaseAndUnzipDeletion(t *testing.T) {
require.Nil(t, err, "could not create temp directory")
defer os.RemoveAll(baseTemplates)
- err = ioutil.WriteFile(filepath.Join(baseTemplates, "base.yaml"), []byte("id: test"), 0777)
+ err = ioutil.WriteFile(filepath.Join(baseTemplates, "base.yaml"), []byte("id: test"), os.ModePerm)
require.Nil(t, err, "could not create write base file")
err = zipFromDirectory("base.zip", baseTemplates)
@@ -118,6 +119,43 @@ func TestDownloadReleaseAndUnzipDeletion(t *testing.T) {
require.Equal(t, "base.yaml", results.deletions[0], "could not get correct new deletions")
}
+func TestCalculateTemplateAbsolutePath(t *testing.T) {
+ configuredTemplateDirectory := filepath.Join(os.TempDir(), "templates")
+ defer os.RemoveAll(configuredTemplateDirectory)
+
+ t.Run("positive scenarios", func(t *testing.T) {
+ zipFilePathsExpectedPathsMap := map[string]string{
+ "nuclei-templates/cve/test.yaml": filepath.Join(configuredTemplateDirectory, "cve/test.yaml"),
+ "nuclei-templates/cve/test/test.yaml": filepath.Join(configuredTemplateDirectory, "cve/test/test.yaml"),
+ }
+
+ for filePathFromZip, expectedTemplateAbsPath := range zipFilePathsExpectedPathsMap {
+ calculatedTemplateAbsPath, skipFile, err := calculateTemplateAbsolutePath(filePathFromZip, configuredTemplateDirectory)
+ require.Nil(t, err)
+ require.Equal(t, expectedTemplateAbsPath, calculatedTemplateAbsPath)
+ require.False(t, skipFile)
+ }
+ })
+
+ t.Run("negative scenarios", func(t *testing.T) {
+ filePathsFromZip := []string{
+ "./../nuclei-templates/../cve/test.yaml",
+ "nuclei-templates/../cve/test.yaml",
+ "nuclei-templates/cve/../test.yaml",
+ "nuclei-templates/././../cve/test.yaml",
+ "nuclei-templates/.././../cve/test.yaml",
+ "nuclei-templates/.././../cve/../test.yaml",
+ }
+
+ for _, filePathFromZip := range filePathsFromZip {
+ calculatedTemplateAbsPath, skipFile, err := calculateTemplateAbsolutePath(filePathFromZip, configuredTemplateDirectory)
+ require.Nil(t, err)
+ require.True(t, skipFile)
+ require.Equal(t, "", calculatedTemplateAbsPath)
+ }
+ })
+}
+
func zipFromDirectory(zipPath, directory string) error {
file, err := os.Create(zipPath)
if err != nil {
diff --git a/v2/pkg/catalog/config/config.go b/v2/pkg/catalog/config/config.go
index 8a996b62d..bb5cfb24f 100644
--- a/v2/pkg/catalog/config/config.go
+++ b/v2/pkg/catalog/config/config.go
@@ -26,7 +26,7 @@ type Config struct {
const nucleiConfigFilename = ".templates-config.json"
// Version is the current version of nuclei
-const Version = `2.5.3`
+const Version = `2.5.4`
func getConfigDetails() (string, error) {
homeDir, err := os.UserHomeDir()
@@ -34,7 +34,7 @@ func getConfigDetails() (string, error) {
return "", errors.Wrap(err, "could not get home directory")
}
configDir := filepath.Join(homeDir, ".config", "nuclei")
- _ = os.MkdirAll(configDir, os.ModePerm)
+ _ = os.MkdirAll(configDir, 0755)
templatesConfigFile := filepath.Join(configDir, nucleiConfigFilename)
return templatesConfigFile, nil
}
@@ -67,7 +67,7 @@ func WriteConfiguration(config *Config) error {
if err != nil {
return err
}
- file, err := os.OpenFile(templatesConfigFile, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0777)
+ file, err := os.OpenFile(templatesConfigFile, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644)
if err != nil {
return err
}
@@ -112,7 +112,7 @@ func getIgnoreFilePath() string {
home, err := os.UserHomeDir()
if err == nil {
configDir := filepath.Join(home, ".config", "nuclei")
- _ = os.MkdirAll(configDir, os.ModePerm)
+ _ = os.MkdirAll(configDir, 0755)
defIgnoreFilePath = filepath.Join(configDir, nucleiIgnoreFile)
return defIgnoreFilePath
diff --git a/v2/pkg/catalog/find.go b/v2/pkg/catalog/find.go
index 92e6d47f9..7b3ffc6b4 100644
--- a/v2/pkg/catalog/find.go
+++ b/v2/pkg/catalog/find.go
@@ -7,6 +7,7 @@ import (
"github.com/karrick/godirwalk"
"github.com/pkg/errors"
+
"github.com/projectdiscovery/gologger"
)
@@ -79,7 +80,7 @@ func (c *Catalog) GetTemplatePath(target string) ([]string, error) {
}
// convertPathToAbsolute resolves the paths provided to absolute paths
-// before doing any operations on them regardless of them being blob, folders, files, etc.
+// before doing any operations on them regardless of them being glob patterns, folders, files, etc.
func (c *Catalog) convertPathToAbsolute(t string) (string, error) {
if strings.Contains(t, "*") {
file := filepath.Base(t)
diff --git a/v2/pkg/catalog/loader/filter/tag_filter.go b/v2/pkg/catalog/loader/filter/tag_filter.go
index ed8722c90..271b8e207 100644
--- a/v2/pkg/catalog/loader/filter/tag_filter.go
+++ b/v2/pkg/catalog/loader/filter/tag_filter.go
@@ -5,6 +5,7 @@ import (
"strings"
"github.com/projectdiscovery/nuclei/v2/pkg/model/types/severity"
+ "github.com/projectdiscovery/nuclei/v2/pkg/templates/types"
)
// TagFilter is used to filter nuclei templates for tag based execution
@@ -15,6 +16,8 @@ type TagFilter struct {
authors map[string]struct{}
block map[string]struct{}
matchAllows map[string]struct{}
+ types map[types.ProtocolType]struct{}
+ excludeTypes map[types.ProtocolType]struct{}
}
// ErrExcluded is returned for excluded templates
@@ -25,7 +28,7 @@ var ErrExcluded = errors.New("the template was excluded")
// unless it is explicitly specified by user using the includeTags (matchAllows field).
// Matching rule: (tag1 OR tag2...) AND (author1 OR author2...) AND (severity1 OR severity2...) AND (extraTags1 OR extraTags2...)
// Returns true if the template matches the filter criteria, false otherwise.
-func (tagFilter *TagFilter) Match(templateTags, templateAuthors []string, templateSeverity severity.Severity, extraTags []string) (bool, error) {
+func (tagFilter *TagFilter) Match(templateTags, templateAuthors []string, templateSeverity severity.Severity, extraTags []string, templateType types.ProtocolType) (bool, error) {
for _, templateTag := range templateTags {
_, blocked := tagFilter.block[templateTag]
_, allowed := tagFilter.matchAllows[templateTag]
@@ -51,6 +54,9 @@ func (tagFilter *TagFilter) Match(templateTags, templateAuthors []string, templa
return false, nil
}
+ if !isTemplateTypeMatch(tagFilter, templateType) {
+ return false, nil
+ }
return true, nil
}
@@ -116,6 +122,27 @@ func isTagMatch(tagFilter *TagFilter, templateTags []string) bool {
return false
}
+func isTemplateTypeMatch(tagFilter *TagFilter, templateType types.ProtocolType) bool {
+ if len(tagFilter.excludeTypes) == 0 && len(tagFilter.types) == 0 {
+ return true
+ }
+ if templateType.String() == "" || templateType == types.InvalidProtocol {
+ return true
+ }
+
+ included := true
+ if len(tagFilter.types) > 0 {
+ _, included = tagFilter.types[templateType]
+ }
+
+ excluded := false
+ if len(tagFilter.excludeTypes) > 0 {
+ _, excluded = tagFilter.excludeTypes[templateType]
+ }
+
+ return included && !excluded
+}
+
type Config struct {
Tags []string
ExcludeTags []string
@@ -123,6 +150,8 @@ type Config struct {
Severities severity.Severities
ExcludeSeverities severity.Severities
IncludeTags []string
+ Protocols types.ProtocolTypes
+ ExcludeProtocols types.ProtocolTypes
}
// New returns a tag filter for nuclei tag based execution
@@ -136,6 +165,8 @@ func New(config *Config) *TagFilter {
excludeSeverities: make(map[severity.Severity]struct{}),
block: make(map[string]struct{}),
matchAllows: make(map[string]struct{}),
+ types: make(map[types.ProtocolType]struct{}),
+ excludeTypes: make(map[types.ProtocolType]struct{}),
}
for _, tag := range config.ExcludeTags {
for _, val := range splitCommaTrim(tag) {
@@ -177,6 +208,16 @@ func New(config *Config) *TagFilter {
delete(filter.block, val)
}
}
+ for _, tag := range config.Protocols {
+ if _, ok := filter.types[tag]; !ok {
+ filter.types[tag] = struct{}{}
+ }
+ }
+ for _, tag := range config.ExcludeProtocols {
+ if _, ok := filter.excludeTypes[tag]; !ok {
+ filter.excludeTypes[tag] = struct{}{}
+ }
+ }
return filter
}
@@ -189,9 +230,9 @@ func splitCommaTrim(value string) []string {
if !strings.Contains(value, ",") {
return []string{strings.ToLower(value)}
}
- splitted := strings.Split(value, ",")
- final := make([]string, len(splitted))
- for i, value := range splitted {
+ split := strings.Split(value, ",")
+ final := make([]string, len(split))
+ for i, value := range split {
final[i] = strings.ToLower(strings.TrimSpace(value))
}
return final
diff --git a/v2/pkg/catalog/loader/filter/tag_filter_test.go b/v2/pkg/catalog/loader/filter/tag_filter_test.go
index 22d18b189..0758f6d4a 100644
--- a/v2/pkg/catalog/loader/filter/tag_filter_test.go
+++ b/v2/pkg/catalog/loader/filter/tag_filter_test.go
@@ -6,6 +6,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/projectdiscovery/nuclei/v2/pkg/model/types/severity"
+ "github.com/projectdiscovery/nuclei/v2/pkg/templates/types"
)
func TestTagBasedFilter(t *testing.T) {
@@ -15,19 +16,19 @@ func TestTagBasedFilter(t *testing.T) {
})
t.Run("true", func(t *testing.T) {
- matched, _ := filter.Match([]string{"jira"}, []string{"pdteam"}, severity.Low, nil)
+ matched, _ := filter.Match([]string{"jira"}, []string{"pdteam"}, severity.Low, nil, types.HTTPProtocol)
require.True(t, matched, "could not get correct match")
})
t.Run("false", func(t *testing.T) {
- matched, _ := filter.Match([]string{"consul"}, []string{"pdteam"}, severity.Low, nil)
+ matched, _ := filter.Match([]string{"consul"}, []string{"pdteam"}, severity.Low, nil, types.HTTPProtocol)
require.False(t, matched, "could not get correct match")
})
t.Run("match-extra-tags-positive", func(t *testing.T) {
- matched, _ := filter.Match([]string{"cves", "vuln"}, []string{"pdteam"}, severity.Low, []string{"vuln"})
+ matched, _ := filter.Match([]string{"cves", "vuln"}, []string{"pdteam"}, severity.Low, []string{"vuln"}, types.HTTPProtocol)
require.True(t, matched, "could not get correct match")
})
t.Run("match-extra-tags-negative", func(t *testing.T) {
- matched, _ := filter.Match([]string{"cves"}, []string{"pdteam"}, severity.Low, []string{"vuln"})
+ matched, _ := filter.Match([]string{"cves"}, []string{"pdteam"}, severity.Low, []string{"vuln"}, types.HTTPProtocol)
require.False(t, matched, "could not get correct match")
})
}
@@ -36,7 +37,7 @@ func TestTagBasedFilter(t *testing.T) {
filter := New(&Config{
ExcludeTags: []string{"dos"},
})
- matched, err := filter.Match([]string{"dos"}, []string{"pdteam"}, severity.Low, nil)
+ matched, err := filter.Match([]string{"dos"}, []string{"pdteam"}, severity.Low, nil, types.HTTPProtocol)
require.False(t, matched, "could not get correct match")
require.Equal(t, ErrExcluded, err, "could not get correct error")
})
@@ -46,7 +47,7 @@ func TestTagBasedFilter(t *testing.T) {
ExcludeTags: []string{"dos", "fuzz"},
IncludeTags: []string{"fuzz"},
})
- matched, err := filter.Match([]string{"fuzz"}, []string{"pdteam"}, severity.Low, nil)
+ matched, err := filter.Match([]string{"fuzz"}, []string{"pdteam"}, severity.Low, nil, types.HTTPProtocol)
require.Nil(t, err, "could not get match")
require.True(t, matched, "could not get correct match")
})
@@ -55,7 +56,7 @@ func TestTagBasedFilter(t *testing.T) {
Tags: []string{"fuzz"},
ExcludeTags: []string{"fuzz"},
})
- matched, err := filter.Match([]string{"fuzz"}, []string{"pdteam"}, severity.Low, nil)
+ matched, err := filter.Match([]string{"fuzz"}, []string{"pdteam"}, severity.Low, nil, types.HTTPProtocol)
require.Nil(t, err, "could not get match")
require.True(t, matched, "could not get correct match")
})
@@ -63,24 +64,24 @@ func TestTagBasedFilter(t *testing.T) {
filter := New(&Config{
Authors: []string{"pdteam"},
})
- matched, _ := filter.Match([]string{"fuzz"}, []string{"pdteam"}, severity.Low, nil)
+ matched, _ := filter.Match([]string{"fuzz"}, []string{"pdteam"}, severity.Low, nil, types.HTTPProtocol)
require.True(t, matched, "could not get correct match")
})
t.Run("match-severity", func(t *testing.T) {
filter := New(&Config{
Severities: severity.Severities{severity.High},
})
- matched, _ := filter.Match([]string{"fuzz"}, []string{"pdteam"}, severity.High, nil)
+ matched, _ := filter.Match([]string{"fuzz"}, []string{"pdteam"}, severity.High, nil, types.HTTPProtocol)
require.True(t, matched, "could not get correct match")
})
t.Run("match-exclude-severity", func(t *testing.T) {
filter := New(&Config{
ExcludeSeverities: severity.Severities{severity.Low},
})
- matched, _ := filter.Match([]string{"fuzz"}, []string{"pdteam"}, severity.High, nil)
+ matched, _ := filter.Match([]string{"fuzz"}, []string{"pdteam"}, severity.High, nil, types.HTTPProtocol)
require.True(t, matched, "could not get correct match")
- matched, _ = filter.Match([]string{"fuzz"}, []string{"pdteam"}, severity.Low, nil)
+ matched, _ = filter.Match([]string{"fuzz"}, []string{"pdteam"}, severity.Low, nil, types.HTTPProtocol)
require.False(t, matched, "could not get correct match")
})
t.Run("match-exclude-with-tags", func(t *testing.T) {
@@ -88,7 +89,7 @@ func TestTagBasedFilter(t *testing.T) {
Tags: []string{"tag"},
ExcludeTags: []string{"another"},
})
- matched, _ := filter.Match([]string{"another"}, []string{"pdteam"}, severity.High, nil)
+ matched, _ := filter.Match([]string{"another"}, []string{"pdteam"}, severity.High, nil, types.HTTPProtocol)
require.False(t, matched, "could not get correct match")
})
t.Run("match-conditions", func(t *testing.T) {
@@ -97,16 +98,33 @@ func TestTagBasedFilter(t *testing.T) {
Tags: []string{"jira"},
Severities: severity.Severities{severity.High},
})
- matched, _ := filter.Match([]string{"jira", "cve"}, []string{"pdteam", "someOtherUser"}, severity.High, nil)
+ matched, _ := filter.Match([]string{"jira", "cve"}, []string{"pdteam", "someOtherUser"}, severity.High, nil, types.HTTPProtocol)
require.True(t, matched, "could not get correct match")
- matched, _ = filter.Match([]string{"jira"}, []string{"pdteam"}, severity.Low, nil)
+ matched, _ = filter.Match([]string{"jira"}, []string{"pdteam"}, severity.Low, nil, types.HTTPProtocol)
require.False(t, matched, "could not get correct match")
- matched, _ = filter.Match([]string{"jira"}, []string{"random"}, severity.Low, nil)
+ matched, _ = filter.Match([]string{"jira"}, []string{"random"}, severity.Low, nil, types.HTTPProtocol)
require.False(t, matched, "could not get correct match")
- matched, _ = filter.Match([]string{"consul"}, []string{"random"}, severity.Low, nil)
+ matched, _ = filter.Match([]string{"consul"}, []string{"random"}, severity.Low, nil, types.HTTPProtocol)
+ require.False(t, matched, "could not get correct match")
+ })
+ t.Run("match-type", func(t *testing.T) {
+ filter := New(&Config{
+ Protocols: []types.ProtocolType{types.HTTPProtocol},
+ })
+ matched, _ := filter.Match([]string{"fuzz"}, []string{"pdteam"}, severity.High, nil, types.HTTPProtocol)
+ require.True(t, matched, "could not get correct match")
+ })
+ t.Run("match-exclude-type", func(t *testing.T) {
+ filter := New(&Config{
+ ExcludeProtocols: []types.ProtocolType{types.HTTPProtocol},
+ })
+ matched, _ := filter.Match([]string{"fuzz"}, []string{"pdteam"}, severity.High, nil, types.DNSProtocol)
+ require.True(t, matched, "could not get correct match")
+
+ matched, _ = filter.Match([]string{"fuzz"}, []string{"pdteam"}, severity.Low, nil, types.HTTPProtocol)
require.False(t, matched, "could not get correct match")
})
}
diff --git a/v2/pkg/catalog/loader/loader.go b/v2/pkg/catalog/loader/loader.go
index eac22b4ae..23dfd980a 100644
--- a/v2/pkg/catalog/loader/loader.go
+++ b/v2/pkg/catalog/loader/loader.go
@@ -10,17 +10,24 @@ import (
"github.com/projectdiscovery/nuclei/v2/pkg/parsers"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols"
"github.com/projectdiscovery/nuclei/v2/pkg/templates"
+ templateTypes "github.com/projectdiscovery/nuclei/v2/pkg/templates/types"
+ "github.com/projectdiscovery/nuclei/v2/pkg/types"
+ "github.com/projectdiscovery/nuclei/v2/pkg/utils/stats"
)
// Config contains the configuration options for the loader
type Config struct {
Templates []string
+ TemplateURLs []string
Workflows []string
+ WorkflowURLs []string
ExcludeTemplates []string
IncludeTemplates []string
Tags []string
ExcludeTags []string
+ Protocols templateTypes.ProtocolTypes
+ ExcludeProtocols templateTypes.ProtocolTypes
Authors []string
Severities severity.Severities
ExcludeSeverities severity.Severities
@@ -37,6 +44,7 @@ type Store struct {
pathFilter *filter.PathFilter
config *Config
finalTemplates []string
+ finalWorkflows []string
templates []*templates.Template
workflows []*templates.Template
@@ -44,6 +52,30 @@ type Store struct {
preprocessor templates.Preprocessor
}
+// NewConfig returns a new loader config
+func NewConfig(options *types.Options, catalog *catalog.Catalog, executerOpts protocols.ExecuterOptions) *Config {
+ loaderConfig := Config{
+ Templates: options.Templates,
+ Workflows: options.Workflows,
+ TemplateURLs: options.TemplateURLs,
+ WorkflowURLs: options.WorkflowURLs,
+ ExcludeTemplates: options.ExcludedTemplates,
+ Tags: options.Tags,
+ ExcludeTags: options.ExcludeTags,
+ IncludeTemplates: options.IncludeTemplates,
+ Authors: options.Authors,
+ Severities: options.Severities,
+ ExcludeSeverities: options.ExcludeSeverities,
+ IncludeTags: options.IncludeTags,
+ TemplatesDirectory: options.TemplatesDirectory,
+ Protocols: options.Protocols,
+ ExcludeProtocols: options.ExcludeProtocols,
+ Catalog: catalog,
+ ExecutorOptions: executerOpts,
+ }
+ return &loaderConfig
+}
+
// New creates a new template store based on provided configuration
func New(config *Config) (*Store, error) {
// Create a tag filter based on provided configuration
@@ -56,18 +88,32 @@ func New(config *Config) (*Store, error) {
Severities: config.Severities,
ExcludeSeverities: config.ExcludeSeverities,
IncludeTags: config.IncludeTags,
+ Protocols: config.Protocols,
+ ExcludeProtocols: config.ExcludeProtocols,
}),
pathFilter: filter.NewPathFilter(&filter.PathFilterConfig{
IncludedTemplates: config.IncludeTemplates,
ExcludedTemplates: config.ExcludeTemplates,
}, config.Catalog),
+ finalTemplates: config.Templates,
+ finalWorkflows: config.Workflows,
+ }
+
+ urlBasedTemplatesProvided := len(config.TemplateURLs) > 0 || len(config.WorkflowURLs) > 0
+ if urlBasedTemplatesProvided {
+ remoteTemplates, remoteWorkflows, err := getRemoteTemplatesAndWorkflows(config.TemplateURLs, config.WorkflowURLs)
+ if err != nil {
+ return store, err
+ }
+ store.finalTemplates = append(store.finalTemplates, remoteTemplates...)
+ store.finalWorkflows = append(store.finalWorkflows, remoteWorkflows...)
}
// Handle a case with no templates or workflows, where we use base directory
- if len(config.Templates) == 0 && len(config.Workflows) == 0 {
- config.Templates = append(config.Templates, config.TemplatesDirectory)
+ if len(store.finalTemplates) == 0 && len(store.finalWorkflows) == 0 && !urlBasedTemplatesProvided {
+ store.finalTemplates = []string{config.TemplatesDirectory}
}
- store.finalTemplates = append(store.finalTemplates, config.Templates...)
+
return store, nil
}
@@ -90,12 +136,16 @@ func (store *Store) RegisterPreprocessor(preprocessor templates.Preprocessor) {
// the complete compiled templates for a nuclei execution configuration.
func (store *Store) Load() {
store.templates = store.LoadTemplates(store.finalTemplates)
- store.workflows = store.LoadWorkflows(store.config.Workflows)
+ store.workflows = store.LoadWorkflows(store.finalWorkflows)
}
// ValidateTemplates takes a list of templates and validates them
// erroring out on discovering any faulty templates.
func (store *Store) ValidateTemplates(templatesList, workflowsList []string) error {
+ // consider all the templates by default if no templates were passed by the user
+ if len(templatesList) == 0 {
+ templatesList = store.finalTemplates
+ }
templatePaths := store.config.Catalog.GetTemplatesPath(templatesList)
workflowPaths := store.config.Catalog.GetTemplatesPath(workflowsList)
@@ -169,6 +219,7 @@ func (store *Store) LoadTemplates(templatesList []string) []*templates.Template
if loaded {
parsed, err := templates.Parse(templatePath, store.preprocessor, store.config.ExecutorOptions)
if err != nil {
+ stats.Increment(parsers.RuntimeWarningsStats)
gologger.Warning().Msgf("Could not parse template %s: %s\n", templatePath, err)
} else if parsed != nil {
loadedTemplates = append(loadedTemplates, parsed)
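
As a rough usage sketch (not part of the patch), the new loader.NewConfig helper collapses the manual Config construction that was removed from runner.go above; names like cat and executerOpts are assumptions for values built by the caller.

// loader_usage_sketch.go (illustrative only)
package sketch

import (
	"github.com/pkg/errors"

	"github.com/projectdiscovery/gologger"
	"github.com/projectdiscovery/nuclei/v2/pkg/catalog"
	"github.com/projectdiscovery/nuclei/v2/pkg/catalog/loader"
	"github.com/projectdiscovery/nuclei/v2/pkg/protocols"
	"github.com/projectdiscovery/nuclei/v2/pkg/types"
)

// loadStore mirrors the runner.RunEnumeration wiring: build the loader
// config from the CLI options, load the store, then read back what was
// resolved (including any remote TemplateURLs/WorkflowURLs).
func loadStore(options *types.Options, cat *catalog.Catalog, executerOpts protocols.ExecuterOptions) (*loader.Store, error) {
	store, err := loader.New(loader.NewConfig(options, cat, executerOpts))
	if err != nil {
		return nil, errors.Wrap(err, "could not load templates from config")
	}
	store.Load()
	gologger.Info().Msgf("Templates loaded for scan: %d", len(store.Templates()))
	gologger.Info().Msgf("Workflows loaded for scan: %d", len(store.Workflows()))
	return store, nil
}
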
diff --git a/v2/pkg/catalog/loader/remote_loader.go b/v2/pkg/catalog/loader/remote_loader.go
new file mode 100644
index 000000000..c787e9601
--- /dev/null
+++ b/v2/pkg/catalog/loader/remote_loader.go
@@ -0,0 +1,95 @@
+package loader
+
+import (
+ "bufio"
+ "fmt"
+ "net/http"
+ "strings"
+
+ "github.com/pkg/errors"
+)
+
+type ContentType string
+
+const (
+ Template ContentType = "Template"
+ Workflow ContentType = "Workflow"
+)
+
+type RemoteContentError struct {
+ Content []string
+ Type ContentType
+ Error error
+}
+
+func getRemoteTemplatesAndWorkflows(templateURLs []string, workflowURLs []string) ([]string, []string, error) {
+ remoteContentErrorChannel := make(chan RemoteContentError)
+
+ for _, templateURL := range templateURLs {
+ go getRemoteContent(templateURL, remoteContentErrorChannel, Template)
+ }
+ for _, workflowURL := range workflowURLs {
+ go getRemoteContent(workflowURL, remoteContentErrorChannel, Workflow)
+ }
+
+ var remoteTemplateList []string
+ var remoteWorkFlowList []string
+ var err error
+ for i := 0; i < (len(templateURLs) + len(workflowURLs)); i++ {
+ remoteContentError := <-remoteContentErrorChannel
+ if remoteContentError.Error != nil {
+ if err != nil {
+ err = errors.New(remoteContentError.Error.Error() + ": " + err.Error())
+ } else {
+ err = remoteContentError.Error
+ }
+ } else {
+ if remoteContentError.Type == Template {
+ remoteTemplateList = append(remoteTemplateList, remoteContentError.Content...)
+ } else if remoteContentError.Type == Workflow {
+ remoteWorkFlowList = append(remoteWorkFlowList, remoteContentError.Content...)
+ }
+ }
+ }
+
+ return remoteTemplateList, remoteWorkFlowList, err
+}
+
+func getRemoteContent(URL string, w chan<- RemoteContentError, contentType ContentType) {
+ response, err := http.Get(URL)
+ if err != nil {
+ w <- RemoteContentError{
+ Error: err,
+ }
+ return
+ }
+ defer response.Body.Close()
+ if response.StatusCode < 200 || response.StatusCode > 299 {
+ w <- RemoteContentError{
+ Error: fmt.Errorf("get \"%s\": unexpect status %d", URL, response.StatusCode),
+ }
+ return
+ }
+
+ scanner := bufio.NewScanner(response.Body)
+ var templateList []string
+ for scanner.Scan() {
+ text := strings.TrimSpace(scanner.Text())
+ if text == "" {
+ continue
+ }
+ templateList = append(templateList, text)
+ }
+
+ if err := scanner.Err(); err != nil {
+ w <- RemoteContentError{
+ Error: errors.Wrapf(err, "get \"%s\"", URL),
+ }
+ return
+ }
+
+ w <- RemoteContentError{
+ Content: templateList,
+ Type: contentType,
+ }
+}
diff --git a/v2/pkg/core/engine.go b/v2/pkg/core/engine.go
new file mode 100644
index 000000000..4ea9b5417
--- /dev/null
+++ b/v2/pkg/core/engine.go
@@ -0,0 +1,59 @@
+package core
+
+import (
+ "github.com/projectdiscovery/nuclei/v2/pkg/protocols"
+ "github.com/projectdiscovery/nuclei/v2/pkg/types"
+)
+
+// Engine is an executer for running Nuclei Templates/Workflows.
+//
+// The engine contains multiple thread pools which allow using different
+// concurrency values per protocol executed.
+//
+// The engine does most of the heavy lifting of execution: everything from
+// clustering templates to the final execution by the work pool is handled
+// by the engine.
+type Engine struct {
+ workPool *WorkPool
+ options *types.Options
+ executerOpts protocols.ExecuterOptions
+}
+
+// InputProvider is an input providing interface for the nuclei execution
+// engine.
+//
+// An example InputProvider implementation is provided in the form of the
+// hybrid input provider in pkg/core/inputs/hybrid/hmap.go
+type InputProvider interface {
+ // Count returns the number of items for input provider
+ Count() int64
+ // Scan iterates the input and each found item is passed to the
+ // callback consumer.
+ Scan(callback func(value string))
+}
+
+// New returns a new Engine instance
+func New(options *types.Options) *Engine {
+ workPool := NewWorkPool(WorkPoolConfig{
+ InputConcurrency: options.BulkSize,
+ TypeConcurrency: options.TemplateThreads,
+ HeadlessInputConcurrency: options.HeadlessBulkSize,
+ HeadlessTypeConcurrency: options.HeadlessTemplateThreads,
+ })
+ engine := &Engine{
+ options: options,
+ workPool: workPool,
+ }
+ return engine
+}
+
+// SetExecuterOptions sets the executer options for the engine. This is required
+// before using the engine to perform any execution.
+func (e *Engine) SetExecuterOptions(options protocols.ExecuterOptions) {
+ e.executerOpts = options
+}
+
+// ExecuterOptions returns protocols.ExecuterOptions for nuclei engine.
+func (e *Engine) ExecuterOptions() protocols.ExecuterOptions {
+ return e.executerOpts
+}
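
The same wiring used in runner.go above reduces to a small sketch (illustrative, not part of the patch; finalTemplates, executerOpts and provider are assumed to be prepared elsewhere).

// engine_usage_sketch.go (illustrative only)
package sketch

import (
	"github.com/projectdiscovery/nuclei/v2/pkg/core"
	"github.com/projectdiscovery/nuclei/v2/pkg/protocols"
	"github.com/projectdiscovery/nuclei/v2/pkg/templates"
	"github.com/projectdiscovery/nuclei/v2/pkg/types"
)

// runTemplates builds the engine from CLI options, attaches the executer
// options and runs the compiled templates against an InputProvider.
// Execute clusters the templates internally; runner.go instead calls
// ExecuteWithOpts(..., true) because it has already clustered them.
func runTemplates(options *types.Options, executerOpts protocols.ExecuterOptions,
	finalTemplates []*templates.Template, provider core.InputProvider) bool {
	engine := core.New(options)
	engine.SetExecuterOptions(executerOpts)

	results := engine.Execute(finalTemplates, provider)
	return results.Load() // true if at least one template matched
}
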
diff --git a/v2/pkg/core/engine_test.go b/v2/pkg/core/engine_test.go
new file mode 100644
index 000000000..9a8bc9592
--- /dev/null
+++ b/v2/pkg/core/engine_test.go
@@ -0,0 +1 @@
+package core
diff --git a/v2/pkg/core/execute.go b/v2/pkg/core/execute.go
new file mode 100644
index 000000000..9e58a3d3f
--- /dev/null
+++ b/v2/pkg/core/execute.go
@@ -0,0 +1,96 @@
+package core
+
+import (
+ "github.com/remeh/sizedwaitgroup"
+ "go.uber.org/atomic"
+
+ "github.com/projectdiscovery/gologger"
+ "github.com/projectdiscovery/nuclei/v2/pkg/templates"
+ "github.com/projectdiscovery/nuclei/v2/pkg/templates/types"
+)
+
+// Execute takes a list of templates/workflows that have been compiled
+// and executes them based on provided concurrency options.
+//
+// All the execution logic for the templates/workflows happens in this part
+// of the engine.
+func (e *Engine) Execute(templates []*templates.Template, target InputProvider) *atomic.Bool {
+ return e.ExecuteWithOpts(templates, target, false)
+}
+
+// ExecuteWithOpts executes with the full options
+func (e *Engine) ExecuteWithOpts(templatesList []*templates.Template, target InputProvider, noCluster bool) *atomic.Bool {
+ var finalTemplates []*templates.Template
+ if !noCluster {
+ finalTemplates, _ = templates.ClusterTemplates(templatesList, e.executerOpts)
+ } else {
+ finalTemplates = templatesList
+ }
+
+ results := &atomic.Bool{}
+ for _, template := range finalTemplates {
+ templateType := template.Type()
+
+ var wg *sizedwaitgroup.SizedWaitGroup
+ if templateType == types.HeadlessProtocol {
+ wg = e.workPool.Headless
+ } else {
+ wg = e.workPool.Default
+ }
+
+ wg.Add()
+ go func(tpl *templates.Template) {
+ switch {
+ case tpl.SelfContained:
+ // Self Contained requests are executed here separately
+ e.executeSelfContainedTemplateWithInput(tpl, results)
+ default:
+ // All other request types are executed here
+ e.executeModelWithInput(templateType, tpl, target, results)
+ }
+ wg.Done()
+ }(template)
+ }
+ e.workPool.Wait()
+ return results
+}
+
+// executeSelfContainedTemplateWithInput executes a self-contained template.
+func (e *Engine) executeSelfContainedTemplateWithInput(template *templates.Template, results *atomic.Bool) {
+ match, err := template.Executer.Execute("")
+ if err != nil {
+ gologger.Warning().Msgf("[%s] Could not execute step: %s\n", e.executerOpts.Colorizer.BrightBlue(template.ID), err)
+ }
+ results.CAS(false, match)
+}
+
+// executeModelWithInput executes a type of template with input
+func (e *Engine) executeModelWithInput(templateType types.ProtocolType, template *templates.Template, target InputProvider, results *atomic.Bool) {
+ wg := e.workPool.InputPool(templateType)
+
+ target.Scan(func(scannedValue string) {
+ // Skip if the host has had errors
+ if e.executerOpts.HostErrorsCache != nil && e.executerOpts.HostErrorsCache.Check(scannedValue) {
+ return
+ }
+
+ wg.WaitGroup.Add()
+ go func(value string) {
+ defer wg.WaitGroup.Done()
+
+ var match bool
+ var err error
+ switch templateType {
+ case types.WorkflowProtocol:
+ match = e.executeWorkflow(value, template.CompiledWorkflow)
+ default:
+ match, err = template.Executer.Execute(value)
+ }
+ if err != nil {
+ gologger.Warning().Msgf("[%s] Could not execute step: %s\n", e.executerOpts.Colorizer.BrightBlue(template.ID), err)
+ }
+ results.CAS(false, match)
+ }(scannedValue)
+ })
+ wg.WaitGroup.Wait()
+}
diff --git a/v2/pkg/core/inputs/hybrid/hmap.go b/v2/pkg/core/inputs/hybrid/hmap.go
new file mode 100644
index 000000000..901bc6b7f
--- /dev/null
+++ b/v2/pkg/core/inputs/hybrid/hmap.go
@@ -0,0 +1,135 @@
+// Package hybrid implements a hybrid hmap/filekv backed input provider
+// for nuclei that can either stream or store inputs using different kv stores.
+package hybrid
+
+import (
+ "bufio"
+ "io"
+ "os"
+ "strings"
+
+ "github.com/pkg/errors"
+
+ "github.com/projectdiscovery/filekv"
+ "github.com/projectdiscovery/fileutil"
+ "github.com/projectdiscovery/gologger"
+ "github.com/projectdiscovery/hmap/store/hybrid"
+ "github.com/projectdiscovery/nuclei/v2/pkg/types"
+)
+
+// Input is a hmap/filekv backed nuclei Input provider
+type Input struct {
+ inputCount int64
+ dupeCount int64
+ hostMap *hybrid.HybridMap
+ hostMapStream *filekv.FileDB
+}
+
+// New creates a new hmap backed nuclei Input Provider
+// and initializes it based on the provided options.
+func New(options *types.Options) (*Input, error) {
+ hm, err := hybrid.New(hybrid.DefaultDiskOptions)
+ if err != nil {
+ return nil, errors.Wrap(err, "could not create temporary input file")
+ }
+
+ input := &Input{hostMap: hm}
+ if options.Stream {
+ fkvOptions := filekv.DefaultOptions
+ if tmpFileName, err := fileutil.GetTempFileName(); err != nil {
+ return nil, errors.Wrap(err, "could not create temporary input file")
+ } else {
+ fkvOptions.Path = tmpFileName
+ }
+ fkv, err := filekv.Open(fkvOptions)
+ if err != nil {
+ return nil, errors.Wrap(err, "could not create temporary unsorted input file")
+ }
+ input.hostMapStream = fkv
+ }
+ if initErr := input.initializeInputSources(options); initErr != nil {
+ return nil, initErr
+ }
+ if input.dupeCount > 0 {
+ gologger.Info().Msgf("Supplied input was automatically deduplicated (%d removed).", input.dupeCount)
+ }
+ return input, nil
+}
+
+// Close closes the input provider
+func (i *Input) Close() {
+ i.hostMap.Close()
+ if i.hostMapStream != nil {
+ i.hostMapStream.Close()
+ }
+}
+
+// initializeInputSources initializes the input sources for hmap input
+func (i *Input) initializeInputSources(options *types.Options) error {
+ // Handle targets flags
+ for _, target := range options.Targets {
+ i.normalizeStoreInputValue(target)
+ }
+
+ // Handle stdin
+ if options.Stdin {
+ i.scanInputFromReader(os.Stdin)
+ }
+
+ // Handle target file
+ if options.TargetsFilePath != "" {
+ input, inputErr := os.Open(options.TargetsFilePath)
+ if inputErr != nil {
+ return errors.Wrap(inputErr, "could not open targets file")
+ }
+ i.scanInputFromReader(input)
+ input.Close()
+ }
+ return nil
+}
+
+// scanInputFromReader scans lines of input from the reader and passes them for storage
+func (i *Input) scanInputFromReader(reader io.Reader) {
+ scanner := bufio.NewScanner(reader)
+ for scanner.Scan() {
+ i.normalizeStoreInputValue(scanner.Text())
+ }
+}
+
+// normalizeStoreInputValue normalizes and stores the passed input value
+func (i *Input) normalizeStoreInputValue(value string) {
+ url := strings.TrimSpace(value)
+ if url == "" {
+ return
+ }
+
+ if _, ok := i.hostMap.Get(url); ok {
+ i.dupeCount++
+ return
+ }
+
+ i.inputCount++
+ _ = i.hostMap.Set(url, nil)
+ if i.hostMapStream != nil {
+ _ = i.hostMapStream.Set([]byte(url), nil)
+ }
+}
+
+// Count returns the input count
+func (i *Input) Count() int64 {
+ return i.inputCount
+}
+
+// Scan iterates over the input and passes each item to the
+// callback consumer.
+func (i *Input) Scan(callback func(value string)) {
+ callbackFunc := func(k, _ []byte) error {
+ callback(string(k))
+ return nil
+ }
+ if i.hostMapStream != nil {
+ _ = i.hostMapStream.Scan(callbackFunc)
+ } else {
+ i.hostMap.Scan(callbackFunc)
+ }
+}
diff --git a/v2/pkg/core/inputs/inputs.go b/v2/pkg/core/inputs/inputs.go
new file mode 100644
index 000000000..6237dfb99
--- /dev/null
+++ b/v2/pkg/core/inputs/inputs.go
@@ -0,0 +1,17 @@
+package inputs
+
+type SimpleInputProvider struct {
+ Inputs []string
+}
+
+// Count returns the number of items for the input provider
+func (s *SimpleInputProvider) Count() int64 {
+ return int64(len(s.Inputs))
+}
+
+// Scan calls a callback function until the input provider is exhausted
+func (s *SimpleInputProvider) Scan(callback func(value string)) {
+ for _, v := range s.Inputs {
+ callback(v)
+ }
+}
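
A quick usage sketch for the simple provider (nothing beyond the two methods defined above is assumed):

package main

import (
	"fmt"

	"github.com/projectdiscovery/nuclei/v2/pkg/core/inputs"
)

func main() {
	provider := &inputs.SimpleInputProvider{Inputs: []string{"https://a.example", "https://b.example"}}

	fmt.Println(provider.Count()) // 2
	provider.Scan(func(value string) {
		fmt.Println("input:", value) // invoked once per entry, in order
	})
}
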
diff --git a/v2/pkg/workflows/execute.go b/v2/pkg/core/workflow_execute.go
similarity index 79%
rename from v2/pkg/workflows/execute.go
rename to v2/pkg/core/workflow_execute.go
index c0710c6ad..9ee87df4a 100644
--- a/v2/pkg/workflows/execute.go
+++ b/v2/pkg/core/workflow_execute.go
@@ -1,21 +1,23 @@
-package workflows
+package core
import (
- "github.com/projectdiscovery/gologger"
- "github.com/projectdiscovery/nuclei/v2/pkg/output"
"github.com/remeh/sizedwaitgroup"
"go.uber.org/atomic"
+
+ "github.com/projectdiscovery/gologger"
+ "github.com/projectdiscovery/nuclei/v2/pkg/output"
+ "github.com/projectdiscovery/nuclei/v2/pkg/workflows"
)
-// RunWorkflow runs a workflow on an input and returns true or false
-func (w *Workflow) RunWorkflow(input string) bool {
+// executeWorkflow runs a workflow on an input and returns true or false
+func (e *Engine) executeWorkflow(input string, w *workflows.Workflow) bool {
results := &atomic.Bool{}
swg := sizedwaitgroup.New(w.Options.Options.TemplateThreads)
for _, template := range w.Workflows {
swg.Add()
- func(template *WorkflowTemplate) {
- if err := w.runWorkflowStep(template, input, results, &swg); err != nil {
+ func(template *workflows.WorkflowTemplate) {
+ if err := e.runWorkflowStep(template, input, results, &swg, w); err != nil {
gologger.Warning().Msgf("[%s] Could not execute workflow step: %s\n", template.Template, err)
}
swg.Done()
@@ -27,7 +29,7 @@ func (w *Workflow) RunWorkflow(input string) bool {
// runWorkflowStep runs a workflow step for the workflow. It executes the workflow
// in a recursive manner running all subtemplates and matchers.
-func (w *Workflow) runWorkflowStep(template *WorkflowTemplate, input string, results *atomic.Bool, swg *sizedwaitgroup.SizedWaitGroup) error {
+func (e *Engine) runWorkflowStep(template *workflows.WorkflowTemplate, input string, results *atomic.Bool, swg *sizedwaitgroup.SizedWaitGroup, w *workflows.Workflow) error {
var firstMatched bool
var err error
var mainErr error
@@ -90,8 +92,8 @@ func (w *Workflow) runWorkflowStep(template *WorkflowTemplate, input string, res
for _, subtemplate := range matcher.Subtemplates {
swg.Add()
- go func(subtemplate *WorkflowTemplate) {
- if err := w.runWorkflowStep(subtemplate, input, results, swg); err != nil {
+ go func(subtemplate *workflows.WorkflowTemplate) {
+ if err := e.runWorkflowStep(subtemplate, input, results, swg, w); err != nil {
gologger.Warning().Msgf("[%s] Could not execute workflow step: %s\n", subtemplate.Template, err)
}
swg.Done()
@@ -114,8 +116,8 @@ func (w *Workflow) runWorkflowStep(template *WorkflowTemplate, input string, res
for _, subtemplate := range template.Subtemplates {
swg.Add()
- go func(template *WorkflowTemplate) {
- if err := w.runWorkflowStep(template, input, results, swg); err != nil {
+ go func(template *workflows.WorkflowTemplate) {
+ if err := e.runWorkflowStep(template, input, results, swg, w); err != nil {
gologger.Warning().Msgf("[%s] Could not execute workflow step: %s\n", template.Template, err)
}
swg.Done()
diff --git a/v2/pkg/workflows/execute_test.go b/v2/pkg/core/workflow_execute_test.go
similarity index 70%
rename from v2/pkg/workflows/execute_test.go
rename to v2/pkg/core/workflow_execute_test.go
index 6d9ab6a09..a00ce6043 100644
--- a/v2/pkg/workflows/execute_test.go
+++ b/v2/pkg/core/workflow_execute_test.go
@@ -1,4 +1,4 @@
-package workflows
+package core
import (
"testing"
@@ -10,18 +10,20 @@ import (
"github.com/projectdiscovery/nuclei/v2/pkg/progress"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols"
"github.com/projectdiscovery/nuclei/v2/pkg/types"
+ "github.com/projectdiscovery/nuclei/v2/pkg/workflows"
)
func TestWorkflowsSimple(t *testing.T) {
progressBar, _ := progress.NewStatsTicker(0, false, false, false, 0)
- workflow := &Workflow{Options: &protocols.ExecuterOptions{Options: &types.Options{TemplateThreads: 10}}, Workflows: []*WorkflowTemplate{
- {Executers: []*ProtocolExecuterPair{{
+ workflow := &workflows.Workflow{Options: &protocols.ExecuterOptions{Options: &types.Options{TemplateThreads: 10}}, Workflows: []*workflows.WorkflowTemplate{
+ {Executers: []*workflows.ProtocolExecuterPair{{
Executer: &mockExecuter{result: true}, Options: &protocols.ExecuterOptions{Progress: progressBar}},
}},
}}
- matched := workflow.RunWorkflow("https://test.com")
+ engine := &Engine{}
+ matched := engine.executeWorkflow("https://test.com", workflow)
require.True(t, matched, "could not get correct match value")
}
@@ -29,20 +31,21 @@ func TestWorkflowsSimpleMultiple(t *testing.T) {
progressBar, _ := progress.NewStatsTicker(0, false, false, false, 0)
var firstInput, secondInput string
- workflow := &Workflow{Options: &protocols.ExecuterOptions{Options: &types.Options{TemplateThreads: 10}}, Workflows: []*WorkflowTemplate{
- {Executers: []*ProtocolExecuterPair{{
+ workflow := &workflows.Workflow{Options: &protocols.ExecuterOptions{Options: &types.Options{TemplateThreads: 10}}, Workflows: []*workflows.WorkflowTemplate{
+ {Executers: []*workflows.ProtocolExecuterPair{{
Executer: &mockExecuter{result: true, executeHook: func(input string) {
firstInput = input
}}, Options: &protocols.ExecuterOptions{Progress: progressBar}},
}},
- {Executers: []*ProtocolExecuterPair{{
+ {Executers: []*workflows.ProtocolExecuterPair{{
Executer: &mockExecuter{result: true, executeHook: func(input string) {
secondInput = input
}}, Options: &protocols.ExecuterOptions{Progress: progressBar}},
}},
}}
- matched := workflow.RunWorkflow("https://test.com")
+ engine := &Engine{}
+ matched := engine.executeWorkflow("https://test.com", workflow)
require.True(t, matched, "could not get correct match value")
require.Equal(t, "https://test.com", firstInput, "could not get correct first input")
@@ -53,21 +56,22 @@ func TestWorkflowsSubtemplates(t *testing.T) {
progressBar, _ := progress.NewStatsTicker(0, false, false, false, 0)
var firstInput, secondInput string
- workflow := &Workflow{Options: &protocols.ExecuterOptions{Options: &types.Options{TemplateThreads: 10}}, Workflows: []*WorkflowTemplate{
- {Executers: []*ProtocolExecuterPair{{
+ workflow := &workflows.Workflow{Options: &protocols.ExecuterOptions{Options: &types.Options{TemplateThreads: 10}}, Workflows: []*workflows.WorkflowTemplate{
+ {Executers: []*workflows.ProtocolExecuterPair{{
Executer: &mockExecuter{result: true, executeHook: func(input string) {
firstInput = input
}, outputs: []*output.InternalWrappedEvent{
{OperatorsResult: &operators.Result{}, Results: []*output.ResultEvent{{}}},
}}, Options: &protocols.ExecuterOptions{Progress: progressBar}},
- }, Subtemplates: []*WorkflowTemplate{{Executers: []*ProtocolExecuterPair{{
+ }, Subtemplates: []*workflows.WorkflowTemplate{{Executers: []*workflows.ProtocolExecuterPair{{
Executer: &mockExecuter{result: true, executeHook: func(input string) {
secondInput = input
}}, Options: &protocols.ExecuterOptions{Progress: progressBar}},
}}}},
}}
- matched := workflow.RunWorkflow("https://test.com")
+ engine := &Engine{}
+ matched := engine.executeWorkflow("https://test.com", workflow)
require.True(t, matched, "could not get correct match value")
require.Equal(t, "https://test.com", firstInput, "could not get correct first input")
@@ -78,19 +82,20 @@ func TestWorkflowsSubtemplatesNoMatch(t *testing.T) {
progressBar, _ := progress.NewStatsTicker(0, false, false, false, 0)
var firstInput, secondInput string
- workflow := &Workflow{Options: &protocols.ExecuterOptions{Options: &types.Options{TemplateThreads: 10}}, Workflows: []*WorkflowTemplate{
- {Executers: []*ProtocolExecuterPair{{
+ workflow := &workflows.Workflow{Options: &protocols.ExecuterOptions{Options: &types.Options{TemplateThreads: 10}}, Workflows: []*workflows.WorkflowTemplate{
+ {Executers: []*workflows.ProtocolExecuterPair{{
Executer: &mockExecuter{result: false, executeHook: func(input string) {
firstInput = input
}}, Options: &protocols.ExecuterOptions{Progress: progressBar}},
- }, Subtemplates: []*WorkflowTemplate{{Executers: []*ProtocolExecuterPair{{
+ }, Subtemplates: []*workflows.WorkflowTemplate{{Executers: []*workflows.ProtocolExecuterPair{{
Executer: &mockExecuter{result: true, executeHook: func(input string) {
secondInput = input
}}, Options: &protocols.ExecuterOptions{Progress: progressBar}},
}}}},
}}
- matched := workflow.RunWorkflow("https://test.com")
+ engine := &Engine{}
+ matched := engine.executeWorkflow("https://test.com", workflow)
require.False(t, matched, "could not get correct match value")
require.Equal(t, "https://test.com", firstInput, "could not get correct first input")
@@ -101,8 +106,8 @@ func TestWorkflowsSubtemplatesWithMatcher(t *testing.T) {
progressBar, _ := progress.NewStatsTicker(0, false, false, false, 0)
var firstInput, secondInput string
- workflow := &Workflow{Options: &protocols.ExecuterOptions{Options: &types.Options{TemplateThreads: 10}}, Workflows: []*WorkflowTemplate{
- {Executers: []*ProtocolExecuterPair{{
+ workflow := &workflows.Workflow{Options: &protocols.ExecuterOptions{Options: &types.Options{TemplateThreads: 10}}, Workflows: []*workflows.WorkflowTemplate{
+ {Executers: []*workflows.ProtocolExecuterPair{{
Executer: &mockExecuter{result: true, executeHook: func(input string) {
firstInput = input
}, outputs: []*output.InternalWrappedEvent{
@@ -111,14 +116,15 @@ func TestWorkflowsSubtemplatesWithMatcher(t *testing.T) {
Extracts: map[string][]string{},
}},
}}, Options: &protocols.ExecuterOptions{Progress: progressBar}},
- }, Matchers: []*Matcher{{Name: "tomcat", Subtemplates: []*WorkflowTemplate{{Executers: []*ProtocolExecuterPair{{
+ }, Matchers: []*workflows.Matcher{{Name: "tomcat", Subtemplates: []*workflows.WorkflowTemplate{{Executers: []*workflows.ProtocolExecuterPair{{
Executer: &mockExecuter{result: true, executeHook: func(input string) {
secondInput = input
}}, Options: &protocols.ExecuterOptions{Progress: progressBar}},
}}}}}},
}}
- matched := workflow.RunWorkflow("https://test.com")
+ engine := &Engine{}
+ matched := engine.executeWorkflow("https://test.com", workflow)
require.True(t, matched, "could not get correct match value")
require.Equal(t, "https://test.com", firstInput, "could not get correct first input")
@@ -129,8 +135,8 @@ func TestWorkflowsSubtemplatesWithMatcherNoMatch(t *testing.T) {
progressBar, _ := progress.NewStatsTicker(0, false, false, false, 0)
var firstInput, secondInput string
- workflow := &Workflow{Options: &protocols.ExecuterOptions{Options: &types.Options{TemplateThreads: 10}}, Workflows: []*WorkflowTemplate{
- {Executers: []*ProtocolExecuterPair{{
+ workflow := &workflows.Workflow{Options: &protocols.ExecuterOptions{Options: &types.Options{TemplateThreads: 10}}, Workflows: []*workflows.WorkflowTemplate{
+ {Executers: []*workflows.ProtocolExecuterPair{{
Executer: &mockExecuter{result: true, executeHook: func(input string) {
firstInput = input
}, outputs: []*output.InternalWrappedEvent{
@@ -139,14 +145,15 @@ func TestWorkflowsSubtemplatesWithMatcherNoMatch(t *testing.T) {
Extracts: map[string][]string{},
}},
}}, Options: &protocols.ExecuterOptions{Progress: progressBar}},
- }, Matchers: []*Matcher{{Name: "apache", Subtemplates: []*WorkflowTemplate{{Executers: []*ProtocolExecuterPair{{
+ }, Matchers: []*workflows.Matcher{{Name: "apache", Subtemplates: []*workflows.WorkflowTemplate{{Executers: []*workflows.ProtocolExecuterPair{{
Executer: &mockExecuter{result: true, executeHook: func(input string) {
secondInput = input
}}, Options: &protocols.ExecuterOptions{Progress: progressBar}},
}}}}}},
}}
- matched := workflow.RunWorkflow("https://test.com")
+ engine := &Engine{}
+ matched := engine.executeWorkflow("https://test.com", workflow)
require.False(t, matched, "could not get correct match value")
require.Equal(t, "https://test.com", firstInput, "could not get correct first input")
diff --git a/v2/pkg/core/workpool.go b/v2/pkg/core/workpool.go
new file mode 100644
index 000000000..46ca8549d
--- /dev/null
+++ b/v2/pkg/core/workpool.go
@@ -0,0 +1,65 @@
+package core
+
+import (
+ "github.com/remeh/sizedwaitgroup"
+
+ "github.com/projectdiscovery/nuclei/v2/pkg/templates/types"
+)
+
+// WorkPool implements an execution pool for executing different
+// types of tasks with different concurrency requirements.
+//
+// It also allows configuration of those requirements. This is used
+// for per-module concurrency, such as a separate headless concurrency limit.
+type WorkPool struct {
+ Headless *sizedwaitgroup.SizedWaitGroup
+ Default *sizedwaitgroup.SizedWaitGroup
+ config WorkPoolConfig
+}
+
+// WorkPoolConfig is the configuration for work pool
+type WorkPoolConfig struct {
+ // InputConcurrency is the concurrency for input values.
+ InputConcurrency int
+ // TypeConcurrency is the concurrency for the request type templates.
+ TypeConcurrency int
+ // HeadlessInputConcurrency is the concurrency for headless input values.
+ HeadlessInputConcurrency int
+ // HeadlessTypeConcurrency is the concurrency for the headless request type templates.
+ HeadlessTypeConcurrency int
+}
+
+// NewWorkPool returns a new WorkPool instance
+func NewWorkPool(config WorkPoolConfig) *WorkPool {
+ headlessWg := sizedwaitgroup.New(config.HeadlessTypeConcurrency)
+ defaultWg := sizedwaitgroup.New(config.TypeConcurrency)
+
+ return &WorkPool{
+ config: config,
+ Headless: &headlessWg,
+ Default: &defaultWg,
+ }
+}
+
+// Wait waits for all the work pool wait groups to finish
+func (w *WorkPool) Wait() {
+ w.Default.Wait()
+ w.Headless.Wait()
+}
+
+// InputWorkPool is a work pool per-input
+type InputWorkPool struct {
+ WaitGroup *sizedwaitgroup.SizedWaitGroup
+}
+
+// InputPool returns a work pool sized for the input concurrency of the given template type
+func (w *WorkPool) InputPool(templateType types.ProtocolType) *InputWorkPool {
+ var count int
+ if templateType == types.HeadlessProtocol {
+ count = w.config.HeadlessInputConcurrency
+ } else {
+ count = w.config.InputConcurrency
+ }
+ swg := sizedwaitgroup.New(count)
+ return &InputWorkPool{WaitGroup: &swg}
+}
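
A short sketch of how the pool is expected to be driven per template type; the concurrency numbers and the HTTPProtocol constant are illustrative assumptions, not part of this change:

package main

import (
	"fmt"

	"github.com/projectdiscovery/nuclei/v2/pkg/core"
	"github.com/projectdiscovery/nuclei/v2/pkg/templates/types"
)

func main() {
	pool := core.NewWorkPool(core.WorkPoolConfig{
		InputConcurrency:         25,
		TypeConcurrency:          10,
		HeadlessInputConcurrency: 5,
		HeadlessTypeConcurrency:  2,
	})

	// Per-input wait group sized by protocol type, mirroring executeModelWithInput.
	inputPool := pool.InputPool(types.HTTPProtocol)
	for i := 0; i < 3; i++ {
		inputPool.WaitGroup.Add()
		go func(n int) {
			defer inputPool.WaitGroup.Done()
			fmt.Println("processing input", n)
		}(i)
	}
	inputPool.WaitGroup.Wait()

	// Wait for the per-type pools as well.
	pool.Wait()
}
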
diff --git a/v2/pkg/model/model.go b/v2/pkg/model/model.go
index a2a830294..f8ddb0858 100644
--- a/v2/pkg/model/model.go
+++ b/v2/pkg/model/model.go
@@ -50,13 +50,6 @@ type Info struct {
Reference stringslice.StringSlice `json:"reference,omitempty" yaml:"reference,omitempty" jsonschema:"title=references for the template,description=Links relevant to the template"`
// description: |
// Severity of the template.
- //
- // values:
- // - info
- // - low
- // - medium
- // - high
- // - critical
SeverityHolder severity.Holder `json:"severity,omitempty" yaml:"severity,omitempty"`
// description: |
// Metadata of the template.
diff --git a/v2/pkg/model/model_test.go b/v2/pkg/model/model_test.go
index bdbe27ddb..e0803c31a 100644
--- a/v2/pkg/model/model_test.go
+++ b/v2/pkg/model/model_test.go
@@ -72,6 +72,7 @@ func TestUnmarshal(t *testing.T) {
}
assertUnmarshalledTemplateInfo := func(t *testing.T, yamlPayload string) Info {
+ t.Helper()
info := Info{}
err := yaml.Unmarshal([]byte(yamlPayload), &info)
assert.Nil(t, err)
diff --git a/v2/pkg/model/types/severity/severities.go b/v2/pkg/model/types/severity/severities.go
index 9e3244859..91ffa1ceb 100644
--- a/v2/pkg/model/types/severity/severities.go
+++ b/v2/pkg/model/types/severity/severities.go
@@ -43,7 +43,7 @@ func (severities *Severities) UnmarshalYAML(unmarshal func(interface{}) error) e
}
func (severities Severities) String() string {
- var stringSeverities []string
+ var stringSeverities = make([]string, 0, len(severities))
for _, severity := range severities {
stringSeverities = append(stringSeverities, severity.String())
}
diff --git a/v2/pkg/model/types/severity/severity.go b/v2/pkg/model/types/severity/severity.go
index bb52a2eb7..5c3b28150 100644
--- a/v2/pkg/model/types/severity/severity.go
+++ b/v2/pkg/model/types/severity/severity.go
@@ -1,19 +1,28 @@
package severity
import (
+ "encoding/json"
"strings"
+ "github.com/alecthomas/jsonschema"
"github.com/pkg/errors"
)
type Severity int
+// name:Severity
const (
+ // name:undefined
Undefined Severity = iota
+ // name:info
Info
+ // name:low
Low
+ // name:medium
Medium
+ // name:high
High
+ // name:critical
Critical
limit
)
@@ -51,3 +60,44 @@ func normalizeValue(value string) string {
func (severity Severity) String() string {
return severityMappings[severity]
}
+
+//nolint:exported,revive //prefer to be explicit about the name, and make it refactor-safe
+// Holder holds a Severity type. Required for un/marshalling purposes
+type Holder struct {
+ Severity Severity `mapping:"true"`
+}
+
+func (severityHolder Holder) JSONSchemaType() *jsonschema.Type {
+ gotType := &jsonschema.Type{
+ Type: "string",
+ Title: "severity of the template",
+ Description: "Seriousness of the implications of the template",
+ }
+ for _, severity := range GetSupportedSeverities() {
+ gotType.Enum = append(gotType.Enum, severity.String())
+ }
+ return gotType
+}
+
+func (severityHolder *Holder) UnmarshalYAML(unmarshal func(interface{}) error) error {
+ var marshalledSeverity string
+ if err := unmarshal(&marshalledSeverity); err != nil {
+ return err
+ }
+
+ computedSeverity, err := toSeverity(marshalledSeverity)
+ if err != nil {
+ return err
+ }
+
+ severityHolder.Severity = computedSeverity
+ return nil
+}
+
+func (severityHolder *Holder) MarshalJSON() ([]byte, error) {
+ return json.Marshal(severityHolder.Severity.String())
+}
+
+func (severityHolder Holder) MarshalYAML() (interface{}, error) {
+ return severityHolder.Severity.String(), nil
+}
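
For illustration, the relocated Holder participates in ordinary YAML unmarshalling; a minimal sketch, assuming gopkg.in/yaml.v2 as the YAML library in use:

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"

	"github.com/projectdiscovery/nuclei/v2/pkg/model/types/severity"
)

func main() {
	var holder severity.Holder
	// Supported values (info, low, medium, high, critical) are normalized
	// before lookup, so mixed case parses fine.
	if err := yaml.Unmarshal([]byte("High"), &holder); err != nil {
		panic(err)
	}
	fmt.Println(holder.Severity == severity.High) // true
	fmt.Println(holder.Severity.String())         // high
}
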
diff --git a/v2/pkg/model/types/severity/severity_holder.go b/v2/pkg/model/types/severity/severity_holder.go
deleted file mode 100644
index ad4c2496d..000000000
--- a/v2/pkg/model/types/severity/severity_holder.go
+++ /dev/null
@@ -1,48 +0,0 @@
-package severity
-
-import (
- "encoding/json"
-
- "github.com/alecthomas/jsonschema"
-)
-
-//nolint:exported,revive //prefer to be explicit about the name, and make it refactor-safe
-// Holder holds a Severity type. Required for un/marshalling purposes
-type Holder struct {
- Severity Severity
-}
-
-func (severityHolder Holder) JSONSchemaType() *jsonschema.Type {
- gotType := &jsonschema.Type{
- Type: "string",
- Title: "severity of the template",
- Description: "Seriousness of the implications of the template",
- }
- for _, severity := range GetSupportedSeverities() {
- gotType.Enum = append(gotType.Enum, severity.String())
- }
- return gotType
-}
-
-func (severityHolder *Holder) UnmarshalYAML(unmarshal func(interface{}) error) error {
- var marshalledSeverity string
- if err := unmarshal(&marshalledSeverity); err != nil {
- return err
- }
-
- computedSeverity, err := toSeverity(marshalledSeverity)
- if err != nil {
- return err
- }
-
- severityHolder.Severity = computedSeverity
- return nil
-}
-
-func (severityHolder *Holder) MarshalJSON() ([]byte, error) {
- return json.Marshal(severityHolder.Severity.String())
-}
-
-func (severityHolder Holder) MarshalYAML() (interface{}, error) {
- return severityHolder.Severity.String(), nil
-}
diff --git a/v2/pkg/model/types/severity/severity_test.go b/v2/pkg/model/types/severity/severity_test.go
index b21f57265..6ba472388 100644
--- a/v2/pkg/model/types/severity/severity_test.go
+++ b/v2/pkg/model/types/severity/severity_test.go
@@ -30,6 +30,7 @@ func TestGetSupportedSeverities(t *testing.T) {
}
func testUnmarshal(t *testing.T, unmarshaller func(data []byte, v interface{}) error, payloadCreator func(value string) string) {
+ t.Helper()
payloads := [...]string{
payloadCreator("Info"),
payloadCreator("info"),
@@ -48,6 +49,7 @@ func testUnmarshal(t *testing.T, unmarshaller func(data []byte, v interface{}) e
}
func testUnmarshalFail(t *testing.T, unmarshaller func(data []byte, v interface{}) error, payloadCreator func(value string) string) {
+ t.Helper()
assert.Panics(t, func() { unmarshal(payloadCreator("invalid"), unmarshaller) })
}
diff --git a/v2/pkg/model/types/stringslice/stringslice.go b/v2/pkg/model/types/stringslice/stringslice.go
index 4a3e28486..55d798550 100644
--- a/v2/pkg/model/types/stringslice/stringslice.go
+++ b/v2/pkg/model/types/stringslice/stringslice.go
@@ -80,11 +80,12 @@ func marshalStringToSlice(unmarshal func(interface{}) error) ([]string, error) {
}
var result []string
- if len(marshalledValuesAsSlice) > 0 {
+ switch {
+ case len(marshalledValuesAsSlice) > 0:
result = marshalledValuesAsSlice
- } else if utils.IsNotBlank(marshalledValueAsString) {
+ case utils.IsNotBlank(marshalledValueAsString):
result = strings.Split(marshalledValueAsString, ",")
- } else {
+ default:
result = []string{}
}
diff --git a/v2/pkg/operators/common/dsl/dsl.go b/v2/pkg/operators/common/dsl/dsl.go
index 787cd8ff6..9e1a9f2ad 100644
--- a/v2/pkg/operators/common/dsl/dsl.go
+++ b/v2/pkg/operators/common/dsl/dsl.go
@@ -1,6 +1,8 @@
package dsl
import (
+ "bytes"
+ "compress/gzip"
"crypto/md5"
"crypto/sha1"
"crypto/sha256"
@@ -17,10 +19,11 @@ import (
"time"
"github.com/Knetic/govaluate"
+ "github.com/spaolacci/murmur3"
+
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/helpers/deserialization"
"github.com/projectdiscovery/nuclei/v2/pkg/types"
- "github.com/spaolacci/murmur3"
)
const (
@@ -31,21 +34,38 @@ const (
withMaxRandArgsSize = withCutSetArgsSize
)
+var ErrDSLArguments = errors.New("invalid arguments provided to dsl")
+
var functions = map[string]govaluate.ExpressionFunction{
"len": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 1 {
+ return nil, ErrDSLArguments
+ }
length := len(types.ToString(args[0]))
return float64(length), nil
},
"toupper": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 1 {
+ return nil, ErrDSLArguments
+ }
return strings.ToUpper(types.ToString(args[0])), nil
},
"tolower": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 1 {
+ return nil, ErrDSLArguments
+ }
return strings.ToLower(types.ToString(args[0])), nil
},
"replace": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 3 {
+ return nil, ErrDSLArguments
+ }
return strings.ReplaceAll(types.ToString(args[0]), types.ToString(args[1]), types.ToString(args[2])), nil
},
"replace_regex": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 3 {
+ return nil, ErrDSLArguments
+ }
compiled, err := regexp.Compile(types.ToString(args[1]))
if err != nil {
return nil, err
@@ -53,66 +73,133 @@ var functions = map[string]govaluate.ExpressionFunction{
return compiled.ReplaceAllString(types.ToString(args[0]), types.ToString(args[2])), nil
},
"trim": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 2 {
+ return nil, ErrDSLArguments
+ }
return strings.Trim(types.ToString(args[0]), types.ToString(args[1])), nil
},
"trimleft": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 2 {
+ return nil, ErrDSLArguments
+ }
return strings.TrimLeft(types.ToString(args[0]), types.ToString(args[1])), nil
},
"trimright": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 2 {
+ return nil, ErrDSLArguments
+ }
return strings.TrimRight(types.ToString(args[0]), types.ToString(args[1])), nil
},
"trimspace": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 1 {
+ return nil, ErrDSLArguments
+ }
return strings.TrimSpace(types.ToString(args[0])), nil
},
"trimprefix": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 2 {
+ return nil, ErrDSLArguments
+ }
return strings.TrimPrefix(types.ToString(args[0]), types.ToString(args[1])), nil
},
"trimsuffix": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 2 {
+ return nil, ErrDSLArguments
+ }
return strings.TrimSuffix(types.ToString(args[0]), types.ToString(args[1])), nil
},
"reverse": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 1 {
+ return nil, ErrDSLArguments
+ }
return reverseString(types.ToString(args[0])), nil
},
// encoding
"base64": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 1 {
+ return nil, ErrDSLArguments
+ }
sEnc := base64.StdEncoding.EncodeToString([]byte(types.ToString(args[0])))
return sEnc, nil
},
+ "gzip": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 1 {
+ return nil, ErrDSLArguments
+ }
+ buffer := &bytes.Buffer{}
+ writer := gzip.NewWriter(buffer)
+ if _, err := writer.Write([]byte(args[0].(string))); err != nil {
+ return "", err
+ }
+ _ = writer.Close()
+
+ return buffer.String(), nil
+ },
// python encodes to base64 with lines of 76 bytes terminated by new line "\n"
"base64_py": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 1 {
+ return nil, ErrDSLArguments
+ }
sEnc := base64.StdEncoding.EncodeToString([]byte(types.ToString(args[0])))
return deserialization.InsertInto(sEnc, 76, '\n'), nil
},
"base64_decode": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 1 {
+ return nil, ErrDSLArguments
+ }
return base64.StdEncoding.DecodeString(types.ToString(args[0]))
},
"url_encode": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 1 {
+ return nil, ErrDSLArguments
+ }
return url.QueryEscape(types.ToString(args[0])), nil
},
"url_decode": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 1 {
+ return nil, ErrDSLArguments
+ }
return url.QueryUnescape(types.ToString(args[0]))
},
"hex_encode": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 1 {
+ return nil, ErrDSLArguments
+ }
return hex.EncodeToString([]byte(types.ToString(args[0]))), nil
},
"hex_decode": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 1 {
+ return nil, ErrDSLArguments
+ }
hx, _ := hex.DecodeString(types.ToString(args[0]))
return string(hx), nil
},
"html_escape": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 1 {
+ return nil, ErrDSLArguments
+ }
return html.EscapeString(types.ToString(args[0])), nil
},
"html_unescape": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 1 {
+ return nil, ErrDSLArguments
+ }
return html.UnescapeString(types.ToString(args[0])), nil
},
// hashing
"md5": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 1 {
+ return nil, ErrDSLArguments
+ }
hash := md5.Sum([]byte(types.ToString(args[0])))
return hex.EncodeToString(hash[:]), nil
},
"sha256": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 1 {
+ return nil, ErrDSLArguments
+ }
h := sha256.New()
if _, err := h.Write([]byte(types.ToString(args[0]))); err != nil {
return nil, err
@@ -120,6 +207,9 @@ var functions = map[string]govaluate.ExpressionFunction{
return hex.EncodeToString(h.Sum(nil)), nil
},
"sha1": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 1 {
+ return nil, ErrDSLArguments
+ }
h := sha1.New()
if _, err := h.Write([]byte(types.ToString(args[0]))); err != nil {
return nil, err
@@ -127,13 +217,22 @@ var functions = map[string]govaluate.ExpressionFunction{
return hex.EncodeToString(h.Sum(nil)), nil
},
"mmh3": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 1 {
+ return nil, ErrDSLArguments
+ }
return fmt.Sprintf("%d", int32(murmur3.Sum32WithSeed([]byte(types.ToString(args[0])), 0))), nil
},
// search
"contains": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 2 {
+ return nil, ErrDSLArguments
+ }
return strings.Contains(types.ToString(args[0]), types.ToString(args[1])), nil
},
"regex": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 2 {
+ return nil, ErrDSLArguments
+ }
compiled, err := regexp.Compile(types.ToString(args[0]))
if err != nil {
return nil, err
@@ -142,6 +241,9 @@ var functions = map[string]govaluate.ExpressionFunction{
},
// random generators
"rand_char": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 2 {
+ return nil, ErrDSLArguments
+ }
chars := letters + numbers
bad := ""
if len(args) >= 1 {
@@ -154,6 +256,9 @@ var functions = map[string]govaluate.ExpressionFunction{
return chars[rand.Intn(len(chars))], nil
},
"rand_base": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 3 {
+ return nil, ErrDSLArguments
+ }
l := 0
bad := ""
base := letters + numbers
@@ -171,6 +276,9 @@ var functions = map[string]govaluate.ExpressionFunction{
return randSeq(base, l), nil
},
"rand_text_alphanumeric": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 2 {
+ return nil, ErrDSLArguments
+ }
l := 0
bad := ""
chars := letters + numbers
@@ -185,6 +293,9 @@ var functions = map[string]govaluate.ExpressionFunction{
return randSeq(chars, l), nil
},
"rand_text_alpha": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 2 {
+ return nil, ErrDSLArguments
+ }
l := 0
bad := ""
chars := letters
@@ -199,6 +310,9 @@ var functions = map[string]govaluate.ExpressionFunction{
return randSeq(chars, l), nil
},
"rand_text_numeric": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 2 {
+ return nil, ErrDSLArguments
+ }
l := 0
bad := ""
chars := numbers
@@ -213,6 +327,9 @@ var functions = map[string]govaluate.ExpressionFunction{
return randSeq(chars, l), nil
},
"rand_int": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 2 {
+ return nil, ErrDSLArguments
+ }
min := 0
max := math.MaxInt32
@@ -231,16 +348,22 @@ var functions = map[string]govaluate.ExpressionFunction{
}
now := time.Now()
offset := now.Add(time.Duration(seconds) * time.Second)
- return offset.Unix(), nil
+ return float64(offset.Unix()), nil
},
// Time Functions
"waitfor": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 1 {
+ return nil, ErrDSLArguments
+ }
seconds := args[0].(float64)
time.Sleep(time.Duration(seconds) * time.Second)
return true, nil
},
// deserialization Functions
"generate_java_gadget": func(args ...interface{}) (interface{}, error) {
+ if len(args) != 3 {
+ return nil, ErrDSLArguments
+ }
gadget := args[0].(string)
cmd := args[1].(string)
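
With the stricter argument checks above, each helper now rejects calls with the wrong arity; a small sketch of the observable behaviour (helper names taken from the map above):

package main

import (
	"fmt"

	"github.com/projectdiscovery/nuclei/v2/pkg/operators/common/dsl"
)

func main() {
	helpers := dsl.HelperFunctions()

	// "contains" expects exactly two arguments after this change.
	if _, err := helpers["contains"]("only-one-argument"); err != nil {
		fmt.Println(err) // invalid arguments provided to dsl
	}

	// With the correct arity the helper behaves as before.
	ok, _ := helpers["contains"]("hello world", "world")
	fmt.Println(ok) // true
}
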
diff --git a/v2/pkg/operators/common/dsl/dsl_test.go b/v2/pkg/operators/common/dsl/dsl_test.go
index bf2c5bfef..e5bfb0d64 100644
--- a/v2/pkg/operators/common/dsl/dsl_test.go
+++ b/v2/pkg/operators/common/dsl/dsl_test.go
@@ -1,9 +1,16 @@
package dsl
import (
+ "compress/gzip"
+ "io/ioutil"
+ "strings"
"testing"
+ "time"
+ "github.com/Knetic/govaluate"
"github.com/stretchr/testify/require"
+
+ "github.com/projectdiscovery/nuclei/v2/pkg/types"
)
func TestDSLURLEncodeDecode(t *testing.T) {
@@ -17,3 +24,25 @@ func TestDSLURLEncodeDecode(t *testing.T) {
require.Nil(t, err, "could not url encode")
require.Equal(t, "&test\"", decoded, "could not get url decoded data")
}
+
+func TestDSLTimeComparison(t *testing.T) {
+ compiled, err := govaluate.NewEvaluableExpressionWithFunctions("unixtime() > not_after", HelperFunctions())
+ require.Nil(t, err, "could not compare time")
+
+ result, err := compiled.Evaluate(map[string]interface{}{"not_after": float64(time.Now().Unix() - 1000)})
+ require.Nil(t, err, "could not evaluate compare time")
+ require.Equal(t, true, result, "could not get url encoded data")
+}
+
+func TestDSLGzipSerialize(t *testing.T) {
+ compiled, err := govaluate.NewEvaluableExpressionWithFunctions("gzip(\"hello world\")", HelperFunctions())
+ require.Nil(t, err, "could not compare time")
+
+ result, err := compiled.Evaluate(make(map[string]interface{}))
+ require.Nil(t, err, "could not evaluate compare time")
+
+ reader, _ := gzip.NewReader(strings.NewReader(types.ToString(result)))
+ data, _ := ioutil.ReadAll(reader)
+
+ require.Equal(t, "hello world", string(data), "could not get gzip encoded data")
+}
diff --git a/v2/pkg/operators/extractors/compile.go b/v2/pkg/operators/extractors/compile.go
index e688c9520..1d03184be 100644
--- a/v2/pkg/operators/extractors/compile.go
+++ b/v2/pkg/operators/extractors/compile.go
@@ -10,13 +10,12 @@ import (
// CompileExtractors performs the initial setup operation on an extractor
func (e *Extractor) CompileExtractors() error {
- var ok bool
// Set up the extractor type
- e.extractorType, ok = ExtractorTypes[e.Type]
- if !ok {
+ computedType, err := toExtractorTypes(e.GetType().String())
+ if err != nil {
return fmt.Errorf("unknown extractor type specified: %s", e.Type)
}
-
+ e.extractorType = computedType
// Compile the regexes
for _, regex := range e.Regex {
compiled, err := regexp.Compile(regex)
@@ -25,7 +24,6 @@ func (e *Extractor) CompileExtractors() error {
}
e.regexCompiled = append(e.regexCompiled, compiled)
}
-
for i, kval := range e.KVal {
e.KVal[i] = strings.ToLower(kval)
}
@@ -42,9 +40,14 @@ func (e *Extractor) CompileExtractors() error {
e.jsonCompiled = append(e.jsonCompiled, compiled)
}
- // Set up the part of the request to match, if any.
- if e.Part == "" {
- e.Part = "body"
+ if e.CaseInsensitive {
+ if e.GetType() != KValExtractor {
+ return fmt.Errorf("case-insensitive flag is supported only for 'kval' extractors (not '%s')", e.Type)
+ }
+ for i := range e.KVal {
+ e.KVal[i] = strings.ToLower(e.KVal[i])
+ }
}
+
return nil
}
diff --git a/v2/pkg/operators/extractors/extract.go b/v2/pkg/operators/extractors/extract.go
index 72440c6ec..e06428ac2 100644
--- a/v2/pkg/operators/extractors/extract.go
+++ b/v2/pkg/operators/extractors/extract.go
@@ -1,9 +1,8 @@
package extractors
import (
- "strings"
-
"encoding/json"
+ "strings"
"github.com/antchfx/htmlquery"
@@ -34,8 +33,18 @@ func (e *Extractor) ExtractRegex(corpus string) map[string]struct{} {
// ExtractKval extracts key value pairs from a data map
func (e *Extractor) ExtractKval(data map[string]interface{}) map[string]struct{} {
- results := make(map[string]struct{})
+ if e.CaseInsensitive {
+ inputData := data
+ data = make(map[string]interface{}, len(inputData))
+ for k, v := range inputData {
+ if s, ok := v.(string); ok {
+ v = strings.ToLower(s)
+ }
+ data[strings.ToLower(k)] = v
+ }
+ }
+ results := make(map[string]struct{})
for _, k := range e.KVal {
item, ok := data[k]
if !ok {
diff --git a/v2/pkg/operators/extractors/extractor_types.go b/v2/pkg/operators/extractors/extractor_types.go
new file mode 100644
index 000000000..af2a5ea78
--- /dev/null
+++ b/v2/pkg/operators/extractors/extractor_types.go
@@ -0,0 +1,105 @@
+package extractors
+
+import (
+ "encoding/json"
+ "errors"
+ "strings"
+
+ "github.com/alecthomas/jsonschema"
+)
+
+// ExtractorType is the type of the extractor specified
+type ExtractorType int
+
+// name:ExtractorType
+const (
+ // name:regex
+ RegexExtractor ExtractorType = iota + 1
+ // name:kval
+ KValExtractor
+ // name:xpath
+ XPathExtractor
+ // name:json
+ JSONExtractor
+ limit
+)
+
+// extractorMappings is a table for conversion of extractor types to their string representation.
+var extractorMappings = map[ExtractorType]string{
+ RegexExtractor: "regex",
+ KValExtractor: "kval",
+ XPathExtractor: "xpath",
+ JSONExtractor: "json",
+}
+
+// GetType returns the type of the extractor
+func (e *Extractor) GetType() ExtractorType {
+ return e.Type.ExtractorType
+}
+
+// GetSupportedExtractorTypes returns the list of supported extractor types
+func GetSupportedExtractorTypes() []ExtractorType {
+ var result []ExtractorType
+ for index := ExtractorType(1); index < limit; index++ {
+ result = append(result, index)
+ }
+ return result
+}
+
+func toExtractorTypes(valueToMap string) (ExtractorType, error) {
+ normalizedValue := normalizeValue(valueToMap)
+ for key, currentValue := range extractorMappings {
+ if normalizedValue == currentValue {
+ return key, nil
+ }
+ }
+ return -1, errors.New("Invalid extractor type: " + valueToMap)
+}
+
+func normalizeValue(value string) string {
+ return strings.TrimSpace(strings.ToLower(value))
+}
+
+func (t ExtractorType) String() string {
+ return extractorMappings[t]
+}
+
+// ExtractorTypeHolder is used to hold the internal type of the extractor
+type ExtractorTypeHolder struct {
+ ExtractorType ExtractorType `mapping:"true"`
+}
+
+func (holder ExtractorTypeHolder) JSONSchemaType() *jsonschema.Type {
+ gotType := &jsonschema.Type{
+ Type: "string",
+ Title: "type of the extractor",
+ Description: "Type of the extractor",
+ }
+ for _, types := range GetSupportedExtractorTypes() {
+ gotType.Enum = append(gotType.Enum, types.String())
+ }
+ return gotType
+}
+
+func (holder *ExtractorTypeHolder) UnmarshalYAML(unmarshal func(interface{}) error) error {
+ var marshalledTypes string
+ if err := unmarshal(&marshalledTypes); err != nil {
+ return err
+ }
+
+ computedType, err := toExtractorTypes(marshalledTypes)
+ if err != nil {
+ return err
+ }
+
+ holder.ExtractorType = computedType
+ return nil
+}
+
+func (holder *ExtractorTypeHolder) MarshalJSON() ([]byte, error) {
+ return json.Marshal(holder.ExtractorType.String())
+}
+
+func (holder ExtractorTypeHolder) MarshalYAML() (interface{}, error) {
+ return holder.ExtractorType.String(), nil
+}
diff --git a/v2/pkg/operators/extractors/extractors.go b/v2/pkg/operators/extractors/extractors.go
index 5c126a271..693ecb092 100644
--- a/v2/pkg/operators/extractors/extractors.go
+++ b/v2/pkg/operators/extractors/extractors.go
@@ -16,12 +16,7 @@ type Extractor struct {
Name string `yaml:"name,omitempty" jsonschema:"title=name of the extractor,description=Name of the extractor"`
// description: |
// Type is the type of the extractor.
- // values:
- // - "regex"
- // - "kval"
- // - "json"
- // - "xpath"
- Type string `yaml:"type" jsonschema:"title=type of the extractor,description=Type of the extractor,enum=regex,enum=kval,enum=json,enum=xpath"`
+ Type ExtractorTypeHolder `json:"type,omitempty" yaml:"type"`
// extractorType is the internal type of the extractor
extractorType ExtractorType
@@ -105,31 +100,11 @@ type Extractor struct {
// Internal, when set to true will allow using the value extracted
// in the next request for some protocols (like HTTP).
Internal bool `yaml:"internal,omitempty" jsonschema:"title=mark extracted value for internal variable use,description=Internal when set to true will allow using the value extracted in the next request for some protocols"`
-}
-
-// ExtractorType is the type of the extractor specified
-type ExtractorType = int
-
-const (
- // RegexExtractor extracts responses with regexes
- RegexExtractor ExtractorType = iota + 1
- // KValExtractor extracts responses with key:value
- KValExtractor
- // XPathExtractor extracts responses with Xpath selectors
- XPathExtractor
- // JSONExtractor extracts responses with json
- JSONExtractor
-)
-
-// ExtractorTypes is a table for conversion of extractor type from string.
-var ExtractorTypes = map[string]ExtractorType{
- "regex": RegexExtractor,
- "kval": KValExtractor,
- "xpath": XPathExtractor,
- "json": JSONExtractor,
-}
-
-// GetType returns the type of the matcher
-func (e *Extractor) GetType() ExtractorType {
- return e.extractorType
+
+ // description: |
+ // CaseInsensitive enables case-insensitive extractions. Default is false.
+ // values:
+ // - false
+ // - true
+ CaseInsensitive bool `yaml:"case-insensitive,omitempty" jsonschema:"title=use case insensitive extract,description=use case insensitive extract"`
}
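
A brief sketch of the new case-insensitive flag on a kval extractor (struct fields as defined above; the header map used as input is illustrative):

package main

import (
	"fmt"

	"github.com/projectdiscovery/nuclei/v2/pkg/operators/extractors"
)

func main() {
	extractor := &extractors.Extractor{
		Type:            extractors.ExtractorTypeHolder{ExtractorType: extractors.KValExtractor},
		KVal:            []string{"Content-Type"},
		CaseInsensitive: true,
	}
	if err := extractor.CompileExtractors(); err != nil {
		panic(err)
	}

	// Keys and string values are lowercased before lookup when CaseInsensitive is set.
	results := extractor.ExtractKval(map[string]interface{}{"Content-Type": "Application/JSON"})
	for value := range results {
		fmt.Println(value) // application/json
	}
}
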
diff --git a/v2/pkg/operators/matchers/compile.go b/v2/pkg/operators/matchers/compile.go
index 09213e23a..b571085c4 100644
--- a/v2/pkg/operators/matchers/compile.go
+++ b/v2/pkg/operators/matchers/compile.go
@@ -4,6 +4,7 @@ import (
"encoding/hex"
"fmt"
"regexp"
+ "strings"
"github.com/Knetic/govaluate"
@@ -11,54 +12,74 @@ import (
)
// CompileMatchers performs the initial setup operation on a matcher
-func (m *Matcher) CompileMatchers() error {
+func (matcher *Matcher) CompileMatchers() error {
var ok bool
// Support hexadecimal encoding for matchers too.
- if m.Encoding == "hex" {
- for i, word := range m.Words {
+ if matcher.Encoding == "hex" {
+ for i, word := range matcher.Words {
if decoded, err := hex.DecodeString(word); err == nil && len(decoded) > 0 {
- m.Words[i] = string(decoded)
+ matcher.Words[i] = string(decoded)
}
}
}
// Set up the matcher type
- m.matcherType, ok = MatcherTypes[m.Type]
- if !ok {
- return fmt.Errorf("unknown matcher type specified: %s", m.Type)
+ computedType, err := toMatcherTypes(matcher.GetType().String())
+ if err != nil {
+ return fmt.Errorf("unknown matcher type specified: %s", matcher.Type)
}
+
+ matcher.matcherType = computedType
// By default, match on body if user hasn't provided any specific items
- if m.Part == "" {
- m.Part = "body"
+ if matcher.Part == "" {
+ matcher.Part = "body"
}
// Compile the regexes
- for _, regex := range m.Regex {
+ for _, regex := range matcher.Regex {
compiled, err := regexp.Compile(regex)
if err != nil {
return fmt.Errorf("could not compile regex: %s", regex)
}
- m.regexCompiled = append(m.regexCompiled, compiled)
+ matcher.regexCompiled = append(matcher.regexCompiled, compiled)
+ }
+
+ // Compile and validate binary values in the matcher
+ for _, value := range matcher.Binary {
+ if decoded, err := hex.DecodeString(value); err != nil {
+ return fmt.Errorf("could not hex decode binary: %s", value)
+ } else {
+ matcher.binaryDecoded = append(matcher.binaryDecoded, string(decoded))
+ }
}
// Compile the dsl expressions
- for _, expr := range m.DSL {
+ for _, expr := range matcher.DSL {
compiled, err := govaluate.NewEvaluableExpressionWithFunctions(expr, dsl.HelperFunctions())
if err != nil {
return fmt.Errorf("could not compile dsl: %s", expr)
}
- m.dslCompiled = append(m.dslCompiled, compiled)
+ matcher.dslCompiled = append(matcher.dslCompiled, compiled)
}
// Set up the condition type, if any.
- if m.Condition != "" {
- m.condition, ok = ConditionTypes[m.Condition]
+ if matcher.Condition != "" {
+ matcher.condition, ok = ConditionTypes[matcher.Condition]
if !ok {
- return fmt.Errorf("unknown condition specified: %s", m.Condition)
+ return fmt.Errorf("unknown condition specified: %s", matcher.Condition)
}
} else {
- m.condition = ORCondition
+ matcher.condition = ORCondition
+ }
+
+ if matcher.CaseInsensitive {
+ if matcher.GetType() != WordsMatcher {
+ return fmt.Errorf("case-insensitive flag is supported only for 'word' matchers (not '%s')", matcher.Type)
+ }
+ for i := range matcher.Words {
+ matcher.Words[i] = strings.ToLower(matcher.Words[i])
+ }
}
return nil
}
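
The matcher side gets the equivalent behaviour; a minimal sketch of a case-insensitive word matcher built from the fields compiled above:

package main

import (
	"fmt"

	"github.com/projectdiscovery/nuclei/v2/pkg/operators/matchers"
)

func main() {
	matcher := &matchers.Matcher{
		Type:            matchers.MatcherTypeHolder{MatcherType: matchers.WordsMatcher},
		CaseInsensitive: true,
		Words:           []string{"Admin Panel"},
	}
	if err := matcher.CompileMatchers(); err != nil {
		panic(err)
	}

	// Words are lowercased at compile time and the corpus at match time,
	// so the comparison ignores case.
	matched, _ := matcher.MatchWords("ADMIN panel found", nil)
	fmt.Println(matched) // true
}
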
diff --git a/v2/pkg/operators/matchers/match.go b/v2/pkg/operators/matchers/match.go
index fff0055de..22fb9e108 100644
--- a/v2/pkg/operators/matchers/match.go
+++ b/v2/pkg/operators/matchers/match.go
@@ -1,19 +1,20 @@
package matchers
import (
- "encoding/hex"
"strings"
+ "github.com/Knetic/govaluate"
"github.com/projectdiscovery/gologger"
+ "github.com/projectdiscovery/nuclei/v2/pkg/operators/common/dsl"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/expressions"
)
// MatchStatusCode matches a status code check against a corpus
-func (m *Matcher) MatchStatusCode(statusCode int) bool {
+func (matcher *Matcher) MatchStatusCode(statusCode int) bool {
// Iterate over all the status codes accepted as valid
//
// Status codes don't support AND conditions.
- for _, status := range m.Status {
+ for _, status := range matcher.Status {
// Continue if the status codes don't match
if statusCode != status {
continue
@@ -25,11 +26,11 @@ func (m *Matcher) MatchStatusCode(statusCode int) bool {
}
// MatchSize matches a size check against a corpus
-func (m *Matcher) MatchSize(length int) bool {
+func (matcher *Matcher) MatchSize(length int) bool {
// Iterate over all the sizes accepted as valid
//
// Sizes codes don't support AND conditions.
- for _, size := range m.Size {
+ for _, size := range matcher.Size {
// Continue if the size doesn't match
if length != size {
continue
@@ -41,16 +42,20 @@ func (m *Matcher) MatchSize(length int) bool {
}
// MatchWords matches a word check against a corpus.
-func (m *Matcher) MatchWords(corpus string, dynamicValues map[string]interface{}) (bool, []string) {
+func (matcher *Matcher) MatchWords(corpus string, data map[string]interface{}) (bool, []string) {
+ if matcher.CaseInsensitive {
+ corpus = strings.ToLower(corpus)
+ }
+
var matchedWords []string
// Iterate over all the words accepted as valid
- for i, word := range m.Words {
- if dynamicValues == nil {
- dynamicValues = make(map[string]interface{})
+ for i, word := range matcher.Words {
+ if data == nil {
+ data = make(map[string]interface{})
}
var err error
- word, err = expressions.Evaluate(word, dynamicValues)
+ word, err = expressions.Evaluate(word, data)
if err != nil {
continue
}
@@ -58,7 +63,7 @@ func (m *Matcher) MatchWords(corpus string, dynamicValues map[string]interface{}
if !strings.Contains(corpus, word) {
// If we are in an AND request and a match failed,
// return false as the AND condition fails on any single mismatch.
- if m.condition == ANDCondition {
+ if matcher.condition == ANDCondition {
return false, []string{}
}
// Continue with the flow since it's an OR Condition.
@@ -66,14 +71,14 @@ func (m *Matcher) MatchWords(corpus string, dynamicValues map[string]interface{}
}
// If the condition was an OR, return on the first match.
- if m.condition == ORCondition {
+ if matcher.condition == ORCondition {
return true, []string{word}
}
matchedWords = append(matchedWords, word)
// If we are at the end of the words, return with true
- if len(m.Words)-1 == i {
+ if len(matcher.Words)-1 == i {
return true, matchedWords
}
}
@@ -81,15 +86,15 @@ func (m *Matcher) MatchWords(corpus string, dynamicValues map[string]interface{}
}
// MatchRegex matches a regex check against a corpus
-func (m *Matcher) MatchRegex(corpus string) (bool, []string) {
+func (matcher *Matcher) MatchRegex(corpus string) (bool, []string) {
var matchedRegexes []string
// Iterate over all the regexes accepted as valid
- for i, regex := range m.regexCompiled {
+ for i, regex := range matcher.regexCompiled {
// Continue if the regex doesn't match
if !regex.MatchString(corpus) {
// If we are in an AND request and a match failed,
// return false as the AND condition fails on any single mismatch.
- if m.condition == ANDCondition {
+ if matcher.condition == ANDCondition {
return false, []string{}
}
// Continue with the flow since it's an OR Condition.
@@ -98,14 +103,14 @@ func (m *Matcher) MatchRegex(corpus string) (bool, []string) {
currentMatches := regex.FindAllString(corpus, -1)
// If the condition was an OR, return on the first match.
- if m.condition == ORCondition {
+ if matcher.condition == ORCondition {
return true, currentMatches
}
matchedRegexes = append(matchedRegexes, currentMatches...)
// If we are at the end of the regex, return with true
- if len(m.regexCompiled)-1 == i {
+ if len(matcher.regexCompiled)-1 == i {
return true, matchedRegexes
}
}
@@ -113,23 +118,14 @@ func (m *Matcher) MatchRegex(corpus string) (bool, []string) {
}
// MatchBinary matches a binary check against a corpus
-func (m *Matcher) MatchBinary(corpus string) (bool, []string) {
+func (matcher *Matcher) MatchBinary(corpus string) (bool, []string) {
var matchedBinary []string
// Iterate over all the words accepted as valid
- for i, binary := range m.Binary {
- // Continue if the word doesn't match
- hexa, err := hex.DecodeString(binary)
- if err != nil {
- gologger.Warning().Msgf("Could not hex encode the given binary matcher value: '%s'", binary)
- if m.condition == ANDCondition {
- return false, []string{}
- }
- continue
- }
- if !strings.Contains(corpus, string(hexa)) {
+ for i, binary := range matcher.binaryDecoded {
+ if !strings.Contains(corpus, binary) {
// If we are in an AND request and a match failed,
// return false as the AND condition fails on any single mismatch.
- if m.condition == ANDCondition {
+ if matcher.condition == ANDCondition {
return false, []string{}
}
// Continue with the flow since it's an OR Condition.
@@ -137,14 +133,14 @@ func (m *Matcher) MatchBinary(corpus string) (bool, []string) {
}
// If the condition was an OR, return on the first match.
- if m.condition == ORCondition {
- return true, []string{string(hexa)}
+ if matcher.condition == ORCondition {
+ return true, []string{binary}
}
- matchedBinary = append(matchedBinary, string(hexa))
+ matchedBinary = append(matchedBinary, binary)
// If we are at the end of the words, return with true
- if len(m.Binary)-1 == i {
+ if len(matcher.Binary)-1 == i {
return true, matchedBinary
}
}
@@ -152,9 +148,21 @@ func (m *Matcher) MatchBinary(corpus string) (bool, []string) {
}
// MatchDSL matches on a generic map result
-func (m *Matcher) MatchDSL(data map[string]interface{}) bool {
+func (matcher *Matcher) MatchDSL(data map[string]interface{}) bool {
// Iterate over all the expressions accepted as valid
- for i, expression := range m.dslCompiled {
+ for i, expression := range matcher.dslCompiled {
+ if varErr := expressions.ContainsUnresolvedVariables(expression.String()); varErr != nil {
+ resolvedExpression, err := expressions.Evaluate(expression.String(), data)
+ if err != nil {
+ gologger.Warning().Msgf("Could not evaluate expression: %s, error: %s", matcher.Name, err.Error())
+ return false
+ }
+ expression, err = govaluate.NewEvaluableExpressionWithFunctions(resolvedExpression, dsl.HelperFunctions())
+ if err != nil {
+ gologger.Warning().Msgf("Could not evaluate expression: %s, error: %s", matcher.Name, err.Error())
+ return false
+ }
+ }
result, err := expression.Evaluate(data)
if err != nil {
continue
@@ -167,7 +175,7 @@ func (m *Matcher) MatchDSL(data map[string]interface{}) bool {
if !ok || !bResult {
// If we are in an AND request and a match failed,
// return false as the AND condition fails on any single mismatch.
- if m.condition == ANDCondition {
+ if matcher.condition == ANDCondition {
return false
}
// Continue with the flow since it's an OR Condition.
@@ -175,12 +183,12 @@ func (m *Matcher) MatchDSL(data map[string]interface{}) bool {
}
// If the condition was an OR, return on the first match.
- if m.condition == ORCondition {
+ if matcher.condition == ORCondition {
return true
}
// If we are at the end of the dsl, return with true
- if len(m.dslCompiled)-1 == i {
+ if len(matcher.dslCompiled)-1 == i {
return true
}
}
diff --git a/v2/pkg/operators/matchers/match_test.go b/v2/pkg/operators/matchers/match_test.go
index 4c2f9bc0c..68a6d1b01 100644
--- a/v2/pkg/operators/matchers/match_test.go
+++ b/v2/pkg/operators/matchers/match_test.go
@@ -3,6 +3,8 @@ package matchers
import (
"testing"
+ "github.com/Knetic/govaluate"
+ "github.com/projectdiscovery/nuclei/v2/pkg/operators/common/dsl"
"github.com/stretchr/testify/require"
)
@@ -19,7 +21,7 @@ func TestWordANDCondition(t *testing.T) {
}
func TestRegexANDCondition(t *testing.T) {
- m := &Matcher{Type: "regex", Condition: "and", Regex: []string{"[a-z]{3}", "\\d{2}"}}
+ m := &Matcher{Type: MatcherTypeHolder{MatcherType: RegexMatcher}, Condition: "and", Regex: []string{"[a-z]{3}", "\\d{2}"}}
err := m.CompileMatchers()
require.Nil(t, err)
@@ -49,7 +51,7 @@ func TestORCondition(t *testing.T) {
}
func TestRegexOrCondition(t *testing.T) {
- m := &Matcher{Type: "regex", Condition: "or", Regex: []string{"[a-z]{3}", "\\d{2}"}}
+ m := &Matcher{Type: MatcherTypeHolder{MatcherType: RegexMatcher}, Condition: "or", Regex: []string{"[a-z]{3}", "\\d{2}"}}
err := m.CompileMatchers()
require.Nil(t, err)
@@ -63,7 +65,7 @@ func TestRegexOrCondition(t *testing.T) {
}
func TestHexEncoding(t *testing.T) {
- m := &Matcher{Encoding: "hex", Type: "word", Part: "body", Words: []string{"50494e47"}}
+ m := &Matcher{Encoding: "hex", Type: MatcherTypeHolder{MatcherType: WordsMatcher}, Part: "body", Words: []string{"50494e47"}}
err := m.CompileMatchers()
require.Nil(t, err, "could not compile matcher")
@@ -71,3 +73,19 @@ func TestHexEncoding(t *testing.T) {
require.True(t, isMatched, "Could not match valid Hex condition")
require.Equal(t, m.Words, matched)
}
+
+func TestMatcher_MatchDSL(t *testing.T) {
+ compiled, err := govaluate.NewEvaluableExpressionWithFunctions("contains(body, \"{{VARIABLE}}\")", dsl.HelperFunctions())
+ require.Nil(t, err, "couldn't compile expression")
+
+ m := &Matcher{Type: MatcherTypeHolder{MatcherType: DSLMatcher}, dslCompiled: []*govaluate.EvaluableExpression{compiled}}
+ err = m.CompileMatchers()
+ require.Nil(t, err, "could not compile matcher")
+
+ values := []string{"PING", "pong"}
+
+ for _, value := range values {
+ isMatched := m.MatchDSL(map[string]interface{}{"body": value, "VARIABLE": value})
+ require.True(t, isMatched)
+ }
+}
diff --git a/v2/pkg/operators/matchers/matchers.go b/v2/pkg/operators/matchers/matchers.go
index 88ee413e1..936e4a72a 100644
--- a/v2/pkg/operators/matchers/matchers.go
+++ b/v2/pkg/operators/matchers/matchers.go
@@ -10,14 +10,7 @@ import (
type Matcher struct {
// description: |
// Type is the type of the matcher.
- // values:
- // - "status"
- // - "size"
- // - "word"
- // - "regex"
- // - "binary"
- // - "dsl"
- Type string `yaml:"type" jsonschema:"title=type of matcher,description=Type of the matcher,enum=status,enum=size,enum=word,enum=regex,enum=binary,enum=dsl"`
+ Type MatcherTypeHolder `yaml:"type" jsonschema:"title=type of matcher,description=Type of the matcher,enum=status,enum=size,enum=word,enum=regex,enum=binary,enum=dsl"`
// description: |
// Condition is the optional condition between two matcher variables. By default,
// the condition is assumed to be OR.
@@ -62,7 +55,7 @@ type Matcher struct {
// description: |
// Words contains word patterns required to be present in the response part.
// examples:
- // - name: Match for outlook mail protection domain
+ // - name: Match for Outlook mail protection domain
// value: >
// []string{"mail.protection.outlook.com"}
// - name: Match for application/json in response headers
@@ -105,42 +98,21 @@ type Matcher struct {
// values:
// - "hex"
Encoding string `yaml:"encoding,omitempty" jsonschema:"title=encoding for word field,description=Optional encoding for the word fields,enum=hex"`
+ // description: |
+ // CaseInsensitive enables case-insensitive matches. Default is false.
+ // values:
+ // - false
+ // - true
+ CaseInsensitive bool `yaml:"case-insensitive,omitempty" jsonschema:"title=use case insensitive match,description=use case insensitive match"`
// cached data for the compiled matcher
condition ConditionType
matcherType MatcherType
+ binaryDecoded []string
regexCompiled []*regexp.Regexp
dslCompiled []*govaluate.EvaluableExpression
}
-// MatcherType is the type of the matcher specified
-type MatcherType = int
-
-const (
- // WordsMatcher matches responses with words
- WordsMatcher MatcherType = iota + 1
- // RegexMatcher matches responses with regexes
- RegexMatcher
- // BinaryMatcher matches responses with words
- BinaryMatcher
- // StatusMatcher matches responses with status codes
- StatusMatcher
- // SizeMatcher matches responses with response size
- SizeMatcher
- // DSLMatcher matches based upon dsl syntax
- DSLMatcher
-)
-
-// MatcherTypes is a table for conversion of matcher type from string.
-var MatcherTypes = map[string]MatcherType{
- "status": StatusMatcher,
- "size": SizeMatcher,
- "word": WordsMatcher,
- "regex": RegexMatcher,
- "binary": BinaryMatcher,
- "dsl": DSLMatcher,
-}
-
// ConditionType is the type of condition for matcher
type ConditionType int
@@ -158,22 +130,17 @@ var ConditionTypes = map[string]ConditionType{
}
// Result reverts the results of the match if the matcher is of type negative.
-func (m *Matcher) Result(data bool) bool {
- if m.Negative {
+func (matcher *Matcher) Result(data bool) bool {
+ if matcher.Negative {
return !data
}
return data
}
// ResultWithMatchedSnippet returns true and the matched snippet, or false and an empty string
-func (m *Matcher) ResultWithMatchedSnippet(data bool, matchedSnippet []string) (bool, []string) {
- if m.Negative {
+func (matcher *Matcher) ResultWithMatchedSnippet(data bool, matchedSnippet []string) (bool, []string) {
+ if matcher.Negative {
return !data, []string{}
}
return data, matchedSnippet
}
-
-// GetType returns the type of the matcher
-func (m *Matcher) GetType() MatcherType {
- return m.matcherType
-}
diff --git a/v2/pkg/operators/matchers/matchers_types.go b/v2/pkg/operators/matchers/matchers_types.go
new file mode 100644
index 000000000..14b5c507f
--- /dev/null
+++ b/v2/pkg/operators/matchers/matchers_types.go
@@ -0,0 +1,115 @@
+package matchers
+
+import (
+ "encoding/json"
+ "errors"
+ "strings"
+
+ "github.com/alecthomas/jsonschema"
+)
+
+// MatcherType is the type of the matcher specified
+type MatcherType int
+
+// name:MatcherType
+const (
+ // name:word
+ WordsMatcher MatcherType = iota + 1
+ // name:regex
+ RegexMatcher
+ // name:binary
+ BinaryMatcher
+ // name:status
+ StatusMatcher
+ // name:size
+ SizeMatcher
+ // name:dsl
+ DSLMatcher
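+	// limit is a sentinel marking the end of the valid MatcherType values;
+	// GetSupportedMatcherTypes iterates up to (but not including) it.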
+ limit
+)
+
+// MatcherTypes is a table mapping matcher types to their string names.
+var MatcherTypes = map[MatcherType]string{
+ StatusMatcher: "status",
+ SizeMatcher: "size",
+ WordsMatcher: "word",
+ RegexMatcher: "regex",
+ BinaryMatcher: "binary",
+ DSLMatcher: "dsl",
+}
+
+// GetType returns the type of the matcher
+func (matcher *Matcher) GetType() MatcherType {
+ return matcher.Type.MatcherType
+}
+
+// GetSupportedMatcherTypes returns list of supported types
+func GetSupportedMatcherTypes() []MatcherType {
+ var result []MatcherType
+ for index := MatcherType(1); index < limit; index++ {
+ result = append(result, index)
+ }
+ return result
+}
+
+func toMatcherTypes(valueToMap string) (MatcherType, error) {
+ normalizedValue := normalizeValue(valueToMap)
+ for key, currentValue := range MatcherTypes {
+ if normalizedValue == currentValue {
+ return key, nil
+ }
+ }
+	return -1, errors.New("invalid matcher type: " + valueToMap)
+}
+
+func normalizeValue(value string) string {
+ return strings.TrimSpace(strings.ToLower(value))
+}
+
+func (t MatcherType) String() string {
+ return MatcherTypes[t]
+}
+
+// MatcherTypeHolder is used to hold internal type of the matcher
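+// A YAML value such as `type: word` is decoded by UnmarshalYAML below into the
+// corresponding MatcherType constant (WordsMatcher in this example).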
+type MatcherTypeHolder struct {
+ MatcherType MatcherType `mapping:"true"`
+}
+
+func (t MatcherTypeHolder) String() string {
+ return t.MatcherType.String()
+}
+
+func (holder MatcherTypeHolder) JSONSchemaType() *jsonschema.Type {
+ gotType := &jsonschema.Type{
+ Type: "string",
+ Title: "type of the matcher",
+		Description: "Type of the matcher",
+ }
+ for _, types := range GetSupportedMatcherTypes() {
+ gotType.Enum = append(gotType.Enum, types.String())
+ }
+ return gotType
+}
+
+func (holder *MatcherTypeHolder) UnmarshalYAML(unmarshal func(interface{}) error) error {
+ var marshalledTypes string
+ if err := unmarshal(&marshalledTypes); err != nil {
+ return err
+ }
+
+ computedType, err := toMatcherTypes(marshalledTypes)
+ if err != nil {
+ return err
+ }
+
+ holder.MatcherType = computedType
+ return nil
+}
+
+func (holder MatcherTypeHolder) MarshalJSON() ([]byte, error) {
+ return json.Marshal(holder.MatcherType.String())
+}
+
+func (holder MatcherTypeHolder) MarshalYAML() (interface{}, error) {
+ return holder.MatcherType.String(), nil
+}
diff --git a/v2/pkg/operators/operators.go b/v2/pkg/operators/operators.go
index af13b4d93..804965d69 100644
--- a/v2/pkg/operators/operators.go
+++ b/v2/pkg/operators/operators.go
@@ -72,11 +72,64 @@ type Result struct {
// OutputExtracts is the list of extracts to be displayed on screen.
OutputExtracts []string
// DynamicValues contains any dynamic values to be templated
- DynamicValues map[string]interface{}
+ DynamicValues map[string][]string
// PayloadValues contains payload values provided by user. (Optional)
PayloadValues map[string]interface{}
}
+// MakeDynamicValuesCallback takes an input dynamic values map and invokes the
+// callback with variations of the data, each variation passed as a
+// map[string]interface{} holding a single value per key.
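+//
+// For example, with input {"a": ["1", "2"], "b": ["3"]} and iterateAllValues
+// set to true, the callback is invoked twice: first with {"a": "1", "b": "3"}
+// and then with {"a": "2", "b": "3"}. With iterateAllValues set to false it is
+// invoked once, with only the first value of every key.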
+func MakeDynamicValuesCallback(input map[string][]string, iterateAllValues bool, callback func(map[string]interface{}) bool) {
+ output := make(map[string]interface{}, len(input))
+
+ if !iterateAllValues {
+ for k, v := range input {
+ if len(v) > 0 {
+ output[k] = v[0]
+ }
+ }
+ callback(output)
+ return
+ }
+ inputIndex := make(map[string]int, len(input))
+
+ var maxValue int
+ for _, v := range input {
+ if len(v) > maxValue {
+ maxValue = len(v)
+ }
+ }
+
+ for i := 0; i < maxValue; i++ {
+ for k, v := range input {
+ if len(v) == 0 {
+ continue
+ }
+ if len(v) == 1 {
+ output[k] = v[0]
+ continue
+ }
+ if gotIndex, ok := inputIndex[k]; !ok {
+ inputIndex[k] = 0
+ output[k] = v[0]
+ } else {
+ newIndex := gotIndex + 1
+ if newIndex >= len(v) {
+ output[k] = v[len(v)-1]
+ continue
+ }
+ output[k] = v[newIndex]
+ inputIndex[k] = newIndex
+ }
+ }
+		// stop the iteration if the callback requests it
+ if callback(output) {
+ return
+ }
+ }
+}
+
// Merge merges a result structure into the other.
func (r *Result) Merge(result *Result) {
if !r.Matched && result.Matched {
@@ -115,7 +168,7 @@ func (operators *Operators) Execute(data map[string]interface{}, match MatchFunc
result := &Result{
Matches: make(map[string][]string),
Extracts: make(map[string][]string),
- DynamicValues: make(map[string]interface{}),
+ DynamicValues: make(map[string][]string),
}
// Start with the extractors first and evaluate them.
@@ -126,8 +179,10 @@ func (operators *Operators) Execute(data map[string]interface{}, match MatchFunc
extractorResults = append(extractorResults, match)
if extractor.Internal {
- if _, ok := result.DynamicValues[extractor.Name]; !ok {
- result.DynamicValues[extractor.Name] = match
+ if data, ok := result.DynamicValues[extractor.Name]; !ok {
+ result.DynamicValues[extractor.Name] = []string{match}
+ } else {
+ result.DynamicValues[extractor.Name] = append(data, match)
}
} else {
result.OutputExtracts = append(result.OutputExtracts, match)
@@ -179,7 +234,7 @@ func getMatcherName(matcher *matchers.Matcher, matcherIndex int) string {
if matcher.Name != "" {
return matcher.Name
} else {
- return matcher.Type + "-" + strconv.Itoa(matcherIndex+1) // making the index start from 1 to be more readable
+ return matcher.Type.String() + "-" + strconv.Itoa(matcherIndex+1) // making the index start from 1 to be more readable
}
}
diff --git a/v2/pkg/operators/operators_test.go b/v2/pkg/operators/operators_test.go
new file mode 100644
index 000000000..204cd57ba
--- /dev/null
+++ b/v2/pkg/operators/operators_test.go
@@ -0,0 +1,57 @@
+package operators
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestMakeDynamicValuesCallback(t *testing.T) {
+ input := map[string][]string{
+ "a": []string{"1", "2"},
+ "b": []string{"3"},
+ "c": []string{},
+ "d": []string{"A", "B", "C"},
+ }
+
+ count := 0
+ MakeDynamicValuesCallback(input, true, func(data map[string]interface{}) bool {
+ count++
+ require.Len(t, data, 3, "could not get correct output length")
+ return false
+ })
+ require.Equal(t, 3, count, "could not get correct result count")
+
+ t.Run("all", func(t *testing.T) {
+ input := map[string][]string{
+ "a": []string{"1"},
+ "b": []string{"2"},
+ "c": []string{"3"},
+ }
+
+ count := 0
+ MakeDynamicValuesCallback(input, true, func(data map[string]interface{}) bool {
+ count++
+ require.Len(t, data, 3, "could not get correct output length")
+ return false
+ })
+ require.Equal(t, 1, count, "could not get correct result count")
+ })
+
+ t.Run("first", func(t *testing.T) {
+ input := map[string][]string{
+ "a": []string{"1", "2"},
+ "b": []string{"3"},
+ "c": []string{},
+ "d": []string{"A", "B", "C"},
+ }
+
+ count := 0
+ MakeDynamicValuesCallback(input, false, func(data map[string]interface{}) bool {
+ count++
+ require.Len(t, data, 3, "could not get correct output length")
+ return false
+ })
+ require.Equal(t, 1, count, "could not get correct result count")
+ })
+}
diff --git a/v2/pkg/output/file_output_writer.go b/v2/pkg/output/file_output_writer.go
index 94765ae23..8c1ab0ebe 100644
--- a/v2/pkg/output/file_output_writer.go
+++ b/v2/pkg/output/file_output_writer.go
@@ -2,11 +2,13 @@ package output
import (
"os"
+ "sync"
)
// fileWriter is a concurrent file based output writer.
type fileWriter struct {
file *os.File
+ mu sync.Mutex
}
// NewFileOutputWriter creates a new buffered writer for a file
@@ -19,16 +21,22 @@ func newFileOutputWriter(file string) (*fileWriter, error) {
}
// WriteString writes an output to the underlying file
-func (w *fileWriter) Write(data []byte) error {
+func (w *fileWriter) Write(data []byte) (int, error) {
+ w.mu.Lock()
+ defer w.mu.Unlock()
if _, err := w.file.Write(data); err != nil {
- return err
+ return 0, err
}
- _, err := w.file.Write([]byte("\n"))
- return err
+ if _, err := w.file.Write([]byte("\n")); err != nil {
+ return 0, err
+ }
+ return len(data) + 1, nil
}
// Close closes the underlying writer flushing everything to disk
func (w *fileWriter) Close() error {
+ w.mu.Lock()
+ defer w.mu.Unlock()
//nolint:errcheck // we don't care whether sync failed or succeeded.
w.file.Sync()
return w.file.Close()
diff --git a/v2/pkg/output/format_screen.go b/v2/pkg/output/format_screen.go
index ddab852ae..2331095d0 100644
--- a/v2/pkg/output/format_screen.go
+++ b/v2/pkg/output/format_screen.go
@@ -27,6 +27,15 @@ func (w *StandardWriter) formatScreen(output *ResultEvent) []byte {
builder.WriteString(w.aurora.BrightGreen(output.ExtractorName).Bold().String())
}
+ if w.matcherStatus {
+ builder.WriteString("] [")
+ if !output.MatcherStatus {
+ builder.WriteString(w.aurora.Red("failed").String())
+ } else {
+ builder.WriteString(w.aurora.Green("matched").String())
+ }
+ }
+
builder.WriteString("] [")
builder.WriteString(w.aurora.BrightBlue(output.Type).String())
builder.WriteString("] ")
@@ -35,7 +44,11 @@ func (w *StandardWriter) formatScreen(output *ResultEvent) []byte {
builder.WriteString(w.severityColors(output.Info.SeverityHolder.Severity))
builder.WriteString("] ")
}
- builder.WriteString(output.Matched)
+ if output.Matched != "" {
+ builder.WriteString(output.Matched)
+ } else {
+ builder.WriteString(output.Host)
+ }
// If any extractors, write the results
if len(output.ExtractedResults) > 0 {
diff --git a/v2/pkg/output/output.go b/v2/pkg/output/output.go
index b90be29c3..e78699f7a 100644
--- a/v2/pkg/output/output.go
+++ b/v2/pkg/output/output.go
@@ -1,9 +1,9 @@
package output
import (
+ "io"
"os"
"regexp"
- "sync"
"time"
"github.com/pkg/errors"
@@ -16,6 +16,8 @@ import (
"github.com/projectdiscovery/nuclei/v2/pkg/model"
"github.com/projectdiscovery/nuclei/v2/pkg/model/types/severity"
"github.com/projectdiscovery/nuclei/v2/pkg/operators"
+ "github.com/projectdiscovery/nuclei/v2/pkg/types"
+ "github.com/projectdiscovery/nuclei/v2/pkg/utils"
)
// Writer is an interface which writes output to somewhere for nuclei events.
@@ -26,6 +28,8 @@ type Writer interface {
Colorizer() aurora.Aurora
// Write writes the event to file and/or screen.
Write(*ResultEvent) error
+ // WriteFailure writes the optional failure event for template to file and/or screen.
+ WriteFailure(event InternalEvent) error
// Request logs a request in the trace log
Request(templateID, url, requestType string, err error)
}
@@ -36,11 +40,11 @@ type StandardWriter struct {
jsonReqResp bool
noTimestamp bool
noMetadata bool
+ matcherStatus bool
aurora aurora.Aurora
- outputFile *fileWriter
- outputMutex *sync.Mutex
- traceFile *fileWriter
- traceMutex *sync.Mutex
+ outputFile io.WriteCloser
+ traceFile io.WriteCloser
+ errorFile io.WriteCloser
severityColors func(severity.Severity) string
}
@@ -54,10 +58,16 @@ type InternalWrappedEvent struct {
InternalEvent InternalEvent
Results []*ResultEvent
OperatorsResult *operators.Result
+ UsesInteractsh bool
}
// ResultEvent is a wrapped result event for a single nuclei output.
type ResultEvent struct {
+ // Template is the relative filename for the template
+ Template string `json:"template,omitempty"`
+ // TemplateURL is the URL of the template for the result inside the nuclei
+ // templates repository if it belongs to the repository.
+ TemplateURL string `json:"template-url,omitempty"`
// TemplateID is the ID of the template for the result.
TemplateID string `json:"template-id"`
// TemplatePath is the path of template
@@ -92,15 +102,17 @@ type ResultEvent struct {
Interaction *server.Interaction `json:"interaction,omitempty"`
// CURLCommand is an optional curl command to reproduce the request
// Only applicable if the report is for HTTP.
- CURLCommand string `json:"curl-command,omitempty"`
+ CURLCommand string `json:"curl-command,omitempty"`
+ // MatcherStatus is the status of the match
+ MatcherStatus bool `json:"matcher-status"`
FileToIndexPosition map[string]int `json:"-"`
}
// NewStandardWriter creates a new output writer based on user configurations
-func NewStandardWriter(colors, noMetadata, noTimestamp, json, jsonReqResp bool, file, traceFile string) (*StandardWriter, error) {
+func NewStandardWriter(colors, noMetadata, noTimestamp, json, jsonReqResp, MatcherStatus bool, file, traceFile string, errorFile string) (*StandardWriter, error) {
auroraColorizer := aurora.NewAurora(colors)
- var outputFile *fileWriter
+ var outputFile io.WriteCloser
if file != "" {
output, err := newFileOutputWriter(file)
if err != nil {
@@ -108,7 +120,7 @@ func NewStandardWriter(colors, noMetadata, noTimestamp, json, jsonReqResp bool,
}
outputFile = output
}
- var traceOutput *fileWriter
+ var traceOutput io.WriteCloser
if traceFile != "" {
output, err := newFileOutputWriter(traceFile)
if err != nil {
@@ -116,16 +128,24 @@ func NewStandardWriter(colors, noMetadata, noTimestamp, json, jsonReqResp bool,
}
traceOutput = output
}
+ var errorOutput io.WriteCloser
+ if errorFile != "" {
+ output, err := newFileOutputWriter(errorFile)
+ if err != nil {
+ return nil, errors.Wrap(err, "could not create error file")
+ }
+ errorOutput = output
+ }
writer := &StandardWriter{
json: json,
jsonReqResp: jsonReqResp,
noMetadata: noMetadata,
+ matcherStatus: MatcherStatus,
noTimestamp: noTimestamp,
aurora: auroraColorizer,
outputFile: outputFile,
- outputMutex: &sync.Mutex{},
traceFile: traceOutput,
- traceMutex: &sync.Mutex{},
+ errorFile: errorOutput,
severityColors: colorizer.New(auroraColorizer),
}
return writer, nil
@@ -133,6 +153,10 @@ func NewStandardWriter(colors, noMetadata, noTimestamp, json, jsonReqResp bool,
// Write writes the event to file and/or screen.
func (w *StandardWriter) Write(event *ResultEvent) error {
+ // Enrich the result event with extra metadata on the template-path and url.
+ if event.TemplatePath != "" {
+ event.Template, event.TemplateURL = utils.TemplatePathURL(types.ToString(event.TemplatePath))
+ }
event.Timestamp = time.Now()
var data []byte
@@ -155,33 +179,33 @@ func (w *StandardWriter) Write(event *ResultEvent) error {
if !w.json {
data = decolorizerRegex.ReplaceAll(data, []byte(""))
}
- if writeErr := w.outputFile.Write(data); writeErr != nil {
+ if _, writeErr := w.outputFile.Write(data); writeErr != nil {
-			return errors.Wrap(err, "could not write to output")
+			return errors.Wrap(writeErr, "could not write to output")
}
}
return nil
}
-// JSONTraceRequest is a trace log request written to file
-type JSONTraceRequest struct {
- ID string `json:"id"`
- URL string `json:"url"`
- Error string `json:"error"`
- Type string `json:"type"`
+// JSONLogRequest is a trace/error log request written to file
+type JSONLogRequest struct {
+ Template string `json:"template"`
+ Input string `json:"input"`
+ Error string `json:"error"`
+ Type string `json:"type"`
}
-// Request writes a log the requests trace log
+// Request writes a request log entry to the trace and/or error logs
-func (w *StandardWriter) Request(templateID, url, requestType string, err error) {
- if w.traceFile == nil {
+func (w *StandardWriter) Request(templatePath, input, requestType string, requestErr error) {
+ if w.traceFile == nil && w.errorFile == nil {
return
}
- request := &JSONTraceRequest{
- ID: templateID,
- URL: url,
- Type: requestType,
+ request := &JSONLogRequest{
+ Template: templatePath,
+ Input: input,
+ Type: requestType,
}
- if err != nil {
- request.Error = err.Error()
+ if unwrappedErr := utils.UnwrapError(requestErr); unwrappedErr != nil {
+ request.Error = unwrappedErr.Error()
} else {
request.Error = "none"
}
@@ -190,9 +214,14 @@ func (w *StandardWriter) Request(templateID, url, requestType string, err error)
if err != nil {
return
}
- w.traceMutex.Lock()
- _ = w.traceFile.Write(data)
- w.traceMutex.Unlock()
+
+ if w.traceFile != nil {
+ _, _ = w.traceFile.Write(data)
+ }
+
+ if requestErr != nil && w.errorFile != nil {
+ _, _ = w.errorFile.Write(data)
+ }
}
// Colorizer returns the colorizer instance for writer
@@ -208,4 +237,27 @@ func (w *StandardWriter) Close() {
if w.traceFile != nil {
w.traceFile.Close()
}
+ if w.errorFile != nil {
+ w.errorFile.Close()
+ }
+}
+
+// WriteFailure writes the failure event for template to file and/or screen.
+func (w *StandardWriter) WriteFailure(event InternalEvent) error {
+ if !w.matcherStatus {
+ return nil
+ }
+ templatePath, templateURL := utils.TemplatePathURL(types.ToString(event["template-path"]))
+ data := &ResultEvent{
+ Template: templatePath,
+ TemplateURL: templateURL,
+ TemplateID: types.ToString(event["template-id"]),
+ TemplatePath: types.ToString(event["template-path"]),
+ Info: event["template-info"].(model.Info),
+ Type: types.ToString(event["type"]),
+ Host: types.ToString(event["host"]),
+ MatcherStatus: false,
+ Timestamp: time.Now(),
+ }
+ return w.Write(data)
}
diff --git a/v2/pkg/output/output_test.go b/v2/pkg/output/output_test.go
new file mode 100644
index 000000000..1648dcc36
--- /dev/null
+++ b/v2/pkg/output/output_test.go
@@ -0,0 +1,59 @@
+package output
+
+import (
+ "fmt"
+ "strings"
+ "testing"
+
+ "github.com/pkg/errors"
+ "github.com/stretchr/testify/require"
+)
+
+func TestStandardWriterRequest(t *testing.T) {
+ t.Run("WithoutTraceAndError", func(t *testing.T) {
+ w, err := NewStandardWriter(false, false, false, false, false, false, "", "", "")
+ require.NoError(t, err)
+ require.NotPanics(t, func() {
+ w.Request("path", "input", "http", nil)
+ w.Close()
+ })
+ })
+
+ t.Run("TraceAndErrorWithoutError", func(t *testing.T) {
+ traceWriter := &testWriteCloser{}
+ errorWriter := &testWriteCloser{}
+
+ w, err := NewStandardWriter(false, false, false, false, false, false, "", "", "")
+		require.NoError(t, err)
+		w.traceFile = traceWriter
+		w.errorFile = errorWriter
+ w.Request("path", "input", "http", nil)
+
+ require.Equal(t, `{"template":"path","input":"input","error":"none","type":"http"}`, traceWriter.String())
+ require.Empty(t, errorWriter.String())
+ })
+
+ t.Run("ErrorWithWrappedError", func(t *testing.T) {
+ errorWriter := &testWriteCloser{}
+
+ w, err := NewStandardWriter(false, false, false, false, false, false, "", "", "")
+		require.NoError(t, err)
+		w.errorFile = errorWriter
+ w.Request(
+ "misconfiguration/tcpconfig.yaml",
+ "https://example.com/tcpconfig.html",
+ "http",
+ fmt.Errorf("GET https://example.com/tcpconfig.html/tcpconfig.html giving up after 2 attempts: %w", errors.New("context deadline exceeded (Client.Timeout exceeded while awaiting headers)")),
+ )
+
+ require.Equal(t, `{"template":"misconfiguration/tcpconfig.yaml","input":"https://example.com/tcpconfig.html","error":"context deadline exceeded (Client.Timeout exceeded while awaiting headers)","type":"http"}`, errorWriter.String())
+ })
+}
+
+type testWriteCloser struct {
+ strings.Builder
+}
+
+func (w testWriteCloser) Close() error {
+ return nil
+}
diff --git a/v2/pkg/parsers/parser.go b/v2/pkg/parsers/parser.go
index b65cb79e2..c63922466 100644
--- a/v2/pkg/parsers/parser.go
+++ b/v2/pkg/parsers/parser.go
@@ -5,6 +5,7 @@ import (
"io/ioutil"
"os"
"regexp"
+ "strings"
"gopkg.in/yaml.v2"
@@ -13,11 +14,15 @@ import (
"github.com/projectdiscovery/nuclei/v2/pkg/model"
"github.com/projectdiscovery/nuclei/v2/pkg/templates"
"github.com/projectdiscovery/nuclei/v2/pkg/templates/cache"
+ "github.com/projectdiscovery/nuclei/v2/pkg/templates/types"
"github.com/projectdiscovery/nuclei/v2/pkg/utils"
"github.com/projectdiscovery/nuclei/v2/pkg/utils/stats"
)
-const mandatoryFieldMissingTemplate = "mandatory '%s' field is missing"
+const (
+ mandatoryFieldMissingTemplate = "mandatory '%s' field is missing"
+ invalidFieldFormatTemplate = "invalid field format for '%s' (allowed format is %s)"
+)
// LoadTemplate returns true if the template is valid and matches the filtering criteria.
func LoadTemplate(templatePath string, tagFilter *filter.TagFilter, extraTags []string) (bool, error) {
@@ -30,12 +35,12 @@ func LoadTemplate(templatePath string, tagFilter *filter.TagFilter, extraTags []
return false, nil
}
- templateInfo := template.Info
- if validationError := validateMandatoryInfoFields(&templateInfo); validationError != nil {
+ if validationError := validateTemplateFields(template); validationError != nil {
+ stats.Increment(SyntaxErrorStats)
return false, validationError
}
- return isTemplateInfoMetadataMatch(tagFilter, &templateInfo, extraTags)
+ return isTemplateInfoMetadataMatch(tagFilter, &template.Info, extraTags, template.Type())
}
// LoadWorkflow returns true if the workflow is valid and matches the filtering criteria.
@@ -45,10 +50,8 @@ func LoadWorkflow(templatePath string) (bool, error) {
return false, templateParseError
}
- templateInfo := template.Info
-
if len(template.Workflows) > 0 {
- if validationError := validateMandatoryInfoFields(&templateInfo); validationError != nil {
+ if validationError := validateTemplateFields(template); validationError != nil {
return false, validationError
}
return true, nil
@@ -57,12 +60,12 @@ func LoadWorkflow(templatePath string) (bool, error) {
return false, nil
}
-func isTemplateInfoMetadataMatch(tagFilter *filter.TagFilter, templateInfo *model.Info, extraTags []string) (bool, error) {
+func isTemplateInfoMetadataMatch(tagFilter *filter.TagFilter, templateInfo *model.Info, extraTags []string, templateType types.ProtocolType) (bool, error) {
templateTags := templateInfo.Tags.ToSlice()
templateAuthors := templateInfo.Authors.ToSlice()
templateSeverity := templateInfo.SeverityHolder.Severity
- match, err := tagFilter.Match(templateTags, templateAuthors, templateSeverity, extraTags)
+ match, err := tagFilter.Match(templateTags, templateAuthors, templateSeverity, extraTags, templateType)
if err == filter.ErrExcluded {
return false, filter.ErrExcluded
@@ -71,18 +74,29 @@ func isTemplateInfoMetadataMatch(tagFilter *filter.TagFilter, templateInfo *mode
return match, err
}
-func validateMandatoryInfoFields(info *model.Info) error {
- if info == nil {
- return fmt.Errorf(mandatoryFieldMissingTemplate, "info")
- }
+func validateTemplateFields(template *templates.Template) error {
+ info := template.Info
+
+ var errors []string
if utils.IsBlank(info.Name) {
- return fmt.Errorf(mandatoryFieldMissingTemplate, "name")
+ errors = append(errors, fmt.Sprintf(mandatoryFieldMissingTemplate, "name"))
}
if info.Authors.IsEmpty() {
- return fmt.Errorf(mandatoryFieldMissingTemplate, "author")
+ errors = append(errors, fmt.Sprintf(mandatoryFieldMissingTemplate, "author"))
}
+
+ if template.ID == "" {
+ errors = append(errors, fmt.Sprintf(mandatoryFieldMissingTemplate, "id"))
+ } else if !templateIDRegexp.MatchString(template.ID) {
+ errors = append(errors, fmt.Sprintf(invalidFieldFormatTemplate, "id", templateIDRegexp.String()))
+ }
+
+ if len(errors) > 0 {
+		return fmt.Errorf("%s", strings.Join(errors, ", "))
+ }
+
return nil
}
@@ -90,11 +104,13 @@ var (
parsedTemplatesCache *cache.Templates
ShouldValidate bool
fieldErrorRegexp = regexp.MustCompile(`not found in`)
+ templateIDRegexp = regexp.MustCompile(`^([a-zA-Z0-9]+[-_])*[a-zA-Z0-9]+$`)
)
const (
- SyntaxWarningStats = "syntax-warnings"
- SyntaxErrorStats = "syntax-errors"
+ SyntaxWarningStats = "syntax-warnings"
+ SyntaxErrorStats = "syntax-errors"
+ RuntimeWarningsStats = "runtime-warnings"
)
func init() {
@@ -103,6 +119,7 @@ func init() {
stats.NewEntry(SyntaxWarningStats, "Found %d templates with syntax warning (use -validate flag for further examination)")
stats.NewEntry(SyntaxErrorStats, "Found %d templates with syntax error (use -validate flag for further examination)")
+ stats.NewEntry(RuntimeWarningsStats, "Found %d templates with runtime error (use -validate flag for further examination)")
}
// ParseTemplate parses a template and returns a *templates.Template structure
diff --git a/v2/pkg/parsers/parser_test.go b/v2/pkg/parsers/parser_test.go
new file mode 100644
index 000000000..32a69ef5b
--- /dev/null
+++ b/v2/pkg/parsers/parser_test.go
@@ -0,0 +1,111 @@
+package parsers
+
+import (
+ "errors"
+ "fmt"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+
+ "github.com/projectdiscovery/nuclei/v2/pkg/catalog/loader/filter"
+ "github.com/projectdiscovery/nuclei/v2/pkg/model"
+ "github.com/projectdiscovery/nuclei/v2/pkg/model/types/stringslice"
+ "github.com/projectdiscovery/nuclei/v2/pkg/templates"
+)
+
+func TestLoadTemplate(t *testing.T) {
+ origTemplatesCache := parsedTemplatesCache
+ defer func() { parsedTemplatesCache = origTemplatesCache }()
+
+ tt := []struct {
+ name string
+ template *templates.Template
+ templateErr error
+
+ expectedErr error
+ }{
+ {
+ name: "valid",
+ template: &templates.Template{
+ ID: "CVE-2021-27330",
+ Info: model.Info{
+ Name: "Valid template",
+ Authors: stringslice.StringSlice{Value: "Author"},
+ },
+ },
+ },
+ {
+ name: "emptyTemplate",
+ template: &templates.Template{},
+ expectedErr: errors.New("mandatory 'name' field is missing, mandatory 'author' field is missing, mandatory 'id' field is missing"),
+ },
+ {
+ name: "emptyNameWithInvalidID",
+ template: &templates.Template{
+ ID: "invalid id",
+ Info: model.Info{
+ Authors: stringslice.StringSlice{Value: "Author"},
+ },
+ },
+ expectedErr: errors.New("mandatory 'name' field is missing, invalid field format for 'id' (allowed format is ^([a-zA-Z0-9]+[-_])*[a-zA-Z0-9]+$)"),
+ },
+ }
+
+ for _, tc := range tt {
+ t.Run(tc.name, func(t *testing.T) {
+ parsedTemplatesCache.Store(tc.name, tc.template, tc.templateErr)
+
+ tagFilter := filter.New(&filter.Config{})
+ success, err := LoadTemplate(tc.name, tagFilter, nil)
+ if tc.expectedErr == nil {
+ require.NoError(t, err)
+ require.True(t, success)
+ } else {
+ require.Equal(t, tc.expectedErr, err)
+ require.False(t, success)
+ }
+ })
+ }
+
+ t.Run("invalidTemplateID", func(t *testing.T) {
+ tt := []struct {
+ id string
+ success bool
+ }{
+ {id: "A-B-C", success: true},
+ {id: "A-B-C-1", success: true},
+ {id: "CVE_2021_27330", success: true},
+ {id: "ABC DEF", success: false},
+ {id: "_-__AAA_", success: false},
+ {id: " CVE-2021-27330", success: false},
+ {id: "CVE-2021-27330 ", success: false},
+ {id: "CVE-2021-27330-", success: false},
+ {id: "-CVE-2021-27330-", success: false},
+ {id: "CVE-2021--27330", success: false},
+ {id: "CVE-2021+27330", success: false},
+ }
+ for i, tc := range tt {
+ name := fmt.Sprintf("regexp%d", i)
+ t.Run(name, func(t *testing.T) {
+ template := &templates.Template{
+ ID: tc.id,
+ Info: model.Info{
+ Name: "Valid template",
+ Authors: stringslice.StringSlice{Value: "Author"},
+ },
+ }
+ parsedTemplatesCache.Store(name, template, nil)
+
+ tagFilter := filter.New(&filter.Config{})
+ success, err := LoadTemplate(name, tagFilter, nil)
+ if tc.success {
+ require.NoError(t, err)
+ require.True(t, success)
+ } else {
+ require.Equal(t, errors.New("invalid field format for 'id' (allowed format is ^([a-zA-Z0-9]+[-_])*[a-zA-Z0-9]+$)"), err)
+ require.False(t, success)
+ }
+ })
+ }
+ })
+}
diff --git a/v2/pkg/parsers/workflow_loader.go b/v2/pkg/parsers/workflow_loader.go
index e3f9ab8b2..e0efd3fed 100644
--- a/v2/pkg/parsers/workflow_loader.go
+++ b/v2/pkg/parsers/workflow_loader.go
@@ -18,7 +18,7 @@ func NewLoader(options *protocols.ExecuterOptions) (model.WorkflowLoader, error)
tagFilter := filter.New(&filter.Config{
Tags: options.Options.Tags,
ExcludeTags: options.Options.ExcludeTags,
- Authors: options.Options.Author,
+ Authors: options.Options.Authors,
Severities: options.Options.Severities,
IncludeTags: options.Options.IncludeTags,
})
diff --git a/v2/pkg/projectfile/httputil.go b/v2/pkg/projectfile/httputil.go
index 6479fd20b..6be0b40db 100644
--- a/v2/pkg/projectfile/httputil.go
+++ b/v2/pkg/projectfile/httputil.go
@@ -79,23 +79,6 @@ func newInternalResponse() *InternalResponse {
}
}
-// Unused
-// func toInternalRequest(req *http.Request, target string, body []byte) *InternalRequest {
-// intReq := newInternalRquest()
-
-// intReq.Target = target
-// intReq.HTTPMajor = req.ProtoMajor
-// intReq.HTTPMinor = req.ProtoMinor
-// for k, v := range req.Header {
-// intReq.Headers[k] = v
-// }
-// intReq.Headers = req.Header
-// intReq.Method = req.Method
-// intReq.Body = body
-
-// return intReq
-// }
-
func toInternalResponse(resp *http.Response, body []byte) *InternalResponse {
intResp := newInternalResponse()
@@ -125,14 +108,3 @@ func fromInternalResponse(intResp *InternalResponse) *http.Response {
Body: ioutil.NopCloser(bytes.NewReader(intResp.Body)),
}
}
-
-// Unused
-// func fromInternalRequest(intReq *InternalRequest) *http.Request {
-// return &http.Request{
-// ProtoMinor: intReq.HTTPMinor,
-// ProtoMajor: intReq.HTTPMajor,
-// Header: intReq.Headers,
-// ContentLength: int64(len(intReq.Body)),
-// Body: ioutil.NopCloser(bytes.NewReader(intReq.Body)),
-// }
-// }
diff --git a/v2/pkg/projectfile/project.go b/v2/pkg/projectfile/project.go
index 71ad9e365..a64d7db1b 100644
--- a/v2/pkg/projectfile/project.go
+++ b/v2/pkg/projectfile/project.go
@@ -42,13 +42,13 @@ func (pf *ProjectFile) Get(req []byte) (*http.Response, error) {
return nil, fmt.Errorf("not found")
}
- var httprecord HTTPRecord
- httprecord.Response = newInternalResponse()
- if err := unmarshal(data, &httprecord); err != nil {
+ var httpRecord HTTPRecord
+ httpRecord.Response = newInternalResponse()
+ if err := unmarshal(data, &httpRecord); err != nil {
return nil, err
}
- return fromInternalResponse(httprecord.Response), nil
+ return fromInternalResponse(httpRecord.Response), nil
}
func (pf *ProjectFile) Set(req []byte, resp *http.Response, data []byte) error {
@@ -57,10 +57,10 @@ func (pf *ProjectFile) Set(req []byte, resp *http.Response, data []byte) error {
return err
}
- var httprecord HTTPRecord
- httprecord.Request = req
- httprecord.Response = toInternalResponse(resp, data)
- data, err = marshal(httprecord)
+ var httpRecord HTTPRecord
+ httpRecord.Request = req
+ httpRecord.Response = toInternalResponse(resp, data)
+ data, err = marshal(httpRecord)
if err != nil {
return err
}
diff --git a/v2/pkg/protocols/common/clusterer/clusterer.go b/v2/pkg/protocols/common/clusterer/clusterer.go
deleted file mode 100644
index 29c1a309e..000000000
--- a/v2/pkg/protocols/common/clusterer/clusterer.go
+++ /dev/null
@@ -1,49 +0,0 @@
-package clusterer
-
-import (
- "github.com/projectdiscovery/nuclei/v2/pkg/templates"
-)
-
-// Cluster clusters a list of templates into a lesser number if possible based
-// on the similarity between the sent requests.
-//
-// If the attributes match, multiple requests can be clustered into a single
-// request which saves time and network resources during execution.
-func Cluster(list map[string]*templates.Template) [][]*templates.Template {
- final := [][]*templates.Template{}
-
- // Each protocol that can be clustered should be handled here.
- for key, template := range list {
- // We only cluster http requests as of now.
- // Take care of requests that can't be clustered first.
- if len(template.RequestsHTTP) == 0 {
- delete(list, key)
- final = append(final, []*templates.Template{template})
- continue
- }
-
- delete(list, key) // delete element first so it's not found later.
- // Find any/all similar matching request that is identical to
- // this one and cluster them together for http protocol only.
- if len(template.RequestsHTTP) == 1 {
- cluster := []*templates.Template{}
-
- for otherKey, other := range list {
- if len(other.RequestsHTTP) == 0 {
- continue
- }
- if template.RequestsHTTP[0].CanCluster(other.RequestsHTTP[0]) {
- delete(list, otherKey)
- cluster = append(cluster, other)
- }
- }
- if len(cluster) > 0 {
- cluster = append(cluster, template)
- final = append(final, cluster)
- continue
- }
- }
- final = append(final, []*templates.Template{template})
- }
- return final
-}
diff --git a/v2/pkg/protocols/common/executer/executer.go b/v2/pkg/protocols/common/executer/executer.go
index d9a285948..16a3ede71 100644
--- a/v2/pkg/protocols/common/executer/executer.go
+++ b/v2/pkg/protocols/common/executer/executer.go
@@ -6,6 +6,7 @@ import (
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v2/pkg/output"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols"
+ "github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/helpers/writer"
)
// Executer executes a group of requests for a protocol
@@ -47,8 +48,6 @@ func (e *Executer) Execute(input string) (bool, error) {
dynamicValues := make(map[string]interface{})
previous := make(map[string]interface{})
for _, req := range e.requests {
- req := req
-
err := req.ExecuteWithResults(input, dynamicValues, previous, func(event *output.InternalWrappedEvent) {
ID := req.GetID()
if ID != "" {
@@ -61,18 +60,17 @@ func (e *Executer) Execute(input string) (bool, error) {
builder.Reset()
}
}
- if event.OperatorsResult == nil {
- return
- }
- for _, result := range event.Results {
- if e.options.IssuesClient != nil {
- if err := e.options.IssuesClient.CreateIssue(result); err != nil {
- gologger.Warning().Msgf("Could not create issue on tracker: %s", err)
- }
+			// If no results were found and interactsh is not in use, write a
+			// failure event (shown when the matcher-status flag is set);
+			// otherwise write the matched result.
+ if event.OperatorsResult == nil && !event.UsesInteractsh {
+ if err := e.options.Output.WriteFailure(event.InternalEvent); err != nil {
+ gologger.Warning().Msgf("Could not write failure event to output: %s\n", err)
+ }
+ } else {
+ if writer.WriteResult(event, e.options.Output, e.options.Progress, e.options.IssuesClient) {
+ results = true
}
- results = true
- _ = e.options.Output.Write(result)
- e.options.Progress.IncrementMatched()
}
})
if err != nil {
@@ -83,6 +81,10 @@ func (e *Executer) Execute(input string) (bool, error) {
}
gologger.Warning().Msgf("[%s] Could not execute request for %s: %s\n", e.options.TemplateID, input, err)
}
+ // If a match was found and stop at first match is set, break out of the loop and return
+ if results && (e.options.StopAtFirstMatch || e.options.Options.StopAtFirstMatch) {
+ break
+ }
}
return results, nil
}
@@ -91,6 +93,7 @@ func (e *Executer) Execute(input string) (bool, error) {
func (e *Executer) ExecuteWithResults(input string, callback protocols.OutputEventCallback) error {
dynamicValues := make(map[string]interface{})
previous := make(map[string]interface{})
+ var results bool
for _, req := range e.requests {
req := req
@@ -110,6 +113,7 @@ func (e *Executer) ExecuteWithResults(input string, callback protocols.OutputEve
if event.OperatorsResult == nil {
return
}
+ results = true
callback(event)
})
if err != nil {
@@ -120,6 +124,10 @@ func (e *Executer) ExecuteWithResults(input string, callback protocols.OutputEve
}
gologger.Warning().Msgf("[%s] Could not execute request for %s: %s\n", e.options.TemplateID, input, err)
}
+ // If a match was found and stop at first match is set, break out of the loop and return
+ if results && (e.options.StopAtFirstMatch || e.options.Options.StopAtFirstMatch) {
+ break
+ }
}
return nil
}
diff --git a/v2/pkg/protocols/common/expressions/expressions.go b/v2/pkg/protocols/common/expressions/expressions.go
index 5a4ba2f3a..4f3a4b613 100644
--- a/v2/pkg/protocols/common/expressions/expressions.go
+++ b/v2/pkg/protocols/common/expressions/expressions.go
@@ -4,12 +4,13 @@ import (
"regexp"
"github.com/Knetic/govaluate"
+
"github.com/projectdiscovery/nuclei/v2/pkg/operators/common/dsl"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/generators"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/replacer"
)
-var templateExpressionRegex = regexp.MustCompile(`(?m)\{\{[^}]+\}\}["'\)\}]*`)
+var templateExpressionRegex = regexp.MustCompile(`(?m){{[^}]+}}["')}]*`)
// Evaluate checks if the match contains a dynamic variable, for each
// found one we will check if it's an expression and can
diff --git a/v2/pkg/protocols/common/expressions/variables.go b/v2/pkg/protocols/common/expressions/variables.go
index 48ac546ed..2aba5c324 100644
--- a/v2/pkg/protocols/common/expressions/variables.go
+++ b/v2/pkg/protocols/common/expressions/variables.go
@@ -6,51 +6,59 @@ import (
"strings"
)
-var unresolvedVariablesRegex = regexp.MustCompile(`(?:%7[B|b]|\{){2}([^}]+)(?:%7[D|d]|\}){2}["'\)\}]*`)
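+// unresolvedVariablesRegex matches both the literal "{{name}}" form and the
+// URL-encoded "%7B%7Bname%7D%7D" form of an unresolved variable.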
+var unresolvedVariablesRegex = regexp.MustCompile(`(?:%7[B|b]|{){2}([^}]+)(?:%7[D|d]|}){2}["')}]*`)
// ContainsUnresolvedVariables returns an error with variable names if the passed
 // input contains unresolved {{}} variables.
-func ContainsUnresolvedVariables(data string) error {
- matches := unresolvedVariablesRegex.FindAllStringSubmatch(data, -1)
- if len(matches) == 0 {
- return nil
- }
- errorString := &strings.Builder{}
- errorString.WriteString("unresolved variables found: ")
-
- for i, match := range matches {
- if len(match) < 2 {
- continue
+func ContainsUnresolvedVariables(items ...string) error {
+ for _, data := range items {
+ matches := unresolvedVariablesRegex.FindAllStringSubmatch(data, -1)
+ if len(matches) == 0 {
+			continue
}
- errorString.WriteString(match[1])
- if i != len(matches)-1 {
- errorString.WriteString(",")
- }
- }
- errorMessage := errorString.String()
- return errors.New(errorMessage)
-}
+ errorString := &strings.Builder{}
+ errorString.WriteString("unresolved variables found: ")
-func ContainsVariablesWithNames(data string, names map[string]interface{}) error {
- matches := unresolvedVariablesRegex.FindAllStringSubmatch(data, -1)
- if len(matches) == 0 {
- return nil
- }
- errorString := &strings.Builder{}
- errorString.WriteString("unresolved variables with values found: ")
-
- for i, match := range matches {
- if len(match) < 2 {
- continue
- }
- matchName := match[1]
- if _, ok := names[matchName]; !ok {
- errorString.WriteString(matchName)
+ for i, match := range matches {
+ if len(match) < 2 {
+ continue
+ }
+ errorString.WriteString(match[1])
if i != len(matches)-1 {
errorString.WriteString(",")
}
}
+ errorMessage := errorString.String()
+ return errors.New(errorMessage)
}
- errorMessage := errorString.String()
- return errors.New(errorMessage)
+
+ return nil
+}
+
+func ContainsVariablesWithNames(names map[string]interface{}, items ...string) error {
+ for _, data := range items {
+ matches := unresolvedVariablesRegex.FindAllStringSubmatch(data, -1)
+ if len(matches) == 0 {
+			continue
+ }
+ errorString := &strings.Builder{}
+ errorString.WriteString("unresolved variables with values found: ")
+
+ for i, match := range matches {
+ if len(match) < 2 {
+ continue
+ }
+ matchName := match[1]
+ if _, ok := names[matchName]; !ok {
+ errorString.WriteString(matchName)
+ if i != len(matches)-1 {
+ errorString.WriteString(",")
+ }
+ }
+ }
+ errorMessage := errorString.String()
+ return errors.New(errorMessage)
+ }
+
+ return nil
}
diff --git a/v2/pkg/protocols/common/generators/attack_types.go b/v2/pkg/protocols/common/generators/attack_types.go
new file mode 100644
index 000000000..a0a9eb78d
--- /dev/null
+++ b/v2/pkg/protocols/common/generators/attack_types.go
@@ -0,0 +1,97 @@
+package generators
+
+import (
+ "encoding/json"
+ "strings"
+
+ "github.com/alecthomas/jsonschema"
+ "github.com/pkg/errors"
+)
+
+// AttackType is the type of attack for payloads
+type AttackType int
+
+// Supported values for the AttackType
+// name:AttackType
+const (
+ // name:batteringram
+ BatteringRamAttack AttackType = iota + 1
+ // name:pitchfork
+ PitchForkAttack
+ // name:clusterbomb
+ ClusterBombAttack
+ limit
+)
+
+// attackTypeMappings is a table mapping attack types to their string names.
+var attackTypeMappings = map[AttackType]string{
+ BatteringRamAttack: "batteringram",
+ PitchForkAttack: "pitchfork",
+ ClusterBombAttack: "clusterbomb",
+}
+
+func GetSupportedAttackTypes() []AttackType {
+ var result []AttackType
+ for index := AttackType(1); index < limit; index++ {
+ result = append(result, index)
+ }
+ return result
+}
+
+func toAttackType(valueToMap string) (AttackType, error) {
+ normalizedValue := normalizeValue(valueToMap)
+ for key, currentValue := range attackTypeMappings {
+ if normalizedValue == currentValue {
+ return key, nil
+ }
+ }
+ return -1, errors.New("invalid attack type: " + valueToMap)
+}
+
+func normalizeValue(value string) string {
+ return strings.TrimSpace(strings.ToLower(value))
+}
+
+func (t AttackType) String() string {
+ return attackTypeMappings[t]
+}
+
+// AttackTypeHolder is used to hold internal type of the attack
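+// A YAML value such as "clusterbomb" is decoded by UnmarshalYAML below into the
+// corresponding AttackType constant (ClusterBombAttack in this example).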
+type AttackTypeHolder struct {
+ Value AttackType `mapping:"true"`
+}
+
+func (holder AttackTypeHolder) JSONSchemaType() *jsonschema.Type {
+ gotType := &jsonschema.Type{
+ Type: "string",
+ Title: "type of the attack",
+ Description: "Type of the attack",
+ }
+ for _, types := range GetSupportedAttackTypes() {
+ gotType.Enum = append(gotType.Enum, types.String())
+ }
+ return gotType
+}
+
+func (holder *AttackTypeHolder) UnmarshalYAML(unmarshal func(interface{}) error) error {
+ var marshalledTypes string
+ if err := unmarshal(&marshalledTypes); err != nil {
+ return err
+ }
+
+ computedType, err := toAttackType(marshalledTypes)
+ if err != nil {
+ return err
+ }
+
+ holder.Value = computedType
+ return nil
+}
+
+func (holder *AttackTypeHolder) MarshalJSON() ([]byte, error) {
+ return json.Marshal(holder.Value.String())
+}
+
+func (holder AttackTypeHolder) MarshalYAML() (interface{}, error) {
+ return holder.Value.String(), nil
+}
diff --git a/v2/pkg/protocols/common/generators/generators.go b/v2/pkg/protocols/common/generators/generators.go
index a63fbcb2b..ca8f9b655 100644
--- a/v2/pkg/protocols/common/generators/generators.go
+++ b/v2/pkg/protocols/common/generators/generators.go
@@ -2,49 +2,54 @@
package generators
-import "github.com/pkg/errors"
+import (
+ "github.com/pkg/errors"
-// Generator is the generator struct for generating payloads
-type Generator struct {
- Type Type
+ "github.com/projectdiscovery/nuclei/v2/pkg/catalog"
+)
+
+// PayloadGenerator is the generator struct for generating payloads
+type PayloadGenerator struct {
+ Type AttackType
payloads map[string][]string
}
-// Type is type of attack
-type Type int
-
-const (
- // Batteringram replaces same payload into all of the defined payload positions at once.
- BatteringRam Type = iota + 1
- // PitchFork replaces variables with positional value from multiple wordlists
- PitchFork
- // ClusterBomb replaces variables with all possible combinations of values
- ClusterBomb
-)
-
-// StringToType is a table for conversion of attack type from string.
-var StringToType = map[string]Type{
- "batteringram": BatteringRam,
- "pitchfork": PitchFork,
- "clusterbomb": ClusterBomb,
-}
-
// New creates a new generator structure for payload generation
-func New(payloads map[string]interface{}, payloadType Type, templatePath string) (*Generator, error) {
- generator := &Generator{}
+func New(payloads map[string]interface{}, attackType AttackType, templatePath string, catalog *catalog.Catalog) (*PayloadGenerator, error) {
+ if attackType.String() == "" {
+ attackType = BatteringRamAttack
+ }
+
+ // Resolve payload paths if they are files.
+ payloadsFinal := make(map[string]interface{})
+ for name, payload := range payloads {
+ payloadsFinal[name] = payload
+ }
+ for name, payload := range payloads {
+ payloadStr, ok := payload.(string)
+ if ok {
+ final, resolveErr := catalog.ResolvePath(payloadStr, templatePath)
+ if resolveErr != nil {
+ return nil, errors.Wrap(resolveErr, "could not read payload file")
+ }
+ payloadsFinal[name] = final
+ }
+ }
+
+ generator := &PayloadGenerator{}
if err := generator.validate(payloads, templatePath); err != nil {
return nil, err
}
- compiled, err := loadPayloads(payloads)
+ compiled, err := loadPayloads(payloadsFinal)
if err != nil {
return nil, err
}
- generator.Type = payloadType
+ generator.Type = attackType
generator.payloads = compiled
// Validate the batteringram payload set
- if payloadType == BatteringRam {
+ if attackType == BatteringRamAttack {
if len(payloads) != 1 {
return nil, errors.New("batteringram must have single payload set")
}
@@ -54,7 +59,7 @@ func New(payloads map[string]interface{}, payloadType Type, templatePath string)
// Iterator is a single instance of an iterator for a generator structure
type Iterator struct {
- Type Type
+ Type AttackType
position int
msbIterator int
total int
@@ -62,7 +67,7 @@ type Iterator struct {
}
// NewIterator creates a new iterator for the payloads generator
-func (g *Generator) NewIterator() *Iterator {
+func (g *PayloadGenerator) NewIterator() *Iterator {
var payloads []*payloadIterator
for name, values := range g.payloads {
@@ -95,18 +100,18 @@ func (i *Iterator) Remaining() int {
func (i *Iterator) Total() int {
count := 0
switch i.Type {
- case BatteringRam:
+ case BatteringRamAttack:
for _, p := range i.payloads {
count += len(p.values)
}
- case PitchFork:
+ case PitchForkAttack:
count = len(i.payloads[0].values)
for _, p := range i.payloads {
if count > len(p.values) {
count = len(p.values)
}
}
- case ClusterBomb:
+ case ClusterBombAttack:
count = 1
for _, p := range i.payloads {
count *= len(p.values)
@@ -118,11 +123,11 @@ func (i *Iterator) Total() int {
// Value returns the next value for an iterator
func (i *Iterator) Value() (map[string]interface{}, bool) {
switch i.Type {
- case BatteringRam:
+ case BatteringRamAttack:
return i.batteringRamValue()
- case PitchFork:
+ case PitchForkAttack:
return i.pitchforkValue()
- case ClusterBomb:
+ case ClusterBombAttack:
return i.clusterbombValue()
default:
return i.batteringRamValue()
@@ -179,7 +184,7 @@ func (i *Iterator) clusterbombValue() (map[string]interface{}, bool) {
signalNext = false
}
if !p.next() {
- // No more inputs in this inputprovider
+ // No more inputs in this input provider
if index == i.msbIterator {
// Reset all previous wordlists and increment the msb counter
i.msbIterator++
diff --git a/v2/pkg/protocols/common/generators/generators_test.go b/v2/pkg/protocols/common/generators/generators_test.go
index de37ca62d..24a9e2731 100644
--- a/v2/pkg/protocols/common/generators/generators_test.go
+++ b/v2/pkg/protocols/common/generators/generators_test.go
@@ -4,12 +4,15 @@ import (
"testing"
"github.com/stretchr/testify/require"
+
+ "github.com/projectdiscovery/nuclei/v2/pkg/catalog"
)
func TestBatteringRamGenerator(t *testing.T) {
usernames := []string{"admin", "password"}
- generator, err := New(map[string]interface{}{"username": usernames}, BatteringRam, "")
+ catalogInstance := catalog.New("")
+ generator, err := New(map[string]interface{}{"username": usernames}, BatteringRamAttack, "", catalogInstance)
require.Nil(t, err, "could not create generator")
iterator := generator.NewIterator()
@@ -28,7 +31,8 @@ func TestPitchforkGenerator(t *testing.T) {
usernames := []string{"admin", "token"}
passwords := []string{"password1", "password2", "password3"}
- generator, err := New(map[string]interface{}{"username": usernames, "password": passwords}, PitchFork, "")
+ catalogInstance := catalog.New("")
+ generator, err := New(map[string]interface{}{"username": usernames, "password": passwords}, PitchForkAttack, "", catalogInstance)
require.Nil(t, err, "could not create generator")
iterator := generator.NewIterator()
@@ -49,7 +53,8 @@ func TestClusterbombGenerator(t *testing.T) {
usernames := []string{"admin"}
passwords := []string{"admin", "password", "token"}
- generator, err := New(map[string]interface{}{"username": usernames, "password": passwords}, ClusterBomb, "")
+ catalogInstance := catalog.New("")
+ generator, err := New(map[string]interface{}{"username": usernames, "password": passwords}, ClusterBombAttack, "", catalogInstance)
require.Nil(t, err, "could not create generator")
iterator := generator.NewIterator()
diff --git a/v2/pkg/protocols/common/generators/load.go b/v2/pkg/protocols/common/generators/load.go
index 0c44b613c..d5b00e4d3 100644
--- a/v2/pkg/protocols/common/generators/load.go
+++ b/v2/pkg/protocols/common/generators/load.go
@@ -53,7 +53,7 @@ func loadPayloadsFromFile(filepath string) ([]string, error) {
}
lines = append(lines, text)
}
- if err := scanner.Err(); err != nil && err != io.EOF {
+ if err := scanner.Err(); err != nil && !errors.Is(err, io.EOF) {
return lines, scanner.Err()
}
return lines, nil
diff --git a/v2/pkg/protocols/common/generators/maps.go b/v2/pkg/protocols/common/generators/maps.go
index 31df768bb..d88ec3dbd 100644
--- a/v2/pkg/protocols/common/generators/maps.go
+++ b/v2/pkg/protocols/common/generators/maps.go
@@ -1,9 +1,49 @@
package generators
import (
+ "reflect"
"strings"
)
+// MergeMapsMany merges many maps into a new map
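+// For example, merging {"a": ["1", "2"], "c": "5"} and {"b": ["3", "4"]}
+// produces {"a": ["1", "2"], "b": ["3", "4"], "c": ["5"]}.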
+func MergeMapsMany(maps ...interface{}) map[string][]string {
+ m := make(map[string][]string)
+ for _, gotMap := range maps {
+ val := reflect.ValueOf(gotMap)
+ if val.Kind() != reflect.Map {
+ continue
+ }
+ appendToSlice := func(key, value string) {
+ if values, ok := m[key]; !ok {
+ m[key] = []string{value}
+ } else {
+ m[key] = append(values, value)
+ }
+ }
+ for _, e := range val.MapKeys() {
+ v := val.MapIndex(e)
+ switch v.Kind() {
+ case reflect.Slice, reflect.Array:
+ for i := 0; i < v.Len(); i++ {
+ appendToSlice(e.String(), v.Index(i).String())
+ }
+ case reflect.String:
+ appendToSlice(e.String(), v.String())
+ case reflect.Interface:
+ switch data := v.Interface().(type) {
+ case string:
+ appendToSlice(e.String(), data)
+ case []string:
+ for _, value := range data {
+ appendToSlice(e.String(), value)
+ }
+ }
+ }
+ }
+ }
+ return m
+}
+
// MergeMaps merges two maps into a new map
func MergeMaps(m1, m2 map[string]interface{}) map[string]interface{} {
m := make(map[string]interface{}, len(m1)+len(m2))
diff --git a/v2/pkg/protocols/common/generators/maps_test.go b/v2/pkg/protocols/common/generators/maps_test.go
new file mode 100644
index 000000000..870af84c9
--- /dev/null
+++ b/v2/pkg/protocols/common/generators/maps_test.go
@@ -0,0 +1,16 @@
+package generators
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestMergeMapsMany(t *testing.T) {
+ got := MergeMapsMany(map[string]interface{}{"a": []string{"1", "2"}, "c": "5"}, map[string][]string{"b": []string{"3", "4"}})
+ require.Equal(t, map[string][]string{
+ "a": []string{"1", "2"},
+ "b": []string{"3", "4"},
+ "c": []string{"5"},
+ }, got, "could not get correct merged map")
+}
diff --git a/v2/pkg/protocols/common/generators/validate.go b/v2/pkg/protocols/common/generators/validate.go
index 0384f8907..b04f8034c 100644
--- a/v2/pkg/protocols/common/generators/validate.go
+++ b/v2/pkg/protocols/common/generators/validate.go
@@ -4,43 +4,49 @@ import (
"errors"
"fmt"
"os"
- "path/filepath"
"strings"
+ "github.com/projectdiscovery/folderutil"
"github.com/projectdiscovery/nuclei/v2/pkg/types"
)
// validate validates the payloads if any.
-func (g *Generator) validate(payloads map[string]interface{}, templatePath string) error {
+func (g *PayloadGenerator) validate(payloads map[string]interface{}, templatePath string) error {
for name, payload := range payloads {
- switch pt := payload.(type) {
+ switch payloadType := payload.(type) {
case string:
// check if it's a multiline string list
- if len(strings.Split(pt, "\n")) != 1 {
+ if len(strings.Split(payloadType, "\n")) != 1 {
return errors.New("invalid number of lines in payload")
}
// check if it's a worldlist file and try to load it
- if fileExists(pt) {
+ if fileExists(payloadType) {
continue
}
changed := false
- pathTokens := strings.Split(templatePath, string(os.PathSeparator))
- for i := range pathTokens {
- tpath := filepath.Join(filepath.Join(pathTokens[:i]...), pt)
- if fileExists(tpath) {
- payloads[name] = tpath
+ templatePathInfo, err := folderutil.NewPathInfo(templatePath)
+ if err != nil {
+ return err
+ }
+ payloadPathsToProbe, err := templatePathInfo.MeshWith(payloadType)
+ if err != nil {
+ return err
+ }
+ for _, payloadPath := range payloadPathsToProbe {
+ if fileExists(payloadPath) {
+ payloads[name] = payloadPath
changed = true
break
}
}
if !changed {
- return fmt.Errorf("the %s file for payload %s does not exist or does not contain enough elements", pt, name)
+ return fmt.Errorf("the %s file for payload %s does not exist or does not contain enough elements", payloadType, name)
}
case interface{}:
- loadedPayloads := types.ToStringSlice(pt)
+ loadedPayloads := types.ToStringSlice(payloadType)
if len(loadedPayloads) == 0 {
return fmt.Errorf("the payload %s does not contain enough elements", name)
}
diff --git a/v2/pkg/protocols/common/helpers/deserialization/java.go b/v2/pkg/protocols/common/helpers/deserialization/java.go
index ee8ddfb6b..0ea107c37 100644
--- a/v2/pkg/protocols/common/helpers/deserialization/java.go
+++ b/v2/pkg/protocols/common/helpers/deserialization/java.go
@@ -48,15 +48,19 @@ func gadgetEncodingHelper(returnData []byte, encoding string) string {
return hex.EncodeToString(returnData)
case "gzip":
buffer := &bytes.Buffer{}
- if _, err := gzip.NewWriter(buffer).Write(returnData); err != nil {
+ writer := gzip.NewWriter(buffer)
+ if _, err := writer.Write(returnData); err != nil {
return ""
}
+ _ = writer.Close()
return buffer.String()
case "gzip-base64":
buffer := &bytes.Buffer{}
- if _, err := gzip.NewWriter(buffer).Write(returnData); err != nil {
+ writer := gzip.NewWriter(buffer)
+ if _, err := writer.Write(returnData); err != nil {
return ""
}
+ _ = writer.Close()
return urlsafeBase64Encode(buffer.Bytes())
case "base64-raw":
return base64.StdEncoding.EncodeToString(returnData)
diff --git a/v2/pkg/protocols/common/helpers/responsehighlighter/hexdump.go b/v2/pkg/protocols/common/helpers/responsehighlighter/hexdump.go
new file mode 100644
index 000000000..decc985c7
--- /dev/null
+++ b/v2/pkg/protocols/common/helpers/responsehighlighter/hexdump.go
@@ -0,0 +1,130 @@
+package responsehighlighter
+
+import (
+ "errors"
+ "fmt"
+ "regexp"
+ "strings"
+ "unicode"
+
+ "github.com/projectdiscovery/gologger"
+)
+
+// [0-9a-fA-F]{8} {2} - hexdump offsets (8-character hex value followed by two spaces)
+// [0-9a-fA-F]{2} + - two-character hex values followed by one or two spaces (potentially wrapped in an ANSI color code, see below)
+// \x1b\[(\d;?)+m - ANSI color code pattern
+// \x1b\[0m - ANSI color code reset
+// \|(.*)\|\n - ASCII representation of the input delimited by pipe characters
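+//
+// A matching row looks roughly like (column spacing abbreviated here):
+//   00000000  68 65 6c 6c 6f  |hello|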
+var hexDumpParsePattern = regexp.MustCompile(`([0-9a-fA-F]{8} {2})((?:(?:\x1b\[(?:\d;?)+m)?[0-9a-fA-F]{2}(?:\x1b\[0m)? +)+)\|(.*)\|\n`)
+var hexValuePattern = regexp.MustCompile(`([a-fA-F0-9]{2})`)
+
+type HighlightableHexDump struct {
+ index []string
+ hex []string
+ ascii []string
+}
+
+func NewHighlightableHexDump(rowSize int) HighlightableHexDump {
+ return HighlightableHexDump{index: make([]string, 0, rowSize), hex: make([]string, 0, rowSize), ascii: make([]string, 0, rowSize)}
+}
+
+func (hexDump HighlightableHexDump) len() int {
+ return len(hexDump.index)
+}
+
+func (hexDump HighlightableHexDump) String() string {
+ var result string
+ for i := 0; i < hexDump.len(); i++ {
+ result += hexDump.index[i] + hexDump.hex[i] + "|" + hexDump.ascii[i] + "|\n"
+ }
+ return result
+}
+
+func toHighLightedHexDump(hexDump, snippetToHighlight string) (HighlightableHexDump, error) {
+ hexDumpRowValues := hexDumpParsePattern.FindAllStringSubmatch(hexDump, -1)
+ if hexDumpRowValues == nil || len(hexDumpRowValues) != strings.Count(hexDump, "\n") {
+ message := "could not parse hexdump"
+ gologger.Warning().Msgf(message)
+ return HighlightableHexDump{}, errors.New(message)
+ }
+
+ result := NewHighlightableHexDump(len(hexDumpRowValues))
+ for _, currentHexDumpRowValues := range hexDumpRowValues {
+ result.index = append(result.index, currentHexDumpRowValues[1])
+ result.hex = append(result.hex, currentHexDumpRowValues[2])
+ result.ascii = append(result.ascii, currentHexDumpRowValues[3])
+ }
+ return result.highlight(snippetToHighlight), nil
+}
+
+func (hexDump HighlightableHexDump) highlight(snippetToColor string) HighlightableHexDump {
+ return highlightAsciiSection(highlightHexSection(hexDump, snippetToColor), snippetToColor)
+}
+
+func highlightHexSection(hexDump HighlightableHexDump, snippetToColor string) HighlightableHexDump {
+ var snippetHexCharactersMatchPattern string
+ for _, char := range snippetToColor {
+ snippetHexCharactersMatchPattern += fmt.Sprintf(`(%02x[ \n]+)`, char)
+ }
+
+ hexDump.hex = highlight(hexDump.hex, snippetHexCharactersMatchPattern, func(v string) string {
+ return hexValuePattern.ReplaceAllString(v, addColor("$1"))
+ })
+
+ return hexDump
+}
+
+func highlightAsciiSection(hexDump HighlightableHexDump, snippetToColor string) HighlightableHexDump {
+ var snippetCharactersMatchPattern string
+ for _, v := range snippetToColor {
+ var value string
+ if IsASCIIPrintable(v) {
+ value = regexp.QuoteMeta(string(v))
+ } else {
+ value = "."
+ }
+ snippetCharactersMatchPattern += fmt.Sprintf(`(%s\n*)`, value)
+ }
+
+ hexDump.ascii = highlight(hexDump.ascii, snippetCharactersMatchPattern, func(v string) string {
+ if len(v) > 1 {
+ return addColor(string(v[0])) + v[1:] // do not color new line characters
+ }
+ return addColor(v)
+ })
+
+ return hexDump
+}
+
+func highlight(values []string, snippetCharactersMatchPattern string, replaceToFunc func(v string) string) []string {
+ rows := strings.Join(values, "\n")
+ compiledPattern := regexp.MustCompile(snippetCharactersMatchPattern)
+ for _, submatch := range compiledPattern.FindAllStringSubmatch(rows, -1) {
+ var replaceTo string
+ var replaceFrom string
+ for _, matchedValueWithSuffix := range submatch[1:] {
+ replaceFrom += matchedValueWithSuffix
+ replaceTo += replaceToFunc(matchedValueWithSuffix)
+ }
+ rows = strings.ReplaceAll(rows, replaceFrom, replaceTo)
+ }
+ return strings.Split(rows, "\n")
+}
+
+func HasBinaryContent(input string) bool {
+ return !IsASCII(input)
+}
+
+// IsASCII tests whether a string consists only of ASCII characters or not
+func IsASCII(input string) bool {
+ for i := 0; i < len(input); i++ {
+ if input[i] > unicode.MaxASCII {
+ return false
+ }
+ }
+ return true
+}
+
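+// IsASCIIPrintable returns true for visible ASCII characters (space and control characters excluded)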
+func IsASCIIPrintable(input rune) bool {
+ return input > 32 && input < unicode.MaxASCII
+}
diff --git a/v2/pkg/protocols/common/helpers/responsehighlighter/response_highlighter.go b/v2/pkg/protocols/common/helpers/responsehighlighter/response_highlighter.go
index 1cb914e97..ccec3cd38 100644
--- a/v2/pkg/protocols/common/helpers/responsehighlighter/response_highlighter.go
+++ b/v2/pkg/protocols/common/helpers/responsehighlighter/response_highlighter.go
@@ -1,6 +1,7 @@
package responsehighlighter
import (
+ "sort"
"strconv"
"strings"
@@ -9,16 +10,19 @@ import (
"github.com/projectdiscovery/nuclei/v2/pkg/operators"
)
-var colorizer = aurora.NewAurora(true)
+var colorFunction = aurora.Green
-func Highlight(operatorResult *operators.Result, response string, noColor bool) string {
+func Highlight(operatorResult *operators.Result, response string, noColor, hexDump bool) string {
result := response
if operatorResult != nil && !noColor {
- for _, matches := range operatorResult.Matches {
- if len(matches) > 0 {
- for _, currentMatch := range matches {
- result = strings.ReplaceAll(result, currentMatch, colorizer.Green(currentMatch).String())
+ for _, currentMatch := range getSortedMatches(operatorResult) {
+ if hexDump {
+ highlightedHexDump, err := toHighLightedHexDump(result, currentMatch)
+ if err == nil {
+ result = highlightedHexDump.String()
}
+ } else {
+ result = highlightASCII(currentMatch, result)
}
}
}
@@ -26,6 +30,27 @@ func Highlight(operatorResult *operators.Result, response string, noColor bool)
return result
}
+func highlightASCII(currentMatch string, result string) string {
+ var coloredMatchBuilder strings.Builder
+ for _, char := range currentMatch {
+ coloredMatchBuilder.WriteString(addColor(string(char)))
+ }
+
+ return strings.ReplaceAll(result, currentMatch, coloredMatchBuilder.String())
+}
+
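+// getSortedMatches flattens all matched snippets and sorts them by length in descending
+// order, so that longer matches are highlighted before any of their substrings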
+func getSortedMatches(operatorResult *operators.Result) []string {
+ sortedMatches := make([]string, 0, len(operatorResult.Matches))
+ for _, matches := range operatorResult.Matches {
+ sortedMatches = append(sortedMatches, matches...)
+ }
+
+ sort.Slice(sortedMatches, func(i, j int) bool {
+ return len(sortedMatches[i]) > len(sortedMatches[j])
+ })
+ return sortedMatches
+}
+
func CreateStatusCodeSnippet(response string, statusCode int) string {
if strings.HasPrefix(response, "HTTP/") {
strStatusCode := strconv.Itoa(statusCode)
@@ -33,3 +58,7 @@ func CreateStatusCodeSnippet(response string, statusCode int) string {
}
return ""
}
+
+func addColor(value string) string {
+ return colorFunction(value).String()
+}
diff --git a/v2/pkg/protocols/common/helpers/responsehighlighter/response_highlighter_test.go b/v2/pkg/protocols/common/helpers/responsehighlighter/response_highlighter_test.go
new file mode 100644
index 000000000..fcdd16a84
--- /dev/null
+++ b/v2/pkg/protocols/common/helpers/responsehighlighter/response_highlighter_test.go
@@ -0,0 +1,111 @@
+package responsehighlighter
+
+import (
+ "encoding/hex"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+
+ "github.com/projectdiscovery/nuclei/v2/pkg/operators"
+)
+
+const input = "abcdefghijklmnabcdefghijklmnabcdefghijklmnabcdefghijklmnabcdefghijklmnabcdefghijklmnabcdefghijklmnabcdefghijklmnabcdefghijklmn"
+
+func TestHexDumpHighlighting(t *testing.T) {
+ highlightedHexDumpResponse :=
+ "00000000 61 62 63 \x1b[32m64\x1b[0m \x1b[32m65\x1b[0m \x1b[32m66\x1b[0m \x1b[32m67\x1b[0m \x1b[32m68\x1b[0m \x1b[32m69\x1b[0m \x1b[32m6a\x1b[0m 6b 6c 6d 6e 61 62 |abc\x1b[32md\x1b[0m\x1b[32me\x1b[0m\x1b[32mf\x1b[0m\x1b[32mg\x1b[0m\x1b[32mh\x1b[0m\x1b[32mi\x1b[0m\x1b[32mj\x1b[0mklmnab|\n" +
+ "00000010 63 \x1b[32m64\x1b[0m \x1b[32m65\x1b[0m \x1b[32m66\x1b[0m \x1b[32m67\x1b[0m \x1b[32m68\x1b[0m \x1b[32m69\x1b[0m \x1b[32m6a\x1b[0m 6b 6c 6d 6e 61 62 63 \x1b[32m64\x1b[0m |c\x1b[32md\x1b[0m\x1b[32me\x1b[0m\x1b[32mf\x1b[0m\x1b[32mg\x1b[0m\x1b[32mh\x1b[0m\x1b[32mi\x1b[0m\x1b[32mj\x1b[0mklmnabc\x1b[32md\x1b[0m|\n" +
+ "00000020 \x1b[32m65\x1b[0m \x1b[32m66\x1b[0m \x1b[32m67\x1b[0m \x1b[32m68\x1b[0m \x1b[32m69\x1b[0m \x1b[32m6a\x1b[0m 6b 6c 6d 6e 61 62 63 \x1b[32m64\x1b[0m \x1b[32m65\x1b[0m \x1b[32m66\x1b[0m |\x1b[32me\x1b[0m\x1b[32mf\x1b[0m\x1b[32mg\x1b[0m\x1b[32mh\x1b[0m\x1b[32mi\x1b[0m\x1b[32mj\x1b[0mklmnabc\x1b[32md\x1b[0m\x1b[32me\x1b[0m\x1b[32mf\x1b[0m|\n" +
+ "00000030 \x1b[32m67\x1b[0m \x1b[32m68\x1b[0m \x1b[32m69\x1b[0m \x1b[32m6a\x1b[0m 6b 6c 6d 6e 61 62 63 \x1b[32m64\x1b[0m \x1b[32m65\x1b[0m \x1b[32m66\x1b[0m \x1b[32m67\x1b[0m \x1b[32m68\x1b[0m |\x1b[32mg\x1b[0m\x1b[32mh\x1b[0m\x1b[32mi\x1b[0m\x1b[32mj\x1b[0mklmnabc\x1b[32md\x1b[0m\x1b[32me\x1b[0m\x1b[32mf\x1b[0m\x1b[32mg\x1b[0m\x1b[32mh\x1b[0m|\n" +
+ "00000040 \x1b[32m69\x1b[0m \x1b[32m6a\x1b[0m 6b 6c 6d 6e 61 62 63 \x1b[32m64\x1b[0m \x1b[32m65\x1b[0m \x1b[32m66\x1b[0m \x1b[32m67\x1b[0m \x1b[32m68\x1b[0m \x1b[32m69\x1b[0m \x1b[32m6a\x1b[0m |\x1b[32mi\x1b[0m\x1b[32mj\x1b[0mklmnabc\x1b[32md\x1b[0m\x1b[32me\x1b[0m\x1b[32mf\x1b[0m\x1b[32mg\x1b[0m\x1b[32mh\x1b[0m\x1b[32mi\x1b[0m\x1b[32mj\x1b[0m|\n" +
+ "00000050 6b 6c 6d 6e 61 62 63 \x1b[32m64\x1b[0m \x1b[32m65\x1b[0m \x1b[32m66\x1b[0m \x1b[32m67\x1b[0m \x1b[32m68\x1b[0m \x1b[32m69\x1b[0m \x1b[32m6a\x1b[0m 6b 6c |klmnabc\x1b[32md\x1b[0m\x1b[32me\x1b[0m\x1b[32mf\x1b[0m\x1b[32mg\x1b[0m\x1b[32mh\x1b[0m\x1b[32mi\x1b[0m\x1b[32mj\x1b[0mkl|\n" +
+ "00000060 6d 6e 61 62 63 \x1b[32m64\x1b[0m \x1b[32m65\x1b[0m \x1b[32m66\x1b[0m \x1b[32m67\x1b[0m \x1b[32m68\x1b[0m \x1b[32m69\x1b[0m \x1b[32m6a\x1b[0m 6b 6c 6d 6e |mnabc\x1b[32md\x1b[0m\x1b[32me\x1b[0m\x1b[32mf\x1b[0m\x1b[32mg\x1b[0m\x1b[32mh\x1b[0m\x1b[32mi\x1b[0m\x1b[32mj\x1b[0mklmn|\n" +
+ "00000070 61 62 63 \x1b[32m64\x1b[0m \x1b[32m65\x1b[0m \x1b[32m66\x1b[0m \x1b[32m67\x1b[0m \x1b[32m68\x1b[0m \x1b[32m69\x1b[0m \x1b[32m6a\x1b[0m 6b 6c 6d 6e |abc\x1b[32md\x1b[0m\x1b[32me\x1b[0m\x1b[32mf\x1b[0m\x1b[32mg\x1b[0m\x1b[32mh\x1b[0m\x1b[32mi\x1b[0m\x1b[32mj\x1b[0mklmn|\n"
+
+ t.Run("Test highlighting when the snippet is wrapped", func(t *testing.T) {
+ result, err := toHighLightedHexDump(hex.Dump([]byte(input)), "defghij")
+ assert.Nil(t, err)
+ assert.Equal(t, highlightedHexDumpResponse, result.String())
+ })
+
+ t.Run("Test highlight when the snippet contains separator character", func(t *testing.T) {
+ value := "asdfasdfasda|basdfadsdfs|"
+ result, err := toHighLightedHexDump(hex.Dump([]byte(value)), "a|b")
+
+ expected :=
+ "00000000 61 73 64 66 61 73 64 66 61 73 64 \x1b[32m61\x1b[0m \x1b[32m7c\x1b[0m \x1b[32m62\x1b[0m 61 73 |asdfasdfasd\x1b[32ma\x1b[0m\x1b[32m|\x1b[0m\x1b[32mb\x1b[0mas|\n" +
+ "00000010 64 66 61 64 73 64 66 73 7c |dfadsdfs||\n"
+
+ assert.Nil(t, err)
+ assert.Equal(t, expected, result.String())
+ })
+}
+
+func TestHighlight(t *testing.T) {
+ const multiSnippetHighlightHexDumpResponse = "00000000 \x1b[32m61\x1b[0m \x1b[32m62\x1b[0m 63 \x1b[32m64\x1b[0m \x1b[32m65\x1b[0m \x1b[32m66\x1b[0m \x1b[32m67\x1b[0m \x1b[32m68\x1b[0m \x1b[32m69\x1b[0m \x1b[32m6a\x1b[0m 6b 6c 6d 6e \x1b[32m61\x1b[0m \x1b[32m62\x1b[0m |\x1b[32ma\x1b[0m\x1b[32mb\x1b[0mc\x1b[32md\x1b[0m\x1b[32me\x1b[0m\x1b[32mf\x1b[0m\x1b[32mg\x1b[0m\x1b[32mh\x1b[0m\x1b[32mi\x1b[0m\x1b[32mj\x1b[0mklmn\x1b[32ma\x1b[0m\x1b[32mb\x1b[0m|\n" +
+ "00000010 63 \x1b[32m64\x1b[0m \x1b[32m65\x1b[0m \x1b[32m66\x1b[0m \x1b[32m67\x1b[0m \x1b[32m68\x1b[0m \x1b[32m69\x1b[0m \x1b[32m6a\x1b[0m 6b 6c 6d 6e \x1b[32m61\x1b[0m \x1b[32m62\x1b[0m 63 \x1b[32m64\x1b[0m |c\x1b[32md\x1b[0m\x1b[32me\x1b[0m\x1b[32mf\x1b[0m\x1b[32mg\x1b[0m\x1b[32mh\x1b[0m\x1b[32mi\x1b[0m\x1b[32mj\x1b[0mklmn\x1b[32ma\x1b[0m\x1b[32mb\x1b[0mc\x1b[32md\x1b[0m|\n" +
+ "00000020 \x1b[32m65\x1b[0m \x1b[32m66\x1b[0m \x1b[32m67\x1b[0m \x1b[32m68\x1b[0m \x1b[32m69\x1b[0m \x1b[32m6a\x1b[0m 6b 6c 6d 6e \x1b[32m61\x1b[0m \x1b[32m62\x1b[0m 63 \x1b[32m64\x1b[0m \x1b[32m65\x1b[0m \x1b[32m66\x1b[0m |\x1b[32me\x1b[0m\x1b[32mf\x1b[0m\x1b[32mg\x1b[0m\x1b[32mh\x1b[0m\x1b[32mi\x1b[0m\x1b[32mj\x1b[0mklmn\x1b[32ma\x1b[0m\x1b[32mb\x1b[0mc\x1b[32md\x1b[0m\x1b[32me\x1b[0m\x1b[32mf\x1b[0m|\n" +
+ "00000030 \x1b[32m67\x1b[0m \x1b[32m68\x1b[0m \x1b[32m69\x1b[0m \x1b[32m6a\x1b[0m 6b 6c 6d 6e \x1b[32m61\x1b[0m \x1b[32m62\x1b[0m 63 \x1b[32m64\x1b[0m \x1b[32m65\x1b[0m \x1b[32m66\x1b[0m \x1b[32m67\x1b[0m \x1b[32m68\x1b[0m |\x1b[32mg\x1b[0m\x1b[32mh\x1b[0m\x1b[32mi\x1b[0m\x1b[32mj\x1b[0mklmn\x1b[32ma\x1b[0m\x1b[32mb\x1b[0mc\x1b[32md\x1b[0m\x1b[32me\x1b[0m\x1b[32mf\x1b[0m\x1b[32mg\x1b[0m\x1b[32mh\x1b[0m|\n" +
+ "00000040 \x1b[32m69\x1b[0m \x1b[32m6a\x1b[0m 6b 6c 6d 6e \x1b[32m61\x1b[0m \x1b[32m62\x1b[0m 63 \x1b[32m64\x1b[0m \x1b[32m65\x1b[0m \x1b[32m66\x1b[0m \x1b[32m67\x1b[0m \x1b[32m68\x1b[0m \x1b[32m69\x1b[0m \x1b[32m6a\x1b[0m |\x1b[32mi\x1b[0m\x1b[32mj\x1b[0mklmn\x1b[32ma\x1b[0m\x1b[32mb\x1b[0mc\x1b[32md\x1b[0m\x1b[32me\x1b[0m\x1b[32mf\x1b[0m\x1b[32mg\x1b[0m\x1b[32mh\x1b[0m\x1b[32mi\x1b[0m\x1b[32mj\x1b[0m|\n" +
+ "00000050 6b 6c 6d 6e \x1b[32m61\x1b[0m \x1b[32m62\x1b[0m 63 \x1b[32m64\x1b[0m \x1b[32m65\x1b[0m \x1b[32m66\x1b[0m \x1b[32m67\x1b[0m \x1b[32m68\x1b[0m \x1b[32m69\x1b[0m \x1b[32m6a\x1b[0m 6b 6c |klmn\x1b[32ma\x1b[0m\x1b[32mb\x1b[0mc\x1b[32md\x1b[0m\x1b[32me\x1b[0m\x1b[32mf\x1b[0m\x1b[32mg\x1b[0m\x1b[32mh\x1b[0m\x1b[32mi\x1b[0m\x1b[32mj\x1b[0mkl|\n" +
+ "00000060 6d 6e \x1b[32m61\x1b[0m \x1b[32m62\x1b[0m 63 \x1b[32m64\x1b[0m \x1b[32m65\x1b[0m \x1b[32m66\x1b[0m \x1b[32m67\x1b[0m \x1b[32m68\x1b[0m \x1b[32m69\x1b[0m \x1b[32m6a\x1b[0m 6b 6c 6d 6e |mn\x1b[32ma\x1b[0m\x1b[32mb\x1b[0mc\x1b[32md\x1b[0m\x1b[32me\x1b[0m\x1b[32mf\x1b[0m\x1b[32mg\x1b[0m\x1b[32mh\x1b[0m\x1b[32mi\x1b[0m\x1b[32mj\x1b[0mklmn|\n" +
+ "00000070 \x1b[32m61\x1b[0m \x1b[32m62\x1b[0m 63 \x1b[32m64\x1b[0m \x1b[32m65\x1b[0m \x1b[32m66\x1b[0m \x1b[32m67\x1b[0m \x1b[32m68\x1b[0m \x1b[32m69\x1b[0m \x1b[32m6a\x1b[0m 6b 6c 6d 6e |\x1b[32ma\x1b[0m\x1b[32mb\x1b[0mc\x1b[32md\x1b[0m\x1b[32me\x1b[0m\x1b[32mf\x1b[0m\x1b[32mg\x1b[0m\x1b[32mh\x1b[0m\x1b[32mi\x1b[0m\x1b[32mj\x1b[0mklmn|\n"
+
+ matches := map[string][]string{
+ "first": {"defghij"},
+ "second": {"ab"},
+ }
+ operatorResult := operators.Result{Matches: matches}
+
+ t.Run("Test highlighting when the snippet is wrapped", func(t *testing.T) {
+ result := Highlight(&operatorResult, hex.Dump([]byte(input)), false, true)
+ assert.Equal(t, multiSnippetHighlightHexDumpResponse, result)
+ })
+
+ t.Run("Test highlighting without hexdump", func(t *testing.T) {
+ result := Highlight(&operatorResult, input, false, false)
+ expected :=
+ "\x1b[32ma\x1b[0m\x1b[32mb\x1b[0mc\x1b[32md\x1b[0m\x1b[32me\x1b[0m\x1b[32mf\x1b[0m\x1b[32mg\x1b[0m\x1b[32mh\x1b[0m\x1b[32mi\x1b[0m\x1b[32mj\x1b[0mklmn\x1b[32m" +
+ "a\x1b[0m\x1b[32mb\x1b[0mc\x1b[32md\x1b[0m\x1b[32me\x1b[0m\x1b[32mf\x1b[0m\x1b[32mg\x1b[0m\x1b[32mh\x1b[0m\x1b[32mi\x1b[0m\x1b[32mj\x1b[0mklmn\x1b[32m" +
+ "a\x1b[0m\x1b[32mb\x1b[0mc\x1b[32md\x1b[0m\x1b[32me\x1b[0m\x1b[32mf\x1b[0m\x1b[32mg\x1b[0m\x1b[32mh\x1b[0m\x1b[32mi\x1b[0m\x1b[32mj\x1b[0mklmn\x1b[32m" +
+ "a\x1b[0m\x1b[32mb\x1b[0mc\x1b[32md\x1b[0m\x1b[32me\x1b[0m\x1b[32mf\x1b[0m\x1b[32mg\x1b[0m\x1b[32mh\x1b[0m\x1b[32mi\x1b[0m\x1b[32mj\x1b[0mklmn\x1b[32m" +
+ "a\x1b[0m\x1b[32mb\x1b[0mc\x1b[32md\x1b[0m\x1b[32me\x1b[0m\x1b[32mf\x1b[0m\x1b[32mg\x1b[0m\x1b[32mh\x1b[0m\x1b[32mi\x1b[0m\x1b[32mj\x1b[0mklmn\x1b[32m" +
+ "a\x1b[0m\x1b[32mb\x1b[0mc\x1b[32md\x1b[0m\x1b[32me\x1b[0m\x1b[32mf\x1b[0m\x1b[32mg\x1b[0m\x1b[32mh\x1b[0m\x1b[32mi\x1b[0m\x1b[32mj\x1b[0mklmn\x1b[32m" +
+ "a\x1b[0m\x1b[32mb\x1b[0mc\x1b[32md\x1b[0m\x1b[32me\x1b[0m\x1b[32mf\x1b[0m\x1b[32mg\x1b[0m\x1b[32mh\x1b[0m\x1b[32mi\x1b[0m\x1b[32mj\x1b[0mklmn\x1b[32m" +
+ "a\x1b[0m\x1b[32mb\x1b[0mc\x1b[32md\x1b[0m\x1b[32me\x1b[0m\x1b[32mf\x1b[0m\x1b[32mg\x1b[0m\x1b[32mh\x1b[0m\x1b[32mi\x1b[0m\x1b[32mj\x1b[0mklmn\x1b[32m" +
+ "a\x1b[0m\x1b[32mb\x1b[0mc\x1b[32md\x1b[0m\x1b[32me\x1b[0m\x1b[32mf\x1b[0m\x1b[32mg\x1b[0m\x1b[32mh\x1b[0m\x1b[32mi\x1b[0m\x1b[32mj\x1b[0mklmn"
+ assert.Equal(t, expected, result)
+ })
+
+ t.Run("Test the response is not modified if noColor is true", func(t *testing.T) {
+ result := Highlight(&operatorResult, input, true, false)
+ assert.Equal(t, input, result)
+ })
+
+	t.Run("Test the hexdump response is not modified if noColor is true", func(t *testing.T) {
+ result := Highlight(&operatorResult, hex.Dump([]byte(input)), true, true)
+ assert.Equal(t, hex.Dump([]byte(input)), result)
+ })
+}
+
+func TestMultiSubstringMatchHighlight(t *testing.T) {
+ const input = `
+start ValueToMatch end
+start ValueToMatch-1.2.3 end
+start ValueToMatch-2.1 end
+`
+ matches := map[string][]string{
+ "first": {"ValueToMatch"},
+ "second": {"ValueToMatch-1.2.3"},
+ "third": {"ValueToMatch-2.1"},
+ }
+ operatorResult := operators.Result{Matches: matches}
+
+ expected :=
+ "\nstart \x1b[32mV\x1b[0m\x1b[32ma\x1b[0m\x1b[32ml\x1b[0m\x1b[32mu\x1b[0m\x1b[32me\x1b[0m\x1b[32mT\x1b[0m\x1b[32mo\x1b[0m\x1b[32mM\x1b[0m\x1b[32ma\x1b[0m\x1b[32mt\x1b[0m\x1b[32mc\x1b[0m\x1b[32mh\x1b[0m end\n" +
+ "start \x1b[32mV\x1b[0m\x1b[32ma\x1b[0m\x1b[32ml\x1b[0m\x1b[32mu\x1b[0m\x1b[32me\x1b[0m\x1b[32mT\x1b[0m\x1b[32mo\x1b[0m\x1b[32mM\x1b[0m\x1b[32ma\x1b[0m\x1b[32mt\x1b[0m\x1b[32mc\x1b[0m\x1b[32mh\x1b[0m\x1b[32m-\x1b[0m\x1b[32m1\x1b[0m\x1b[32m.\x1b[0m\x1b[32m2\x1b[0m\x1b[32m.\x1b[0m\x1b[32m3\x1b[0m end\n" +
+ "start \x1b[32mV\x1b[0m\x1b[32ma\x1b[0m\x1b[32ml\x1b[0m\x1b[32mu\x1b[0m\x1b[32me\x1b[0m\x1b[32mT\x1b[0m\x1b[32mo\x1b[0m\x1b[32mM\x1b[0m\x1b[32ma\x1b[0m\x1b[32mt\x1b[0m\x1b[32mc\x1b[0m\x1b[32mh\x1b[0m\x1b[32m-\x1b[0m\x1b[32m2\x1b[0m\x1b[32m.\x1b[0m\x1b[32m1\x1b[0m end \n"
+ result := Highlight(&operatorResult, input, false, false)
+ assert.Equal(t, expected, result)
+}
diff --git a/v2/pkg/protocols/common/helpers/writer/writer.go b/v2/pkg/protocols/common/helpers/writer/writer.go
new file mode 100644
index 000000000..91b98f33b
--- /dev/null
+++ b/v2/pkg/protocols/common/helpers/writer/writer.go
@@ -0,0 +1,35 @@
+package writer
+
+import (
+ "github.com/projectdiscovery/gologger"
+ "github.com/projectdiscovery/nuclei/v2/pkg/output"
+ "github.com/projectdiscovery/nuclei/v2/pkg/progress"
+ "github.com/projectdiscovery/nuclei/v2/pkg/reporting"
+)
+
+// WriteResult is a helper for writing results to the output
+func WriteResult(data *output.InternalWrappedEvent, output output.Writer, progress progress.Progress, issuesClient *reporting.Client) bool {
+	// Handle the case where no result was found for the template.
+ // In this case, we just show misc information about the failed
+ // match for the template.
+ if data.OperatorsResult == nil {
+ return false
+ }
+ var matched bool
+ for _, result := range data.Results {
+ if err := output.Write(result); err != nil {
+ gologger.Warning().Msgf("Could not write output event: %s\n", err)
+ }
+ if !matched {
+ matched = true
+ }
+ progress.IncrementMatched()
+
+ if issuesClient != nil {
+ if err := issuesClient.CreateIssue(result); err != nil {
+ gologger.Warning().Msgf("Could not create issue on tracker: %s", err)
+ }
+ }
+ }
+ return matched
+}
diff --git a/v2/pkg/protocols/common/hosterrorscache/hosterrorscache.go b/v2/pkg/protocols/common/hosterrorscache/hosterrorscache.go
index 14aefff8f..c509ff847 100644
--- a/v2/pkg/protocols/common/hosterrorscache/hosterrorscache.go
+++ b/v2/pkg/protocols/common/hosterrorscache/hosterrorscache.go
@@ -7,6 +7,7 @@ import (
"strings"
"github.com/bluele/gcache"
+
"github.com/projectdiscovery/gologger"
)
@@ -16,7 +17,7 @@ import (
// It uses an LRU cache internally for skipping unresponsive hosts
// that remain so for a duration.
type Cache struct {
- MaxHostError int
+ MaxHostError int
verbose bool
failedTargets gcache.Cache
}
@@ -24,11 +25,11 @@ type Cache struct {
const DefaultMaxHostsCount = 10000
// New returns a new host max errors cache
-func New(MaxHostError, maxHostsCount int) *Cache {
+func New(maxHostError, maxHostsCount int) *Cache {
gc := gcache.New(maxHostsCount).
ARC().
Build()
- return &Cache{failedTargets: gc, MaxHostError: MaxHostError}
+ return &Cache{failedTargets: gc, MaxHostError: maxHostError}
}
// SetVerbose sets the cache to log at verbose level
@@ -46,7 +47,6 @@ func (c *Cache) normalizeCacheValue(value string) string {
finalValue := value
if strings.HasPrefix(value, "http") {
if parsed, err := url.Parse(value); err == nil {
-
hostname := parsed.Host
finalPort := parsed.Port()
if finalPort == "" {
@@ -64,7 +64,7 @@ func (c *Cache) normalizeCacheValue(value string) string {
}
// ErrUnresponsiveHost is returned when a host is unresponsive
-//var ErrUnresponsiveHost = errors.New("skipping as host is unresponsive")
+// var ErrUnresponsiveHost = errors.New("skipping as host is unresponsive")
// Check returns true if a host should be skipped as it has been
// unresponsive for a certain number of times.
diff --git a/v2/pkg/protocols/common/hosterrorscache/hosterrorscache_test.go b/v2/pkg/protocols/common/hosterrorscache/hosterrorscache_test.go
index fa13bd82e..a366a65bd 100644
--- a/v2/pkg/protocols/common/hosterrorscache/hosterrorscache_test.go
+++ b/v2/pkg/protocols/common/hosterrorscache/hosterrorscache_test.go
@@ -11,15 +11,15 @@ func TestCacheCheckMarkFailed(t *testing.T) {
cache.MarkFailed("http://example.com:80")
if value, err := cache.failedTargets.Get("http://example.com:80"); err == nil && value != nil {
- require.Equal(t, 1, value, "could not get correct markfailed")
+ require.Equal(t, 1, value, "could not get correct number of marked failed hosts")
}
cache.MarkFailed("example.com:80")
if value, err := cache.failedTargets.Get("example.com:80"); err == nil && value != nil {
- require.Equal(t, 2, value, "could not get correct markfailed")
+ require.Equal(t, 2, value, "could not get correct number of marked failed hosts")
}
cache.MarkFailed("example.com")
if value, err := cache.failedTargets.Get("example.com"); err == nil && value != nil {
- require.Equal(t, 1, value, "could not get correct markfailed")
+ require.Equal(t, 1, value, "could not get correct number of marked failed hosts")
}
for i := 0; i < 3; i++ {
cache.MarkFailed("test")
diff --git a/v2/pkg/protocols/common/interactsh/interactsh.go b/v2/pkg/protocols/common/interactsh/interactsh.go
index 5c2914ff5..b810c346d 100644
--- a/v2/pkg/protocols/common/interactsh/interactsh.go
+++ b/v2/pkg/protocols/common/interactsh/interactsh.go
@@ -19,6 +19,7 @@ import (
"github.com/projectdiscovery/nuclei/v2/pkg/operators"
"github.com/projectdiscovery/nuclei/v2/pkg/output"
"github.com/projectdiscovery/nuclei/v2/pkg/progress"
+ "github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/helpers/writer"
"github.com/projectdiscovery/nuclei/v2/pkg/reporting"
)
@@ -72,6 +73,8 @@ type Options struct {
Progress progress.Progress
// Debug specifies whether debugging output should be shown for interactsh-client
Debug bool
+
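+	// NoInteractsh disables the use of the interactsh client when set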
+ NoInteractsh bool
}
const defaultMaxInteractionsCount = 5000
@@ -103,7 +106,24 @@ func New(options *Options) (*Client, error) {
return interactClient, nil
}
+// NewDefaultOptions returns the default options for interactsh client
+func NewDefaultOptions(output output.Writer, reporting *reporting.Client, progress progress.Progress) *Options {
+ return &Options{
+ ServerURL: "https://interactsh.com",
+ CacheSize: 5000,
+ Eviction: 60 * time.Second,
+ ColldownPeriod: 5 * time.Second,
+ PollDuration: 5 * time.Second,
+ Output: output,
+ IssuesClient: reporting,
+ Progress: progress,
+ }
+}
+
func (c *Client) firstTimeInitializeClient() error {
+ if c.options.NoInteractsh {
+ return nil // do not init if disabled
+ }
interactsh, err := client.New(&client.Options{
ServerURL: c.options.ServerURL,
Token: c.options.Authorization,
@@ -158,19 +178,8 @@ func (c *Client) processInteractionForRequest(interaction *server.Interaction, d
}
data.Event.Results = data.MakeResultFunc(data.Event)
- for _, result := range data.Event.Results {
- result.Interaction = interaction
- _ = c.options.Output.Write(result)
- if !c.matched {
- c.matched = true
- }
- c.options.Progress.IncrementMatched()
-
- if c.options.IssuesClient != nil {
- if err := c.options.IssuesClient.CreateIssue(result); err != nil {
- gologger.Warning().Msgf("Could not create issue on tracker: %s", err)
- }
- }
+ if writer.WriteResult(data.Event, c.options.Output, c.options.Progress, c.options.IssuesClient) {
+ c.matched = true
}
return true
}
@@ -206,12 +215,13 @@ func (c *Client) Close() bool {
//
// It accepts data to replace as well as the URL to replace placeholders
// with generated uniquely for each request.
-func (c *Client) ReplaceMarkers(data, interactshURL string) string {
- if !strings.Contains(data, interactshURLMarker) {
- return data
+func (c *Client) ReplaceMarkers(data string, interactshURLs []string) (string, []string) {
+ for strings.Contains(data, interactshURLMarker) {
+ url := c.URL()
+ interactshURLs = append(interactshURLs, url)
+ data = strings.Replace(data, interactshURLMarker, url, 1)
}
- replaced := strings.NewReplacer("{{interactsh-url}}", interactshURL).Replace(data)
- return replaced
+ return data, interactshURLs
}
// MakeResultEventFunc is a result making function for nuclei
@@ -227,30 +237,29 @@ type RequestData struct {
}
// RequestEvent is the event for a network request sent by nuclei.
-func (c *Client) RequestEvent(interactshURL string, data *RequestData) {
- id := strings.TrimSuffix(interactshURL, c.dotHostname)
+func (c *Client) RequestEvent(interactshURLs []string, data *RequestData) {
+ for _, interactshURL := range interactshURLs {
+ id := strings.TrimSuffix(interactshURL, c.dotHostname)
- interaction := c.interactions.Get(id)
- if interaction != nil {
- // If we have previous interactions, get them and process them.
- interactions, ok := interaction.Value().([]*server.Interaction)
- if !ok {
- c.requests.Set(id, data, c.eviction)
- return
- }
- matched := false
- for _, interaction := range interactions {
- if c.processInteractionForRequest(interaction, data) {
- matched = true
- break
+ interaction := c.interactions.Get(id)
+ if interaction != nil {
+ // If we have previous interactions, get them and process them.
+ interactions, ok := interaction.Value().([]*server.Interaction)
+ if !ok {
+ c.requests.Set(id, data, c.eviction)
+ return
}
+ for _, interaction := range interactions {
+ if c.processInteractionForRequest(interaction, data) {
+ c.interactions.Delete(id)
+ break
+ }
+ }
+ } else {
+ c.requests.Set(id, data, c.eviction)
}
- if matched {
- c.interactions.Delete(id)
- }
- } else {
- c.requests.Set(id, data, c.eviction)
}
}
// HasMatchers returns true if an operator has interactsh part
diff --git a/v2/pkg/protocols/common/protocolinit/init.go b/v2/pkg/protocols/common/protocolinit/init.go
index 1877a1ab3..307c33a2f 100644
--- a/v2/pkg/protocols/common/protocolinit/init.go
+++ b/v2/pkg/protocols/common/protocolinit/init.go
@@ -2,6 +2,7 @@ package protocolinit
import (
"github.com/corpix/uarand"
+
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/protocolstate"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/dns/dnsclientpool"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/http/httpclientpool"
diff --git a/v2/pkg/protocols/common/protocolstate/state.go b/v2/pkg/protocols/common/protocolstate/state.go
index 0f80f6647..28c9df525 100644
--- a/v2/pkg/protocols/common/protocolstate/state.go
+++ b/v2/pkg/protocols/common/protocolstate/state.go
@@ -2,6 +2,7 @@ package protocolstate
import (
"github.com/pkg/errors"
+
"github.com/projectdiscovery/fastdialer/fastdialer"
"github.com/projectdiscovery/nuclei/v2/pkg/types"
)
@@ -18,6 +19,7 @@ func Init(options *types.Options) error {
if options.ResolversFile != "" {
opts.BaseResolvers = options.InternalResolversList
}
+ opts.WithDialerHistory = true
dialer, err := fastdialer.NewDialer(opts)
if err != nil {
return errors.Wrap(err, "could not create dialer")
diff --git a/v2/pkg/protocols/dns/dns.go b/v2/pkg/protocols/dns/dns.go
index 15f42dc02..55be679a6 100644
--- a/v2/pkg/protocols/dns/dns.go
+++ b/v2/pkg/protocols/dns/dns.go
@@ -7,8 +7,11 @@ import (
"github.com/miekg/dns"
"github.com/pkg/errors"
+ "github.com/weppos/publicsuffix-go/publicsuffix"
+
"github.com/projectdiscovery/nuclei/v2/pkg/operators"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols"
+ "github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/expressions"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/replacer"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/dns/dnsclientpool"
"github.com/projectdiscovery/retryabledns"
@@ -30,18 +33,8 @@ type Request struct {
// - value: "\"{{FQDN}}\""
Name string `yaml:"name,omitempty" jsonschema:"title=hostname to make dns request for,description=Name is the Hostname to make DNS request for"`
// description: |
- // Type is the type of DNS request to make.
- // values:
- // - "A"
- // - "NS"
- // - "DS"
- // - "CNAME"
- // - "SOA"
- // - "PTR"
- // - "MX"
- // - "TXT"
- // - "AAAA"
- Type string `yaml:"type,omitempty" jsonschema:"title=type of dns request to make,description=Type is the type of DNS request to make,enum=A,enum=NS,enum=DS,enum=CNAME,enum=SOA,enum=PTR,enum=MX,enum=TXT,enum=AAAA"`
+ // RequestType is the type of DNS request to make.
+ RequestType DNSRequestTypeHolder `yaml:"type,omitempty" jsonschema:"title=type of dns request to make,description=Type is the type of DNS request to make,enum=A,enum=NS,enum=DS,enum=CNAME,enum=SOA,enum=PTR,enum=MX,enum=TXT,enum=AAAA"`
// description: |
// Class is the class of the DNS request.
//
@@ -60,6 +53,15 @@ type Request struct {
// - name: Use a retry of 3 to 5 generally
// value: 5
Retries int `yaml:"retries,omitempty" jsonschema:"title=retries for dns request,description=Retries is the number of retries for the DNS request"`
+ // description: |
+ // Trace performs a trace operation for the target.
+ Trace bool `yaml:"trace,omitempty" jsonschema:"title=trace operation,description=Trace performs a trace operation for the target."`
+ // description: |
+ // TraceMaxRecursion is the number of max recursion allowed for trace operations
+ // examples:
+	//   - name: Use a max recursion of 100 to 150 generally
+ // value: 100
+ TraceMaxRecursion int `yaml:"trace-max-recursion,omitempty" jsonschema:"title=trace-max-recursion level for dns request,description=TraceMaxRecursion is the number of max recursion allowed for trace operations"`
CompiledOperators *operators.Operators `yaml:"-"`
dnsClient *retryabledns.Client
@@ -71,11 +73,31 @@ type Request struct {
// description: |
// Recursion determines if resolver should recurse all records to get fresh results.
- Recursion bool `yaml:"recursion,omitempty" jsonschema:"title=recurse all servers,description=Recursion determines if resolver should recurse all records to get fresh results"`
+ Recursion *bool `yaml:"recursion,omitempty" jsonschema:"title=recurse all servers,description=Recursion determines if resolver should recurse all records to get fresh results"`
// Resolvers to use for the dns requests
Resolvers []string `yaml:"resolvers,omitempty" jsonschema:"title=Resolvers,description=Define resolvers to use within the template"`
}
+// RequestPartDefinitions contains a mapping of request part definitions and their
+// descriptions. Multiple definitions are separated by commas.
+// Definitions without a name (generated at runtime) are prefixed & suffixed by <>.
+var RequestPartDefinitions = map[string]string{
+ "template-id": "ID of the template executed",
+ "template-info": "Info Block of the template executed",
+ "template-path": "Path of the template executed",
+ "host": "Host is the input to the template",
+ "matched": "Matched is the input which was matched upon",
+ "request": "Request contains the DNS request in text format",
+ "type": "Type is the type of request made",
+ "rcode": "Rcode field returned for the DNS request",
+ "question": "Question contains the DNS question field",
+ "extra": "Extra contains the DNS response extra field",
+ "answer": "Answer contains the DNS response answer field",
+ "ns": "NS contains the DNS response NS field",
+ "raw,body,all": "Raw contains the raw DNS response (default)",
+ "trace": "Trace contains trace data for DNS request if enabled",
+}
+
func (request *Request) GetCompiledOperators() []*operators.Operators {
return []*operators.Operators{request.CompiledOperators}
}
@@ -87,6 +109,13 @@ func (request *Request) GetID() string {
// Compile compiles the protocol request for further execution.
func (request *Request) Compile(options *protocols.ExecuterOptions) error {
+ if request.Retries == 0 {
+ request.Retries = 3
+ }
+ if request.Recursion == nil {
+ recursion := true
+ request.Recursion = &recursion
+ }
dnsClientOptions := &dnsclientpool.Configuration{
Retries: request.Retries,
}
@@ -94,7 +123,7 @@ func (request *Request) Compile(options *protocols.ExecuterOptions) error {
dnsClientOptions.Resolvers = request.Resolvers
}
// Create a dns client for the class
- client, err := dnsclientpool.Get(options.Options, dnsClientOptions)
+ client, err := request.getDnsClient(options, nil)
if err != nil {
return errors.Wrap(err, "could not get dns client")
}
@@ -109,10 +138,32 @@ func (request *Request) Compile(options *protocols.ExecuterOptions) error {
}
request.class = classToInt(request.Class)
request.options = options
- request.question = questionTypeToInt(request.Type)
+ request.question = questionTypeToInt(request.RequestType.String())
return nil
}
+func (request *Request) getDnsClient(options *protocols.ExecuterOptions, metadata map[string]interface{}) (*retryabledns.Client, error) {
+ dnsClientOptions := &dnsclientpool.Configuration{
+ Retries: request.Retries,
+ }
+ if len(request.Resolvers) > 0 {
+ if len(request.Resolvers) > 0 {
+ for _, resolver := range request.Resolvers {
+ if expressions.ContainsUnresolvedVariables(resolver) != nil {
+ var err error
+ resolver, err = expressions.Evaluate(resolver, metadata)
+ if err != nil {
+ return nil, errors.Wrap(err, "could not resolve resolvers expressions")
+ }
+ dnsClientOptions.Resolvers = append(dnsClientOptions.Resolvers, resolver)
+ }
+ }
+ }
+ dnsClientOptions.Resolvers = request.Resolvers
+ }
+ return dnsclientpool.Get(options.Options, dnsClientOptions)
+}
+
// Requests returns the total number of requests the YAML rule will perform
func (request *Request) Requests() int {
return 1
@@ -128,11 +179,11 @@ func (request *Request) Make(domain string) (*dns.Msg, error) {
// Build a request on the specified URL
req := new(dns.Msg)
req.Id = dns.Id()
- req.RecursionDesired = request.Recursion
+ req.RecursionDesired = *request.Recursion
var q dns.Question
- final := replacer.Replace(request.Name, map[string]interface{}{"FQDN": domain})
+ final := replacer.Replace(request.Name, generateDNSVariables(domain))
q.Name = dns.Fqdn(final)
q.Qclass = request.class
@@ -198,3 +249,19 @@ func classToInt(class string) uint16 {
}
return uint16(result)
}
+
+func generateDNSVariables(domain string) map[string]interface{} {
+ parsed, err := publicsuffix.Parse(strings.TrimSuffix(domain, "."))
+ if err != nil {
+ return map[string]interface{}{"FQDN": domain}
+ }
+
+ domainName := strings.Join([]string{parsed.SLD, parsed.TLD}, ".")
+ return map[string]interface{}{
+ "FQDN": domain,
+ "RDN": domainName,
+ "DN": parsed.SLD,
+ "TLD": parsed.TLD,
+ "SD": parsed.TRD,
+ }
+}
diff --git a/v2/pkg/protocols/dns/dns_test.go b/v2/pkg/protocols/dns/dns_test.go
index ae287984d..0aae3ed20 100644
--- a/v2/pkg/protocols/dns/dns_test.go
+++ b/v2/pkg/protocols/dns/dns_test.go
@@ -5,23 +5,35 @@ import (
"github.com/stretchr/testify/require"
- "github.com/projectdiscovery/nuclei/v2/internal/testutils"
"github.com/projectdiscovery/nuclei/v2/pkg/model"
"github.com/projectdiscovery/nuclei/v2/pkg/model/types/severity"
+ "github.com/projectdiscovery/nuclei/v2/pkg/testutils"
)
+func TestGenerateDNSVariables(t *testing.T) {
+ vars := generateDNSVariables("www.projectdiscovery.io")
+ require.Equal(t, map[string]interface{}{
+ "FQDN": "www.projectdiscovery.io",
+ "RDN": "projectdiscovery.io",
+ "DN": "projectdiscovery",
+ "TLD": "io",
+ "SD": "www",
+ }, vars, "could not get dns variables")
+}
+
func TestDNSCompileMake(t *testing.T) {
options := testutils.DefaultOptions
+ recursion := false
testutils.Init(options)
const templateID = "testing-dns"
request := &Request{
- Type: "A",
- Class: "INET",
- Retries: 5,
- ID: templateID,
- Recursion: false,
- Name: "{{FQDN}}",
+ RequestType: DNSRequestTypeHolder{DNSRequestType: A},
+ Class: "INET",
+ Retries: 5,
+ ID: templateID,
+ Recursion: &recursion,
+ Name: "{{FQDN}}",
}
executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
ID: templateID,
diff --git a/v2/pkg/protocols/dns/dns_types.go b/v2/pkg/protocols/dns/dns_types.go
new file mode 100644
index 000000000..d61d9b6d0
--- /dev/null
+++ b/v2/pkg/protocols/dns/dns_types.go
@@ -0,0 +1,119 @@
+package dns
+
+import (
+ "encoding/json"
+ "errors"
+ "strings"
+
+ "github.com/alecthomas/jsonschema"
+)
+
+// DNSRequestType is the type of DNS request specified
+type DNSRequestType int
+
+// name:DNSRequestType
+const (
+ // name:A
+ A DNSRequestType = iota + 1
+ // name:NS
+ NS
+ // name:DS
+ DS
+ // name:CNAME
+ CNAME
+ // name:SOA
+ SOA
+ // name:PTR
+ PTR
+ // name:MX
+ MX
+ // name:TXT
+ TXT
+ // name:AAAA
+ AAAA
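+	// limit is a sentinel value used only for iterating over the supported request types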
+ limit
+)
+
+// DNSRequestTypeMapping is a table for converting a DNSRequestType to its string representation.
+var DNSRequestTypeMapping = map[DNSRequestType]string{
+ A: "A",
+ NS: "NS",
+ DS: "DS",
+ CNAME: "CNAME",
+ SOA: "SOA",
+ PTR: "PTR",
+ MX: "MX",
+ TXT: "TXT",
+ AAAA: "AAAA",
+}
+
+// GetSupportedDNSRequestTypes returns list of supported types
+func GetSupportedDNSRequestTypes() []DNSRequestType {
+ var result []DNSRequestType
+ for index := DNSRequestType(1); index < limit; index++ {
+ result = append(result, index)
+ }
+ return result
+}
+
+func toDNSRequestTypes(valueToMap string) (DNSRequestType, error) {
+ normalizedValue := normalizeValue(valueToMap)
+ for key, currentValue := range DNSRequestTypeMapping {
+ if normalizedValue == currentValue {
+ return key, nil
+ }
+ }
+ return -1, errors.New("Invalid DNS request type: " + valueToMap)
+}
+
+func normalizeValue(value string) string {
+ return strings.TrimSpace(strings.ToUpper(value))
+}
+
+func (t DNSRequestType) String() string {
+ return DNSRequestTypeMapping[t]
+}
+
+// DNSRequestTypeHolder is used to hold the internal DNS request type
+type DNSRequestTypeHolder struct {
+ DNSRequestType DNSRequestType `mapping:"true"`
+}
+
+func (holder DNSRequestTypeHolder) String() string {
+ return holder.DNSRequestType.String()
+}
+
+func (holder DNSRequestTypeHolder) JSONSchemaType() *jsonschema.Type {
+ gotType := &jsonschema.Type{
+ Type: "string",
+ Title: "type of DNS request to make",
+ Description: "Type is the type of DNS request to make,enum=A,enum=NS,enum=DS,enum=CNAME,enum=SOA,enum=PTR,enum=MX,enum=TXT,enum=AAAA",
+ }
+ for _, types := range GetSupportedDNSRequestTypes() {
+ gotType.Enum = append(gotType.Enum, types.String())
+ }
+ return gotType
+}
+
+func (holder *DNSRequestTypeHolder) UnmarshalYAML(unmarshal func(interface{}) error) error {
+ var marshalledTypes string
+ if err := unmarshal(&marshalledTypes); err != nil {
+ return err
+ }
+
+ computedType, err := toDNSRequestTypes(marshalledTypes)
+ if err != nil {
+ return err
+ }
+
+ holder.DNSRequestType = computedType
+ return nil
+}
+
+func (holder *DNSRequestTypeHolder) MarshalJSON() ([]byte, error) {
+ return json.Marshal(holder.DNSRequestType.String())
+}
+
+func (holder DNSRequestTypeHolder) MarshalYAML() (interface{}, error) {
+ return holder.DNSRequestType.String(), nil
+}
diff --git a/v2/pkg/protocols/dns/dnsclientpool/clientpool.go b/v2/pkg/protocols/dns/dnsclientpool/clientpool.go
index 46308c9fe..d603621a9 100644
--- a/v2/pkg/protocols/dns/dnsclientpool/clientpool.go
+++ b/v2/pkg/protocols/dns/dnsclientpool/clientpool.go
@@ -23,7 +23,7 @@ var defaultResolvers = []string{
"8.8.4.4:53", // Google
}
-// Init initializes the clientpool implementation
+// Init initializes the client pool implementation
func Init(options *types.Options) error {
// Don't create clients if already created in the past.
if normalClient != nil {
diff --git a/v2/pkg/protocols/dns/operators.go b/v2/pkg/protocols/dns/operators.go
index b9f0454b3..f8b445fd6 100644
--- a/v2/pkg/protocols/dns/operators.go
+++ b/v2/pkg/protocols/dns/operators.go
@@ -2,6 +2,8 @@ package dns
import (
"bytes"
+ "fmt"
+ "strings"
"time"
"github.com/miekg/dns"
@@ -12,17 +14,12 @@ import (
"github.com/projectdiscovery/nuclei/v2/pkg/output"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols"
"github.com/projectdiscovery/nuclei/v2/pkg/types"
+ "github.com/projectdiscovery/retryabledns"
)
-// Match matches a generic data response again a given matcher
+// Match matches a generic data response against a given matcher
func (request *Request) Match(data map[string]interface{}, matcher *matchers.Matcher) (bool, []string) {
- partString := matcher.Part
- switch partString {
- case "body", "all", "":
- partString = "raw"
- }
-
- item, ok := data[partString]
+ item, ok := request.getMatchPart(matcher.Part, data)
if !ok {
return false, []string{}
}
@@ -50,29 +47,36 @@ func (request *Request) Match(data map[string]interface{}, matcher *matchers.Mat
// Extract performs extracting operation for an extractor on model and returns true or false.
func (request *Request) Extract(data map[string]interface{}, extractor *extractors.Extractor) map[string]struct{} {
- part := extractor.Part
- switch part {
- case "body", "all":
- part = "raw"
- }
-
- item, ok := data[part]
+ item, ok := request.getMatchPart(extractor.Part, data)
if !ok {
return nil
}
- itemStr := types.ToString(item)
switch extractor.GetType() {
case extractors.RegexExtractor:
- return extractor.ExtractRegex(itemStr)
+ return extractor.ExtractRegex(types.ToString(item))
case extractors.KValExtractor:
return extractor.ExtractKval(data)
}
return nil
}
+func (request *Request) getMatchPart(part string, data output.InternalEvent) (interface{}, bool) {
+ switch part {
+ case "body", "all", "":
+ part = "raw"
+ }
+
+ item, ok := data[part]
+ if !ok {
+ return "", false
+ }
+
+ return item, true
+}
+
// responseToDSLMap converts a DNS response to a map for use in DSL matching
-func (request *Request) responseToDSLMap(req, resp *dns.Msg, host, matched string) output.InternalEvent {
+func (request *Request) responseToDSLMap(req, resp *dns.Msg, host, matched string, traceData *retryabledns.TraceData) output.InternalEvent {
return output.InternalEvent{
"host": host,
"matched": matched,
@@ -86,6 +90,8 @@ func (request *Request) responseToDSLMap(req, resp *dns.Msg, host, matched strin
"template-id": request.options.TemplateID,
"template-info": request.options.TemplateInfo,
"template-path": request.options.TemplatePath,
+ "type": request.Type().String(),
+ "trace": traceToString(traceData, false),
}
}
@@ -99,10 +105,11 @@ func (request *Request) MakeResultEventItem(wrapped *output.InternalWrappedEvent
TemplateID: types.ToString(wrapped.InternalEvent["template-id"]),
TemplatePath: types.ToString(wrapped.InternalEvent["template-path"]),
Info: wrapped.InternalEvent["template-info"].(model.Info),
- Type: "dns",
+ Type: types.ToString(wrapped.InternalEvent["type"]),
Host: types.ToString(wrapped.InternalEvent["host"]),
Matched: types.ToString(wrapped.InternalEvent["matched"]),
ExtractedResults: wrapped.OperatorsResult.OutputExtracts,
+ MatcherStatus: true,
Timestamp: time.Now(),
Request: types.ToString(wrapped.InternalEvent["request"]),
Response: types.ToString(wrapped.InternalEvent["raw"]),
@@ -125,3 +132,16 @@ func questionToString(resourceRecords []dns.Question) string {
}
return buffer.String()
}
+
+func traceToString(traceData *retryabledns.TraceData, withSteps bool) string {
+ buffer := &bytes.Buffer{}
+ if traceData != nil {
+ for i, dnsRecord := range traceData.DNSData {
+ if withSteps {
+ buffer.WriteString(fmt.Sprintf("request %d to resolver %s:\n", i, strings.Join(dnsRecord.Resolver, ",")))
+ }
+ buffer.WriteString(dnsRecord.Raw)
+ }
+ }
+ return buffer.String()
+}
diff --git a/v2/pkg/protocols/dns/operators_test.go b/v2/pkg/protocols/dns/operators_test.go
index c8d06124a..b8137bc26 100644
--- a/v2/pkg/protocols/dns/operators_test.go
+++ b/v2/pkg/protocols/dns/operators_test.go
@@ -8,27 +8,28 @@ import (
"github.com/miekg/dns"
"github.com/stretchr/testify/require"
- "github.com/projectdiscovery/nuclei/v2/internal/testutils"
"github.com/projectdiscovery/nuclei/v2/pkg/model"
"github.com/projectdiscovery/nuclei/v2/pkg/model/types/severity"
"github.com/projectdiscovery/nuclei/v2/pkg/operators"
"github.com/projectdiscovery/nuclei/v2/pkg/operators/extractors"
"github.com/projectdiscovery/nuclei/v2/pkg/operators/matchers"
"github.com/projectdiscovery/nuclei/v2/pkg/output"
+ "github.com/projectdiscovery/nuclei/v2/pkg/testutils"
)
func TestResponseToDSLMap(t *testing.T) {
options := testutils.DefaultOptions
+ recursion := false
testutils.Init(options)
templateID := "testing-dns"
request := &Request{
- Type: "A",
- Class: "INET",
- Retries: 5,
- ID: templateID,
- Recursion: false,
- Name: "{{FQDN}}",
+ RequestType: DNSRequestTypeHolder{DNSRequestType: A},
+ Class: "INET",
+ Retries: 5,
+ ID: templateID,
+ Recursion: &recursion,
+ Name: "{{FQDN}}",
}
executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
ID: templateID,
@@ -44,23 +45,24 @@ func TestResponseToDSLMap(t *testing.T) {
resp.Rcode = dns.RcodeSuccess
resp.Answer = append(resp.Answer, &dns.A{A: net.ParseIP("1.1.1.1"), Hdr: dns.RR_Header{Name: "one.one.one.one."}})
- event := request.responseToDSLMap(req, resp, "one.one.one.one", "one.one.one.one")
- require.Len(t, event, 12, "could not get correct number of items in dsl map")
+ event := request.responseToDSLMap(req, resp, "one.one.one.one", "one.one.one.one", nil)
+ require.Len(t, event, 14, "could not get correct number of items in dsl map")
require.Equal(t, dns.RcodeSuccess, event["rcode"], "could not get correct rcode")
}
func TestDNSOperatorMatch(t *testing.T) {
options := testutils.DefaultOptions
+ recursion := false
testutils.Init(options)
templateID := "testing-dns"
request := &Request{
- Type: "A",
- Class: "INET",
- Retries: 5,
- ID: templateID,
- Recursion: false,
- Name: "{{FQDN}}",
+ RequestType: DNSRequestTypeHolder{DNSRequestType: A},
+ Class: "INET",
+ Retries: 5,
+ ID: templateID,
+ Recursion: &recursion,
+ Name: "{{FQDN}}",
}
executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
ID: templateID,
@@ -76,12 +78,12 @@ func TestDNSOperatorMatch(t *testing.T) {
resp.Rcode = dns.RcodeSuccess
resp.Answer = append(resp.Answer, &dns.A{A: net.ParseIP("1.1.1.1"), Hdr: dns.RR_Header{Name: "one.one.one.one."}})
- event := request.responseToDSLMap(req, resp, "one.one.one.one", "one.one.one.one")
+ event := request.responseToDSLMap(req, resp, "one.one.one.one", "one.one.one.one", nil)
t.Run("valid", func(t *testing.T) {
matcher := &matchers.Matcher{
Part: "raw",
- Type: "word",
+ Type: matchers.MatcherTypeHolder{MatcherType: matchers.WordsMatcher},
Words: []string{"1.1.1.1"},
}
err = matcher.CompileMatchers()
@@ -95,7 +97,7 @@ func TestDNSOperatorMatch(t *testing.T) {
t.Run("rcode", func(t *testing.T) {
matcher := &matchers.Matcher{
Part: "rcode",
- Type: "status",
+ Type: matchers.MatcherTypeHolder{MatcherType: matchers.StatusMatcher},
Status: []int{dns.RcodeSuccess},
}
err = matcher.CompileMatchers()
@@ -109,7 +111,7 @@ func TestDNSOperatorMatch(t *testing.T) {
t.Run("negative", func(t *testing.T) {
matcher := &matchers.Matcher{
Part: "raw",
- Type: "word",
+ Type: matchers.MatcherTypeHolder{MatcherType: matchers.WordsMatcher},
Negative: true,
Words: []string{"random"},
}
@@ -124,7 +126,7 @@ func TestDNSOperatorMatch(t *testing.T) {
t.Run("invalid", func(t *testing.T) {
matcher := &matchers.Matcher{
Part: "raw",
- Type: "word",
+ Type: matchers.MatcherTypeHolder{MatcherType: matchers.WordsMatcher},
Words: []string{"random"},
}
err := matcher.CompileMatchers()
@@ -134,20 +136,45 @@ func TestDNSOperatorMatch(t *testing.T) {
require.False(t, isMatched, "could match invalid response matcher")
require.Equal(t, []string{}, matched)
})
+
+ t.Run("caseInsensitive", func(t *testing.T) {
+ req := new(dns.Msg)
+ req.Question = append(req.Question, dns.Question{Name: "ONE.ONE.ONE.ONE.", Qtype: dns.TypeA, Qclass: dns.ClassINET})
+
+ resp := new(dns.Msg)
+ resp.Rcode = dns.RcodeSuccess
+ resp.Answer = append(resp.Answer, &dns.A{A: net.ParseIP("1.1.1.1"), Hdr: dns.RR_Header{Name: "ONE.ONE.ONE.ONE."}})
+
+ event := request.responseToDSLMap(req, resp, "ONE.ONE.ONE.ONE", "ONE.ONE.ONE.ONE", nil)
+
+ matcher := &matchers.Matcher{
+ Part: "raw",
+ Type: matchers.MatcherTypeHolder{MatcherType: matchers.WordsMatcher},
+ Words: []string{"one.ONE.one.ONE"},
+ CaseInsensitive: true,
+ }
+ err = matcher.CompileMatchers()
+ require.Nil(t, err, "could not compile matcher")
+
+ isMatch, matched := request.Match(event, matcher)
+ require.True(t, isMatch, "could not match valid response")
+ require.Equal(t, []string{"one.one.one.one"}, matched)
+ })
}
func TestDNSOperatorExtract(t *testing.T) {
options := testutils.DefaultOptions
+ recursion := false
testutils.Init(options)
templateID := "testing-dns"
request := &Request{
- Type: "A",
- Class: "INET",
- Retries: 5,
- ID: templateID,
- Recursion: false,
- Name: "{{FQDN}}",
+ RequestType: DNSRequestTypeHolder{DNSRequestType: A},
+ Class: "INET",
+ Retries: 5,
+ ID: templateID,
+ Recursion: &recursion,
+ Name: "{{FQDN}}",
}
executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
ID: templateID,
@@ -163,12 +190,12 @@ func TestDNSOperatorExtract(t *testing.T) {
resp.Rcode = dns.RcodeSuccess
resp.Answer = append(resp.Answer, &dns.A{A: net.ParseIP("1.1.1.1"), Hdr: dns.RR_Header{Name: "one.one.one.one."}})
- event := request.responseToDSLMap(req, resp, "one.one.one.one", "one.one.one.one")
+ event := request.responseToDSLMap(req, resp, "one.one.one.one", "one.one.one.one", nil)
t.Run("extract", func(t *testing.T) {
extractor := &extractors.Extractor{
Part: "raw",
- Type: "regex",
+ Type: extractors.ExtractorTypeHolder{ExtractorType: extractors.RegexExtractor},
Regex: []string{"[0-9]+\\.[0-9]+\\.[0-9]+\\.[0-9]+"},
}
err = extractor.CompileExtractors()
@@ -181,7 +208,7 @@ func TestDNSOperatorExtract(t *testing.T) {
t.Run("kval", func(t *testing.T) {
extractor := &extractors.Extractor{
- Type: "kval",
+ Type: extractors.ExtractorTypeHolder{ExtractorType: extractors.KValExtractor},
KVal: []string{"rcode"},
}
err = extractor.CompileExtractors()
@@ -196,25 +223,26 @@ func TestDNSOperatorExtract(t *testing.T) {
func TestDNSMakeResult(t *testing.T) {
options := testutils.DefaultOptions
+ recursion := false
testutils.Init(options)
templateID := "testing-dns"
request := &Request{
- Type: "A",
- Class: "INET",
- Retries: 5,
- ID: templateID,
- Recursion: false,
- Name: "{{FQDN}}",
+ RequestType: DNSRequestTypeHolder{DNSRequestType: A},
+ Class: "INET",
+ Retries: 5,
+ ID: templateID,
+ Recursion: &recursion,
+ Name: "{{FQDN}}",
Operators: operators.Operators{
Matchers: []*matchers.Matcher{{
Name: "test",
Part: "raw",
- Type: "word",
+ Type: matchers.MatcherTypeHolder{MatcherType: matchers.WordsMatcher},
Words: []string{"1.1.1.1"},
}},
Extractors: []*extractors.Extractor{{
Part: "raw",
- Type: "regex",
+ Type: extractors.ExtractorTypeHolder{ExtractorType: extractors.RegexExtractor},
Regex: []string{"[0-9]+\\.[0-9]+\\.[0-9]+\\.[0-9]+"},
}},
},
@@ -233,7 +261,7 @@ func TestDNSMakeResult(t *testing.T) {
resp.Rcode = dns.RcodeSuccess
resp.Answer = append(resp.Answer, &dns.A{A: net.ParseIP("1.1.1.1"), Hdr: dns.RR_Header{Name: "one.one.one.one."}})
- event := request.responseToDSLMap(req, resp, "one.one.one.one", "one.one.one.one")
+ event := request.responseToDSLMap(req, resp, "one.one.one.one", "one.one.one.one", nil)
finalEvent := &output.InternalWrappedEvent{InternalEvent: event}
if request.CompiledOperators != nil {
result, ok := request.CompiledOperators.Execute(event, request.Match, request.Extract, false)
diff --git a/v2/pkg/protocols/dns/request.go b/v2/pkg/protocols/dns/request.go
index 28a3309b0..1b6516b85 100644
--- a/v2/pkg/protocols/dns/request.go
+++ b/v2/pkg/protocols/dns/request.go
@@ -1,6 +1,7 @@
package dns
import (
+ "encoding/hex"
"net/url"
"github.com/pkg/errors"
@@ -11,10 +12,17 @@ import (
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/expressions"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/helpers/eventcreator"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/helpers/responsehighlighter"
+ templateTypes "github.com/projectdiscovery/nuclei/v2/pkg/templates/types"
+ "github.com/projectdiscovery/retryabledns"
)
var _ protocols.Request = &Request{}
+// Type returns the type of the protocol request
+func (request *Request) Type() templateTypes.ProtocolType {
+ return templateTypes.DNSProtocol
+}
+
// ExecuteWithResults executes the protocol requests and returns results instead of writing them.
func (request *Request) ExecuteWithResults(input string, metadata /*TODO review unused parameter*/, previous output.InternalEvent, callback protocols.OutputEventCallback) error {
// Parse the URL and return domain if URL.
@@ -28,11 +36,19 @@ func (request *Request) ExecuteWithResults(input string, metadata /*TODO review
// Compile each request for the template based on the URL
compiledRequest, err := request.Make(domain)
if err != nil {
- request.options.Output.Request(request.options.TemplateID, domain, "dns", err)
+ request.options.Output.Request(request.options.TemplatePath, domain, request.Type().String(), err)
request.options.Progress.IncrementFailedRequestsBy(1)
return errors.Wrap(err, "could not build request")
}
+ dnsClient := request.dnsClient
+ if varErr := expressions.ContainsUnresolvedVariables(request.Resolvers...); varErr != nil {
+ if dnsClient, varErr = request.getDnsClient(request.options, metadata); varErr != nil {
+ gologger.Warning().Msgf("[%s] Could not make dns request for %s: %v\n", request.options.TemplateID, domain, varErr)
+ return nil
+ }
+ }
+
requestString := compiledRequest.String()
if varErr := expressions.ContainsUnresolvedVariables(requestString); varErr != nil {
gologger.Warning().Msgf("[%s] Could not make dns request for %s: %v\n", request.options.TemplateID, domain, varErr)
@@ -44,35 +60,71 @@ func (request *Request) ExecuteWithResults(input string, metadata /*TODO review
}
// Send the request to the target servers
- resp, err := request.dnsClient.Do(compiledRequest)
+ response, err := dnsClient.Do(compiledRequest)
if err != nil {
- request.options.Output.Request(request.options.TemplateID, domain, "dns", err)
+ request.options.Output.Request(request.options.TemplatePath, domain, request.Type().String(), err)
request.options.Progress.IncrementFailedRequestsBy(1)
}
- if resp == nil {
+ if response == nil {
return errors.Wrap(err, "could not send dns request")
}
request.options.Progress.IncrementRequests()
- request.options.Output.Request(request.options.TemplateID, domain, "dns", err)
- gologger.Verbose().Msgf("[%s] Sent DNS request to %s", request.options.TemplateID, domain)
+ request.options.Output.Request(request.options.TemplatePath, domain, request.Type().String(), err)
+ gologger.Verbose().Msgf("[%s] Sent DNS request to %s\n", request.options.TemplateID, domain)
- outputEvent := request.responseToDSLMap(compiledRequest, resp, input, input)
+ // perform trace if necessary
+ var traceData *retryabledns.TraceData
+ if request.Trace {
+ traceData, err = request.dnsClient.Trace(domain, request.question, request.TraceMaxRecursion)
+ if err != nil {
+ request.options.Output.Request(request.options.TemplatePath, domain, "dns", err)
+ }
+ }
+
+ outputEvent := request.responseToDSLMap(compiledRequest, response, input, input, traceData)
for k, v := range previous {
outputEvent[k] = v
}
event := eventcreator.CreateEvent(request, outputEvent, request.options.Options.Debug || request.options.Options.DebugResponse)
+ // TODO: dynamic values are not supported yet
- if request.options.Options.Debug || request.options.Options.DebugResponse {
- gologger.Debug().Msgf("[%s] Dumped DNS response for %s", request.options.TemplateID, domain)
- gologger.Print().Msgf("%s", responsehighlighter.Highlight(event.OperatorsResult, resp.String(), request.options.Options.NoColor))
+ dumpResponse(event, request.options, response.String(), domain)
+ if request.Trace {
+ dumpTraceData(event, request.options, traceToString(traceData, true), domain)
}
callback(event)
return nil
}
+func dumpResponse(event *output.InternalWrappedEvent, requestOptions *protocols.ExecuterOptions, response, domain string) {
+ cliOptions := requestOptions.Options
+ if cliOptions.Debug || cliOptions.DebugResponse {
+ hexDump := false
+ if responsehighlighter.HasBinaryContent(response) {
+ hexDump = true
+ response = hex.Dump([]byte(response))
+ }
+ highlightedResponse := responsehighlighter.Highlight(event.OperatorsResult, response, cliOptions.NoColor, hexDump)
+ gologger.Debug().Msgf("[%s] Dumped DNS response for %s\n\n%s", requestOptions.TemplateID, domain, highlightedResponse)
+ }
+}
+
+func dumpTraceData(event *output.InternalWrappedEvent, requestOptions *protocols.ExecuterOptions, traceData, domain string) {
+ cliOptions := requestOptions.Options
+ if cliOptions.Debug || cliOptions.DebugResponse {
+ hexDump := false
+ if responsehighlighter.HasBinaryContent(traceData) {
+ hexDump = true
+ traceData = hex.Dump([]byte(traceData))
+ }
+ highlightedResponse := responsehighlighter.Highlight(event.OperatorsResult, traceData, cliOptions.NoColor, hexDump)
+ gologger.Debug().Msgf("[%s] Dumped DNS Trace data for %s\n\n%s", requestOptions.TemplateID, domain, highlightedResponse)
+ }
+}
+
// isURL tests a string to determine if it is a well-structured url or not.
func isURL(toTest string) bool {
if _, err := url.ParseRequestURI(toTest); err != nil {
diff --git a/v2/pkg/protocols/dns/request_test.go b/v2/pkg/protocols/dns/request_test.go
index c77bffaed..5cf355450 100644
--- a/v2/pkg/protocols/dns/request_test.go
+++ b/v2/pkg/protocols/dns/request_test.go
@@ -5,37 +5,38 @@ import (
"github.com/stretchr/testify/require"
- "github.com/projectdiscovery/nuclei/v2/internal/testutils"
"github.com/projectdiscovery/nuclei/v2/pkg/model"
"github.com/projectdiscovery/nuclei/v2/pkg/model/types/severity"
"github.com/projectdiscovery/nuclei/v2/pkg/operators"
"github.com/projectdiscovery/nuclei/v2/pkg/operators/extractors"
"github.com/projectdiscovery/nuclei/v2/pkg/operators/matchers"
"github.com/projectdiscovery/nuclei/v2/pkg/output"
+ "github.com/projectdiscovery/nuclei/v2/pkg/testutils"
)
func TestDNSExecuteWithResults(t *testing.T) {
options := testutils.DefaultOptions
+ recursion := false
testutils.Init(options)
templateID := "testing-dns"
request := &Request{
- Type: "A",
- Class: "INET",
- Retries: 5,
- ID: templateID,
- Recursion: false,
- Name: "{{FQDN}}",
+ RequestType: DNSRequestTypeHolder{DNSRequestType: A},
+ Class: "INET",
+ Retries: 5,
+ ID: templateID,
+ Recursion: &recursion,
+ Name: "{{FQDN}}",
Operators: operators.Operators{
Matchers: []*matchers.Matcher{{
Name: "test",
Part: "raw",
- Type: "word",
+ Type: matchers.MatcherTypeHolder{MatcherType: matchers.WordsMatcher},
Words: []string{"93.184.216.34"},
}},
Extractors: []*extractors.Extractor{{
Part: "raw",
- Type: "regex",
+ Type: extractors.ExtractorTypeHolder{ExtractorType: extractors.RegexExtractor},
Regex: []string{"[0-9]+\\.[0-9]+\\.[0-9]+\\.[0-9]+"},
}},
},
diff --git a/v2/pkg/protocols/file/file.go b/v2/pkg/protocols/file/file.go
index e61965025..b0902e7a0 100644
--- a/v2/pkg/protocols/file/file.go
+++ b/v2/pkg/protocols/file/file.go
@@ -52,6 +52,19 @@ type Request struct {
allExtensions bool
}
+// RequestPartDefinitions contains a mapping of request part definitions and their
+// description. Multiple definitions are separated by commas.
+// Definitions not having a name (generated at runtime) are prefixed & suffixed by <>.
+var RequestPartDefinitions = map[string]string{
+ "template-id": "ID of the template executed",
+ "template-info": "Info Block of the template executed",
+ "template-path": "Path of the template executed",
+ "matched": "Matched is the input which was matched upon",
+ "path": "Path is the path of file on local filesystem",
+ "type": "Type is the type of request made",
+ "raw,body,all,data": "Raw contains the raw file contents",
+}
+
// defaultDenylist is the default list of extensions to be denied
var defaultDenylist = []string{".3g2", ".3gp", ".7z", ".apk", ".arj", ".avi", ".axd", ".bmp", ".css", ".csv", ".deb", ".dll", ".doc", ".drv", ".eot", ".exe", ".flv", ".gif", ".gifv", ".gz", ".h264", ".ico", ".iso", ".jar", ".jpeg", ".jpg", ".lock", ".m4a", ".m4v", ".map", ".mkv", ".mov", ".mp3", ".mp4", ".mpeg", ".mpg", ".msi", ".ogg", ".ogm", ".ogv", ".otf", ".pdf", ".pkg", ".png", ".ppt", ".psd", ".rar", ".rm", ".rpm", ".svg", ".swf", ".sys", ".tar.gz", ".tar", ".tif", ".tiff", ".ttf", ".vob", ".wav", ".webm", ".wmv", ".woff", ".woff2", ".xcf", ".xls", ".xlsx", ".zip"}
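
RequestPartDefinitions is a plain map whose comma-separated keys list aliases of the same part. A hypothetical consumer (not shown in this diff) could expand those aliases when rendering documentation, for example:

package main

import (
	"fmt"
	"strings"
)

// a couple of entries copied from the map above
var requestPartDefinitions = map[string]string{
	"raw,body,all,data": "Raw contains the raw file contents",
	"path":              "Path is the path of file on local filesystem",
}

func main() {
	// expand the comma-separated aliases into one documentation line per part name
	for names, description := range requestPartDefinitions {
		for _, name := range strings.Split(names, ",") {
			fmt.Printf("%-8s %s\n", name, description)
		}
	}
}
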
diff --git a/v2/pkg/protocols/file/file_test.go b/v2/pkg/protocols/file/file_test.go
index 7338e1461..d568f9810 100644
--- a/v2/pkg/protocols/file/file_test.go
+++ b/v2/pkg/protocols/file/file_test.go
@@ -5,9 +5,9 @@ import (
"github.com/stretchr/testify/require"
- "github.com/projectdiscovery/nuclei/v2/internal/testutils"
"github.com/projectdiscovery/nuclei/v2/pkg/model"
"github.com/projectdiscovery/nuclei/v2/pkg/model/types/severity"
+ "github.com/projectdiscovery/nuclei/v2/pkg/testutils"
)
func TestFileCompile(t *testing.T) {
diff --git a/v2/pkg/protocols/file/find_test.go b/v2/pkg/protocols/file/find_test.go
index a2e2e9f36..58eb128b6 100644
--- a/v2/pkg/protocols/file/find_test.go
+++ b/v2/pkg/protocols/file/find_test.go
@@ -8,9 +8,9 @@ import (
"github.com/stretchr/testify/require"
- "github.com/projectdiscovery/nuclei/v2/internal/testutils"
"github.com/projectdiscovery/nuclei/v2/pkg/model"
"github.com/projectdiscovery/nuclei/v2/pkg/model/types/severity"
+ "github.com/projectdiscovery/nuclei/v2/pkg/testutils"
)
func TestFindInputPaths(t *testing.T) {
@@ -44,7 +44,7 @@ func TestFindInputPaths(t *testing.T) {
"test.js": "TEST",
}
for k, v := range files {
- err = ioutil.WriteFile(filepath.Join(tempDir, k), []byte(v), 0777)
+ err = ioutil.WriteFile(filepath.Join(tempDir, k), []byte(v), os.ModePerm)
require.Nil(t, err, "could not write temporary file")
}
expected := []string{"config.yaml", "final.yaml", "test.js"}
diff --git a/v2/pkg/protocols/file/operators.go b/v2/pkg/protocols/file/operators.go
index c0d95c5b0..41ef7fd3a 100644
--- a/v2/pkg/protocols/file/operators.go
+++ b/v2/pkg/protocols/file/operators.go
@@ -16,17 +16,10 @@ import (
 // Match matches a generic data response against a given matcher
func (request *Request) Match(data map[string]interface{}, matcher *matchers.Matcher) (bool, []string) {
- partString := matcher.Part
- switch partString {
- case "body", "all", "data", "":
- partString = "raw"
- }
-
- item, ok := data[partString]
+ itemStr, ok := request.getMatchPart(matcher.Part, data)
if !ok {
return false, []string{}
}
- itemStr := types.ToString(item)
switch matcher.GetType() {
case matchers.SizeMatcher:
@@ -45,17 +38,10 @@ func (request *Request) Match(data map[string]interface{}, matcher *matchers.Mat
 // Extract performs an extraction operation for an extractor on the model and returns the extracted values.
func (request *Request) Extract(data map[string]interface{}, extractor *extractors.Extractor) map[string]struct{} {
- partString := extractor.Part
- switch partString {
- case "body", "all", "data", "":
- partString = "raw"
- }
-
- item, ok := data[partString]
+ itemStr, ok := request.getMatchPart(extractor.Part, data)
if !ok {
return nil
}
- itemStr := types.ToString(item)
switch extractor.GetType() {
case extractors.RegexExtractor:
@@ -66,12 +52,28 @@ func (request *Request) Extract(data map[string]interface{}, extractor *extracto
return nil
}
+func (request *Request) getMatchPart(part string, data output.InternalEvent) (string, bool) {
+ switch part {
+ case "body", "all", "data", "":
+ part = "raw"
+ }
+
+ item, ok := data[part]
+ if !ok {
+ return "", false
+ }
+ itemStr := types.ToString(item)
+
+ return itemStr, true
+}
+
// responseToDSLMap converts a file response to a map for use in DSL matching
func (request *Request) responseToDSLMap(raw, inputFilePath, matchedFileName string) output.InternalEvent {
return output.InternalEvent{
"path": inputFilePath,
"matched": matchedFileName,
"raw": raw,
+ "type": request.Type().String(),
"template-id": request.options.TemplateID,
"template-info": request.options.TemplateInfo,
"template-path": request.options.TemplatePath,
@@ -119,10 +121,11 @@ func (request *Request) GetCompiledOperators() []*operators.Operators {
func (request *Request) MakeResultEventItem(wrapped *output.InternalWrappedEvent) *output.ResultEvent {
data := &output.ResultEvent{
+ MatcherStatus: true,
TemplateID: types.ToString(wrapped.InternalEvent["template-id"]),
TemplatePath: types.ToString(wrapped.InternalEvent["template-path"]),
Info: wrapped.InternalEvent["template-info"].(model.Info),
- Type: "file",
+ Type: types.ToString(wrapped.InternalEvent["type"]),
Path: types.ToString(wrapped.InternalEvent["path"]),
Matched: types.ToString(wrapped.InternalEvent["matched"]),
Host: types.ToString(wrapped.InternalEvent["host"]),
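
Match and Extract now share getMatchPart, which normalizes the part aliases body, all, data and the empty string to the raw key before the lookup. A self-contained sketch of that normalization, using simplified types rather than the package's real ones:

package main

import "fmt"

// getMatchPart mirrors the alias handling used by Match and Extract:
// body/all/data/"" all resolve to the "raw" key of the event map.
func getMatchPart(part string, data map[string]interface{}) (string, bool) {
	switch part {
	case "body", "all", "data", "":
		part = "raw"
	}
	item, ok := data[part]
	if !ok {
		return "", false
	}
	return fmt.Sprint(item), true
}

func main() {
	event := map[string]interface{}{"raw": "test-data\r\n"}
	value, ok := getMatchPart("body", event) // alias resolves to "raw"
	fmt.Println(ok, value)
}
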
diff --git a/v2/pkg/protocols/file/operators_test.go b/v2/pkg/protocols/file/operators_test.go
index b03d641b5..37cc62d52 100644
--- a/v2/pkg/protocols/file/operators_test.go
+++ b/v2/pkg/protocols/file/operators_test.go
@@ -5,13 +5,13 @@ import (
"github.com/stretchr/testify/require"
- "github.com/projectdiscovery/nuclei/v2/internal/testutils"
"github.com/projectdiscovery/nuclei/v2/pkg/model"
"github.com/projectdiscovery/nuclei/v2/pkg/model/types/severity"
"github.com/projectdiscovery/nuclei/v2/pkg/operators"
"github.com/projectdiscovery/nuclei/v2/pkg/operators/extractors"
"github.com/projectdiscovery/nuclei/v2/pkg/operators/matchers"
"github.com/projectdiscovery/nuclei/v2/pkg/output"
+ "github.com/projectdiscovery/nuclei/v2/pkg/testutils"
)
func TestResponseToDSLMap(t *testing.T) {
@@ -35,7 +35,7 @@ func TestResponseToDSLMap(t *testing.T) {
resp := "test-data\r\n"
event := request.responseToDSLMap(resp, "one.one.one.one", "one.one.one.one")
- require.Len(t, event, 6, "could not get correct number of items in dsl map")
+ require.Len(t, event, 7, "could not get correct number of items in dsl map")
require.Equal(t, resp, event["raw"], "could not get correct resp")
}
@@ -60,13 +60,13 @@ func TestFileOperatorMatch(t *testing.T) {
resp := "test-data\r\n1.1.1.1\r\n"
event := request.responseToDSLMap(resp, "one.one.one.one", "one.one.one.one")
- require.Len(t, event, 6, "could not get correct number of items in dsl map")
+ require.Len(t, event, 7, "could not get correct number of items in dsl map")
require.Equal(t, resp, event["raw"], "could not get correct resp")
t.Run("valid", func(t *testing.T) {
matcher := &matchers.Matcher{
Part: "raw",
- Type: "word",
+ Type: matchers.MatcherTypeHolder{MatcherType: matchers.WordsMatcher},
Words: []string{"1.1.1.1"},
}
err = matcher.CompileMatchers()
@@ -80,7 +80,7 @@ func TestFileOperatorMatch(t *testing.T) {
t.Run("negative", func(t *testing.T) {
matcher := &matchers.Matcher{
Part: "raw",
- Type: "word",
+ Type: matchers.MatcherTypeHolder{MatcherType: matchers.WordsMatcher},
Negative: true,
Words: []string{"random"},
}
@@ -95,7 +95,7 @@ func TestFileOperatorMatch(t *testing.T) {
t.Run("invalid", func(t *testing.T) {
matcher := &matchers.Matcher{
Part: "raw",
- Type: "word",
+ Type: matchers.MatcherTypeHolder{MatcherType: matchers.WordsMatcher},
Words: []string{"random"},
}
err := matcher.CompileMatchers()
@@ -105,6 +105,26 @@ func TestFileOperatorMatch(t *testing.T) {
require.False(t, isMatched, "could match invalid response matcher")
require.Equal(t, []string{}, matched)
})
+
+ t.Run("caseInsensitive", func(t *testing.T) {
+ resp := "TEST-DATA\r\n1.1.1.1\r\n"
+ event := request.responseToDSLMap(resp, "one.one.one.one", "one.one.one.one")
+ require.Len(t, event, 7, "could not get correct number of items in dsl map")
+ require.Equal(t, resp, event["raw"], "could not get correct resp")
+
+ matcher := &matchers.Matcher{
+ Part: "raw",
+ Type: matchers.MatcherTypeHolder{MatcherType: matchers.WordsMatcher},
+ Words: []string{"TeSt-DaTA"},
+ CaseInsensitive: true,
+ }
+ err = matcher.CompileMatchers()
+ require.Nil(t, err, "could not compile matcher")
+
+ isMatched, matched := request.Match(event, matcher)
+ require.True(t, isMatched, "could not match valid response")
+ require.Equal(t, []string{"test-data"}, matched)
+ })
}
func TestFileOperatorExtract(t *testing.T) {
@@ -128,13 +148,13 @@ func TestFileOperatorExtract(t *testing.T) {
resp := "test-data\r\n1.1.1.1\r\n"
event := request.responseToDSLMap(resp, "one.one.one.one", "one.one.one.one")
- require.Len(t, event, 6, "could not get correct number of items in dsl map")
+ require.Len(t, event, 7, "could not get correct number of items in dsl map")
require.Equal(t, resp, event["raw"], "could not get correct resp")
t.Run("extract", func(t *testing.T) {
extractor := &extractors.Extractor{
Part: "raw",
- Type: "regex",
+ Type: extractors.ExtractorTypeHolder{ExtractorType: extractors.RegexExtractor},
Regex: []string{"[0-9]+\\.[0-9]+\\.[0-9]+\\.[0-9]+"},
}
err = extractor.CompileExtractors()
@@ -147,7 +167,7 @@ func TestFileOperatorExtract(t *testing.T) {
t.Run("kval", func(t *testing.T) {
extractor := &extractors.Extractor{
- Type: "kval",
+ Type: extractors.ExtractorTypeHolder{ExtractorType: extractors.KValExtractor},
KVal: []string{"raw"},
}
err = extractor.CompileExtractors()
@@ -180,13 +200,13 @@ func testFileMakeResultOperators(t *testing.T, matcherCondition string) *output.
matcher := []*matchers.Matcher{
{
Part: "raw",
- Type: "word",
+ Type: matchers.MatcherTypeHolder{MatcherType: matchers.WordsMatcher},
Words: expectedValue,
},
{
Name: namedMatcherName,
Part: "raw",
- Type: "word",
+ Type: matchers.MatcherTypeHolder{MatcherType: matchers.WordsMatcher},
Words: expectedValue,
},
}
@@ -230,7 +250,7 @@ func testFileMakeResult(t *testing.T, matchers []*matchers.Matcher, matcherCondi
Matchers: matchers,
Extractors: []*extractors.Extractor{{
Part: "raw",
- Type: "regex",
+ Type: extractors.ExtractorTypeHolder{ExtractorType: extractors.RegexExtractor},
Regex: []string{"[0-9]+\\.[0-9]+\\.[0-9]+\\.[0-9]+"},
}},
},
@@ -246,7 +266,7 @@ func testFileMakeResult(t *testing.T, matchers []*matchers.Matcher, matcherCondi
fileContent := "test-data\r\n1.1.1.1\r\n"
event := request.responseToDSLMap(fileContent, "/tmp", matchedFileName)
- require.Len(t, event, 6, "could not get correct number of items in dsl map")
+ require.Len(t, event, 7, "could not get correct number of items in dsl map")
require.Equal(t, fileContent, event["raw"], "could not get correct resp")
finalEvent := &output.InternalWrappedEvent{InternalEvent: event}
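
The caseInsensitive sub-test expects the matched snippet back lower-cased ("test-data"), which suggests the word matcher lower-cases both the input and the configured words when CaseInsensitive is set. A rough sketch of that behaviour, not the matcher's actual implementation:

package main

import (
	"fmt"
	"strings"
)

// matchWordCaseInsensitive lower-cases both sides before matching and
// returns the matched word in its lower-cased form.
func matchWordCaseInsensitive(body, word string) (string, bool) {
	loweredBody := strings.ToLower(body)
	loweredWord := strings.ToLower(word)
	if strings.Contains(loweredBody, loweredWord) {
		return loweredWord, true
	}
	return "", false
}

func main() {
	matched, ok := matchWordCaseInsensitive("TEST-DATA\r\n1.1.1.1\r\n", "TeSt-DaTA")
	fmt.Println(ok, matched) // true test-data
}
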
diff --git a/v2/pkg/protocols/file/request.go b/v2/pkg/protocols/file/request.go
index 61bb87185..7c582c32a 100644
--- a/v2/pkg/protocols/file/request.go
+++ b/v2/pkg/protocols/file/request.go
@@ -1,6 +1,7 @@
package file
import (
+ "encoding/hex"
"io/ioutil"
"os"
@@ -13,10 +14,16 @@ import (
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/helpers/eventcreator"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/helpers/responsehighlighter"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/tostring"
+ templateTypes "github.com/projectdiscovery/nuclei/v2/pkg/templates/types"
)
var _ protocols.Request = &Request{}
+// Type returns the type of the protocol request
+func (request *Request) Type() templateTypes.ProtocolType {
+ return templateTypes.FileProtocol
+}
+
// ExecuteWithResults executes the protocol requests and returns results instead of writing them.
func (request *Request) ExecuteWithResults(input string, metadata /*TODO review unused parameter*/, previous output.InternalEvent, callback protocols.OutputEventCallback) error {
wg := sizedwaitgroup.New(request.options.Options.BulkSize)
@@ -49,30 +56,40 @@ func (request *Request) ExecuteWithResults(input string, metadata /*TODO review
gologger.Error().Msgf("Could not read file path %s: %s\n", filePath, err)
return
}
- dataStr := tostring.UnsafeToString(buffer)
+ fileContent := tostring.UnsafeToString(buffer)
gologger.Verbose().Msgf("[%s] Sent FILE request to %s", request.options.TemplateID, filePath)
- outputEvent := request.responseToDSLMap(dataStr, input, filePath)
+ outputEvent := request.responseToDSLMap(fileContent, input, filePath)
for k, v := range previous {
outputEvent[k] = v
}
event := eventcreator.CreateEvent(request, outputEvent, request.options.Options.Debug || request.options.Options.DebugResponse)
- if request.options.Options.Debug || request.options.Options.DebugResponse {
- gologger.Info().Msgf("[%s] Dumped file request for %s", request.options.TemplateID, filePath)
- gologger.Print().Msgf("%s", responsehighlighter.Highlight(event.OperatorsResult, dataStr, request.options.Options.NoColor))
- }
+ dumpResponse(event, request.options, fileContent, filePath)
callback(event)
}(data)
})
wg.Wait()
if err != nil {
- request.options.Output.Request(request.options.TemplateID, input, "file", err)
+ request.options.Output.Request(request.options.TemplatePath, input, request.Type().String(), err)
request.options.Progress.IncrementFailedRequestsBy(1)
return errors.Wrap(err, "could not send file request")
}
request.options.Progress.IncrementRequests()
return nil
}
+
+func dumpResponse(event *output.InternalWrappedEvent, requestOptions *protocols.ExecuterOptions, fileContent string, filePath string) {
+ cliOptions := requestOptions.Options
+ if cliOptions.Debug || cliOptions.DebugResponse {
+ hexDump := false
+ if responsehighlighter.HasBinaryContent(fileContent) {
+ hexDump = true
+ fileContent = hex.Dump([]byte(fileContent))
+ }
+ highlightedResponse := responsehighlighter.Highlight(event.OperatorsResult, fileContent, cliOptions.NoColor, hexDump)
+ gologger.Debug().Msgf("[%s] Dumped file request for %s\n\n%s", requestOptions.TemplateID, filePath, highlightedResponse)
+ }
+}
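
Request.Type() now reports the protocol as templateTypes.FileProtocol and responseToDSLMap stores it under the type key, so MakeResultEventItem reads the value from the event instead of hard-coding "file". A toy stand-in for that enum/String pattern (the real constants live in pkg/templates/types):

package main

import "fmt"

// ProtocolType is a toy stand-in for templateTypes.ProtocolType.
type ProtocolType int

const (
	FileProtocol ProtocolType = iota + 1
	DNSProtocol
)

var protocolNames = map[ProtocolType]string{
	FileProtocol: "file",
	DNSProtocol:  "dns",
}

func (p ProtocolType) String() string { return protocolNames[p] }

func main() {
	// the event carries the protocol name, result construction only reads it back
	event := map[string]interface{}{"type": FileProtocol.String()}
	fmt.Println(event["type"]) // file
}
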
diff --git a/v2/pkg/protocols/file/request_test.go b/v2/pkg/protocols/file/request_test.go
index 0c9f7e9cc..076c3b884 100644
--- a/v2/pkg/protocols/file/request_test.go
+++ b/v2/pkg/protocols/file/request_test.go
@@ -8,13 +8,13 @@ import (
"github.com/stretchr/testify/require"
- "github.com/projectdiscovery/nuclei/v2/internal/testutils"
"github.com/projectdiscovery/nuclei/v2/pkg/model"
"github.com/projectdiscovery/nuclei/v2/pkg/model/types/severity"
"github.com/projectdiscovery/nuclei/v2/pkg/operators"
"github.com/projectdiscovery/nuclei/v2/pkg/operators/extractors"
"github.com/projectdiscovery/nuclei/v2/pkg/operators/matchers"
"github.com/projectdiscovery/nuclei/v2/pkg/output"
+ "github.com/projectdiscovery/nuclei/v2/pkg/testutils"
)
func TestFileExecuteWithResults(t *testing.T) {
@@ -32,12 +32,12 @@ func TestFileExecuteWithResults(t *testing.T) {
Matchers: []*matchers.Matcher{{
Name: "test",
Part: "raw",
- Type: "word",
+ Type: matchers.MatcherTypeHolder{MatcherType: matchers.WordsMatcher},
Words: []string{"1.1.1.1"},
}},
Extractors: []*extractors.Extractor{{
Part: "raw",
- Type: "regex",
+ Type: extractors.ExtractorTypeHolder{ExtractorType: extractors.RegexExtractor},
Regex: []string{"[0-9]+\\.[0-9]+\\.[0-9]+\\.[0-9]+"},
}},
},
@@ -57,7 +57,7 @@ func TestFileExecuteWithResults(t *testing.T) {
"config.yaml": "TEST\r\n1.1.1.1\r\n",
}
for k, v := range files {
- err = ioutil.WriteFile(filepath.Join(tempDir, k), []byte(v), 0777)
+ err = ioutil.WriteFile(filepath.Join(tempDir, k), []byte(v), os.ModePerm)
require.Nil(t, err, "could not write temporary file")
}
diff --git a/v2/pkg/protocols/headless/engine/action.go b/v2/pkg/protocols/headless/engine/action.go
index 5f97217b2..e65eed8be 100644
--- a/v2/pkg/protocols/headless/engine/action.go
+++ b/v2/pkg/protocols/headless/engine/action.go
@@ -2,110 +2,6 @@ package engine
import "strings"
-// ActionType defines the action type for a browser action
-type ActionType int8
-
-// Types to be executed by the user.
-const (
- // ActionNavigate performs a navigation to the specified URL
- // URL can include nuclei payload data such as URL, Hostname, etc.
- ActionNavigate ActionType = iota + 1
- // ActionScript executes a JS snippet on the page.
- ActionScript
- // ActionClick performs the left-click action on an Element.
- ActionClick
- // ActionRightClick performs the right-click action on an Element.
- ActionRightClick
- // ActionTextInput performs an action for a text input
- ActionTextInput
- // ActionScreenshot performs the screenshot action writing to a file.
- ActionScreenshot
- // ActionTimeInput performs an action on a time input.
- ActionTimeInput
- // ActionSelectInput performs an action on a select input.
- ActionSelectInput
- // ActionFilesInput performs an action on a file input.
- ActionFilesInput
- // ActionWaitLoad waits for the page to stop loading.
- ActionWaitLoad
- // ActionGetResource performs a get resource action on an element
- ActionGetResource
- // ActionExtract performs an extraction on an element
- ActionExtract
- // ActionSetMethod sets the request method
- ActionSetMethod
- // ActionAddHeader adds a header to the request
- ActionAddHeader
- // ActionSetHeader sets a header in the request
- ActionSetHeader
- // ActionDeleteHeader deletes a header from the request
- ActionDeleteHeader
- // ActionSetBody sets the value of the request body
- ActionSetBody
- // ActionWaitEvent waits for a specific event.
- ActionWaitEvent
- // ActionKeyboard performs a keyboard action event on a page.
- ActionKeyboard
- // ActionDebug debug slows down headless and adds a sleep to each page.
- ActionDebug
- // ActionSleep executes a sleep for a specified duration
- ActionSleep
- // ActionWaitVisible waits until an element appears.
- ActionWaitVisible
-)
-
-// ActionStringToAction converts an action from string to internal representation
-var ActionStringToAction = map[string]ActionType{
- "navigate": ActionNavigate,
- "script": ActionScript,
- "click": ActionClick,
- "rightclick": ActionRightClick,
- "text": ActionTextInput,
- "screenshot": ActionScreenshot,
- "time": ActionTimeInput,
- "select": ActionSelectInput,
- "files": ActionFilesInput,
- "waitload": ActionWaitLoad,
- "getresource": ActionGetResource,
- "extract": ActionExtract,
- "setmethod": ActionSetMethod,
- "addheader": ActionAddHeader,
- "setheader": ActionSetHeader,
- "deleteheader": ActionDeleteHeader,
- "setbody": ActionSetBody,
- "waitevent": ActionWaitEvent,
- "keyboard": ActionKeyboard,
- "debug": ActionDebug,
- "sleep": ActionSleep,
- "waitvisible": ActionWaitVisible,
-}
-
-// ActionToActionString converts an action from internal representation to string
-var ActionToActionString = map[ActionType]string{
- ActionNavigate: "navigate",
- ActionScript: "script",
- ActionClick: "click",
- ActionRightClick: "rightclick",
- ActionTextInput: "text",
- ActionScreenshot: "screenshot",
- ActionTimeInput: "time",
- ActionSelectInput: "select",
- ActionFilesInput: "files",
- ActionWaitLoad: "waitload",
- ActionGetResource: "getresource",
- ActionExtract: "extract",
- ActionSetMethod: "set-method",
- ActionAddHeader: "addheader",
- ActionSetHeader: "setheader",
- ActionDeleteHeader: "deleteheader",
- ActionSetBody: "setbody",
- ActionWaitEvent: "waitevent",
- ActionKeyboard: "keyboard",
- ActionDebug: "debug",
- ActionSleep: "sleep",
- ActionWaitVisible: "waitvisible",
-}
-
// Action is an action taken by the browser to reach a navigation
//
// Each step that the browser executes is an action. Most navigations
@@ -130,35 +26,13 @@ type Action struct {
Description string `yaml:"description,omitempty" jsonschema:"title=description for headless action,description=Description of the headless action"`
// description: |
// Action is the type of the action to perform.
- // values:
- // - "navigate"
- // - "script"
- // - "click"
- // - "rightclick"
- // - "text"
- // - "screenshot"
- // - "time"
- // - "select"
- // - "files"
- // - "waitload"
- // - "getresource"
- // - "extract"
- // - "setmethod"
- // - "addheader"
- // - "setheader"
- // - "deleteheader"
- // - "setbody"
- // - "waitevent"
- // - "keyboard"
- // - "debug"
- // - "sleep"
- ActionType string `yaml:"action" jsonschema:"title=action to perform,description=Type of actions to perform,enum=navigate,enum=script,enum=click,enum=rightclick,enum=text,enum=screenshot,enum=time,enum=select,enum=files,enum=waitload,enum=getresource,enum=extract,enum=setmethod,enum=addheader,enum=setheader,enum=deleteheader,enum=setbody,enum=waitevent,enum=keyboard,enum=debug,enum=sleep"`
+ ActionType ActionTypeHolder `yaml:"action" jsonschema:"title=action to perform,description=Type of actions to perform,enum=navigate,enum=script,enum=click,enum=rightclick,enum=text,enum=screenshot,enum=time,enum=select,enum=files,enum=waitload,enum=getresource,enum=extract,enum=setmethod,enum=addheader,enum=setheader,enum=deleteheader,enum=setbody,enum=waitevent,enum=keyboard,enum=debug,enum=sleep"`
}
// String returns the string representation of an action
func (a *Action) String() string {
builder := &strings.Builder{}
- builder.WriteString(a.ActionType)
+ builder.WriteString(a.ActionType.String())
if a.Name != "" {
builder.WriteString(" Name:")
builder.WriteString(a.Name)
diff --git a/v2/pkg/protocols/headless/engine/action_types.go b/v2/pkg/protocols/headless/engine/action_types.go
new file mode 100644
index 000000000..03fcf2e54
--- /dev/null
+++ b/v2/pkg/protocols/headless/engine/action_types.go
@@ -0,0 +1,207 @@
+package engine
+
+import (
+ "encoding/json"
+ "errors"
+ "strings"
+
+ "github.com/alecthomas/jsonschema"
+)
+
+// ActionType defines the action type for a browser action
+type ActionType int8
+
+// Types to be executed by the user.
+// name:ActionType
+const (
+ // ActionNavigate performs a navigation to the specified URL
+ // name:navigate
+ ActionNavigate ActionType = iota + 1
+ // ActionScript executes a JS snippet on the page.
+ // name:script
+ ActionScript
+ // ActionClick performs the left-click action on an Element.
+ // name:click
+ ActionClick
+ // ActionRightClick performs the right-click action on an Element.
+ // name:rightclick
+ ActionRightClick
+ // ActionTextInput performs an action for a text input
+ // name:text
+ ActionTextInput
+ // ActionScreenshot performs the screenshot action writing to a file.
+ // name:screenshot
+ ActionScreenshot
+ // ActionTimeInput performs an action on a time input.
+ // name:time
+ ActionTimeInput
+ // ActionSelectInput performs an action on a select input.
+ // name:select
+ ActionSelectInput
+ // ActionFilesInput performs an action on a file input.
+ // name:files
+ ActionFilesInput
+ // ActionWaitLoad waits for the page to stop loading.
+ // name:waitload
+ ActionWaitLoad
+ // ActionGetResource performs a get resource action on an element
+ // name:getresource
+ ActionGetResource
+ // ActionExtract performs an extraction on an element
+ // name:extract
+ ActionExtract
+ // ActionSetMethod sets the request method
+ // name:setmethod
+ ActionSetMethod
+ // ActionAddHeader adds a header to the request
+ // name:addheader
+ ActionAddHeader
+ // ActionSetHeader sets a header in the request
+ // name:setheader
+ ActionSetHeader
+ // ActionDeleteHeader deletes a header from the request
+ // name:deleteheader
+ ActionDeleteHeader
+ // ActionSetBody sets the value of the request body
+ // name:setbody
+ ActionSetBody
+ // ActionWaitEvent waits for a specific event.
+ // name:waitevent
+ ActionWaitEvent
+ // ActionKeyboard performs a keyboard action event on a page.
+ // name:keyboard
+ ActionKeyboard
+	// ActionDebug slows down headless execution and adds a sleep to each page.
+ // name:debug
+ ActionDebug
+ // ActionSleep executes a sleep for a specified duration
+ // name:sleep
+ ActionSleep
+ // ActionWaitVisible waits until an element appears.
+ // name:waitvisible
+ ActionWaitVisible
+ // limit
+ limit
+)
+
+// ActionStringToAction converts an action from string to internal representation
+var ActionStringToAction = map[string]ActionType{
+ "navigate": ActionNavigate,
+ "script": ActionScript,
+ "click": ActionClick,
+ "rightclick": ActionRightClick,
+ "text": ActionTextInput,
+ "screenshot": ActionScreenshot,
+ "time": ActionTimeInput,
+ "select": ActionSelectInput,
+ "files": ActionFilesInput,
+ "waitload": ActionWaitLoad,
+ "getresource": ActionGetResource,
+ "extract": ActionExtract,
+ "setmethod": ActionSetMethod,
+ "addheader": ActionAddHeader,
+ "setheader": ActionSetHeader,
+ "deleteheader": ActionDeleteHeader,
+ "setbody": ActionSetBody,
+ "waitevent": ActionWaitEvent,
+ "keyboard": ActionKeyboard,
+ "debug": ActionDebug,
+ "sleep": ActionSleep,
+ "waitvisible": ActionWaitVisible,
+}
+
+// ActionToActionString converts an action from internal representation to string
+var ActionToActionString = map[ActionType]string{
+ ActionNavigate: "navigate",
+ ActionScript: "script",
+ ActionClick: "click",
+ ActionRightClick: "rightclick",
+ ActionTextInput: "text",
+ ActionScreenshot: "screenshot",
+ ActionTimeInput: "time",
+ ActionSelectInput: "select",
+ ActionFilesInput: "files",
+ ActionWaitLoad: "waitload",
+ ActionGetResource: "getresource",
+ ActionExtract: "extract",
+	ActionSetMethod:     "setmethod",
+ ActionAddHeader: "addheader",
+ ActionSetHeader: "setheader",
+ ActionDeleteHeader: "deleteheader",
+ ActionSetBody: "setbody",
+ ActionWaitEvent: "waitevent",
+ ActionKeyboard: "keyboard",
+ ActionDebug: "debug",
+ ActionSleep: "sleep",
+ ActionWaitVisible: "waitvisible",
+}
+
+// GetSupportedActionTypes returns list of supported types
+func GetSupportedActionTypes() []ActionType {
+ var result []ActionType
+ for index := ActionType(1); index < limit; index++ {
+ result = append(result, index)
+ }
+ return result
+}
+
+func toActionTypes(valueToMap string) (ActionType, error) {
+ normalizedValue := normalizeValue(valueToMap)
+ for key, currentValue := range ActionToActionString {
+ if normalizedValue == currentValue {
+ return key, nil
+ }
+ }
+ return -1, errors.New("Invalid action type: " + valueToMap)
+}
+
+func normalizeValue(value string) string {
+ return strings.TrimSpace(strings.ToLower(value))
+}
+
+func (t ActionType) String() string {
+ return ActionToActionString[t]
+}
+
+// ActionTypeHolder is used to hold internal type of the action
+type ActionTypeHolder struct {
+ ActionType ActionType `mapping:"true"`
+}
+
+func (holder ActionTypeHolder) String() string {
+ return holder.ActionType.String()
+}
+func (holder ActionTypeHolder) JSONSchemaType() *jsonschema.Type {
+ gotType := &jsonschema.Type{
+ Type: "string",
+ Title: "action to perform",
+ Description: "Type of actions to perform,enum=navigate,enum=script,enum=click,enum=rightclick,enum=text,enum=screenshot,enum=time,enum=select,enum=files,enum=waitload,enum=getresource,enum=extract,enum=setmethod,enum=addheader,enum=setheader,enum=deleteheader,enum=setbody,enum=waitevent,enum=keyboard,enum=debug,enum=sleep",
+ }
+ for _, types := range GetSupportedActionTypes() {
+ gotType.Enum = append(gotType.Enum, types.String())
+ }
+ return gotType
+}
+
+func (holder *ActionTypeHolder) UnmarshalYAML(unmarshal func(interface{}) error) error {
+ var marshalledTypes string
+ if err := unmarshal(&marshalledTypes); err != nil {
+ return err
+ }
+
+ computedType, err := toActionTypes(marshalledTypes)
+ if err != nil {
+ return err
+ }
+
+ holder.ActionType = computedType
+ return nil
+}
+
+func (holder *ActionTypeHolder) MarshalJSON() ([]byte, error) {
+ return json.Marshal(holder.ActionType.String())
+}
+
+func (holder ActionTypeHolder) MarshalYAML() (interface{}, error) {
+ return holder.ActionType.String(), nil
+}
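
ActionTypeHolder mirrors the holder pattern used for matcher and extractor types elsewhere in this change: the YAML field remains a plain string and UnmarshalYAML resolves it to the internal enum. A trimmed-down, self-contained illustration of the same pattern, assuming gopkg.in/yaml.v2 (the unmarshal-callback signature above matches that library):

package main

import (
	"errors"
	"fmt"
	"strings"

	"gopkg.in/yaml.v2"
)

type actionType int8

const actionNavigate actionType = iota + 1

var actionNames = map[actionType]string{actionNavigate: "navigate"}

type actionTypeHolder struct {
	ActionType actionType
}

// UnmarshalYAML maps the YAML string onto the internal enum value.
func (h *actionTypeHolder) UnmarshalYAML(unmarshal func(interface{}) error) error {
	var value string
	if err := unmarshal(&value); err != nil {
		return err
	}
	value = strings.TrimSpace(strings.ToLower(value))
	for t, name := range actionNames {
		if name == value {
			h.ActionType = t
			return nil
		}
	}
	return errors.New("invalid action type: " + value)
}

func main() {
	var step struct {
		Action actionTypeHolder `yaml:"action"`
	}
	if err := yaml.Unmarshal([]byte("action: navigate"), &step); err != nil {
		panic(err)
	}
	fmt.Println(step.Action.ActionType == actionNavigate) // true
}
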
diff --git a/v2/pkg/protocols/headless/engine/engine.go b/v2/pkg/protocols/headless/engine/engine.go
index a506cbda4..6c1046b67 100644
--- a/v2/pkg/protocols/headless/engine/engine.go
+++ b/v2/pkg/protocols/headless/engine/engine.go
@@ -5,6 +5,7 @@ import (
"io/ioutil"
"net/http"
"os"
+ "runtime"
"strings"
"github.com/corpix/uarand"
@@ -21,7 +22,7 @@ import (
type Browser struct {
customAgent string
tempDir string
- previouspids map[int32]struct{} // track already running pids
+ previousPIDs map[int32]struct{} // track already running PIDs
engine *rod.Browser
httpclient *http.Client
options *types.Options
@@ -33,7 +34,7 @@ func New(options *types.Options) (*Browser, error) {
if err != nil {
return nil, errors.Wrap(err, "could not create temporary directory")
}
- previouspids := findChromeProcesses()
+ previousPIDs := findChromeProcesses()
chromeLauncher := launcher.New().
Leakless(false).
@@ -44,12 +45,15 @@ func New(options *types.Options) (*Browser, error) {
Set("disable-notifications", "true").
Set("hide-scrollbars", "true").
Set("window-size", fmt.Sprintf("%d,%d", 1080, 1920)).
- Set("no-sandbox", "true").
Set("mute-audio", "true").
Set("incognito", "true").
Delete("use-mock-keychain").
UserDataDir(dataStore)
+ if MustDisableSandbox() {
+ chromeLauncher = chromeLauncher.NoSandbox(true)
+ }
+
if options.UseInstalledChrome {
if chromePath, hasChrome := launcher.LookPath(); hasChrome {
chromeLauncher.Bin(chromePath)
@@ -63,8 +67,8 @@ func New(options *types.Options) (*Browser, error) {
} else {
chromeLauncher = chromeLauncher.Headless(true)
}
- if options.ProxyURL != "" {
- chromeLauncher = chromeLauncher.Proxy(options.ProxyURL)
+ if types.ProxyURL != "" {
+ chromeLauncher = chromeLauncher.Proxy(types.ProxyURL)
}
launcherURL, err := chromeLauncher.Launch()
if err != nil {
@@ -88,7 +92,12 @@ func New(options *types.Options) (*Browser, error) {
if customAgent == "" {
customAgent = uarand.GetRandom()
}
- httpclient := newhttpClient(options)
+
+ httpclient, err := newHttpClient(options)
+ if err != nil {
+ return nil, err
+ }
+
engine := &Browser{
tempDir: dataStore,
customAgent: customAgent,
@@ -96,10 +105,17 @@ func New(options *types.Options) (*Browser, error) {
httpclient: httpclient,
options: options,
}
- engine.previouspids = previouspids
+ engine.previousPIDs = previousPIDs
return engine, nil
}
+// MustDisableSandbox determines if the current os and user needs sandbox mode disabled
+func MustDisableSandbox() bool {
+ // linux with root user needs "--no-sandbox" option
+ // https://github.com/chromium/chromium/blob/c4d3c31083a2e1481253ff2d24298a1dfe19c754/chrome/test/chromedriver/client/chromedriver.py#L209
+ return runtime.GOOS == "linux" && os.Geteuid() == 0
+}
+
// Close closes the browser engine
func (b *Browser) Close() {
b.engine.Close()
@@ -118,7 +134,7 @@ func (b *Browser) killChromeProcesses() {
continue
}
// skip chrome processes that were already running
- if _, ok := b.previouspids[process.Pid]; ok {
+ if _, ok := b.previousPIDs[process.Pid]; ok {
continue
}
_ = process.Kill()
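
The unconditional no-sandbox flag is gone; MustDisableSandbox only disables Chromium's sandbox where the browser cannot start otherwise, i.e. Linux running as root. The check itself is just GOOS plus the effective UID, as in this standalone sketch:

package main

import (
	"fmt"
	"os"
	"runtime"
)

// mustDisableSandbox mirrors the helper added above: only Linux processes
// running as root (euid 0) need Chromium's --no-sandbox flag.
func mustDisableSandbox() bool {
	return runtime.GOOS == "linux" && os.Geteuid() == 0
}

func main() {
	fmt.Println("disable sandbox:", mustDisableSandbox())
}
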
diff --git a/v2/pkg/protocols/headless/engine/http_client.go b/v2/pkg/protocols/headless/engine/http_client.go
index b4747d541..9b1c5b0df 100644
--- a/v2/pkg/protocols/headless/engine/http_client.go
+++ b/v2/pkg/protocols/headless/engine/http_client.go
@@ -1,34 +1,79 @@
package engine
import (
+ "context"
"crypto/tls"
+ "fmt"
+ "net"
"net/http"
+ "net/http/cookiejar"
"net/url"
"time"
+ "github.com/projectdiscovery/nuclei/v2/pkg/protocols/utils"
+
+ "golang.org/x/net/proxy"
+
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/protocolstate"
"github.com/projectdiscovery/nuclei/v2/pkg/types"
)
-// newhttpClient creates a new http client for headless communication with a timeout
-func newhttpClient(options *types.Options) *http.Client {
+// newHttpClient creates a new http client for headless communication with a timeout
+func newHttpClient(options *types.Options) (*http.Client, error) {
dialer := protocolstate.Dialer
+
+ // Set the base TLS configuration definition
+ tlsConfig := &tls.Config{
+ Renegotiation: tls.RenegotiateOnceAsClient,
+ InsecureSkipVerify: true,
+ }
+
+ // Add the client certificate authentication to the request if it's configured
+ var err error
+ tlsConfig, err = utils.AddConfiguredClientCertToRequest(tlsConfig, options)
+ if err != nil {
+ return nil, err
+ }
+
transport := &http.Transport{
DialContext: dialer.Dial,
MaxIdleConns: 500,
MaxIdleConnsPerHost: 500,
MaxConnsPerHost: 500,
- TLSClientConfig: &tls.Config{
- Renegotiation: tls.RenegotiateOnceAsClient,
- InsecureSkipVerify: true,
- },
+ TLSClientConfig: tlsConfig,
}
-
- if options.ProxyURL != "" {
- if proxyURL, err := url.Parse(options.ProxyURL); err == nil {
+ if types.ProxyURL != "" {
+ if proxyURL, err := url.Parse(types.ProxyURL); err == nil {
transport.Proxy = http.ProxyURL(proxyURL)
}
+ } else if types.ProxySocksURL != "" {
+ var proxyAuth *proxy.Auth
+ socksURL, proxyErr := url.Parse(types.ProxySocksURL)
+ if proxyErr == nil {
+ proxyAuth = &proxy.Auth{}
+ proxyAuth.User = socksURL.User.Username()
+ proxyAuth.Password, _ = socksURL.User.Password()
+ }
+		// build the SOCKS5 dialer only when the proxy URL parsed successfully,
+		// and assert the DialContext interface only after the dialer was created
+		if proxyErr == nil {
+			socksDialer, proxyErr := proxy.SOCKS5("tcp", fmt.Sprintf("%s:%s", socksURL.Hostname(), socksURL.Port()), proxyAuth, proxy.Direct)
+			if proxyErr == nil {
+				if dc, ok := socksDialer.(interface {
+					DialContext(ctx context.Context, network, addr string) (net.Conn, error)
+				}); ok {
+					transport.DialContext = dc.DialContext
+				}
+			}
+		}
}
- return &http.Client{Transport: transport, Timeout: time.Duration(options.Timeout*3) * time.Second}
+ jar, _ := cookiejar.New(nil)
+
+ httpclient := &http.Client{
+ Transport: transport,
+ Timeout: time.Duration(options.Timeout*3) * time.Second,
+ Jar: jar,
+ CheckRedirect: func(req *http.Request, via []*http.Request) error {
+			// the browser should follow redirects, not us
+ return http.ErrUseLastResponse
+ },
+ }
+
+ return httpclient, nil
}
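
The new client sets CheckRedirect to return http.ErrUseLastResponse, so net/http hands back the redirect response itself instead of following it and the browser stays in charge of navigation. A minimal demonstration of that behaviour against a throwaway test server:

package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
)

func main() {
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		http.Redirect(w, r, "/elsewhere", http.StatusFound)
	}))
	defer server.Close()

	client := &http.Client{
		// stop at the first response; do not follow the redirect chain
		CheckRedirect: func(req *http.Request, via []*http.Request) error {
			return http.ErrUseLastResponse
		},
	}
	resp, err := client.Get(server.URL)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.StatusCode, resp.Header.Get("Location")) // 302 /elsewhere
}
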
diff --git a/v2/pkg/protocols/headless/engine/page_actions.go b/v2/pkg/protocols/headless/engine/page_actions.go
index fc6e821ea..b186dcc5a 100644
--- a/v2/pkg/protocols/headless/engine/page_actions.go
+++ b/v2/pkg/protocols/headless/engine/page_actions.go
@@ -24,9 +24,7 @@ func (p *Page) ExecuteActions(baseURL *url.URL, actions []*Action) (map[string]s
outData := make(map[string]string)
for _, act := range actions {
- actionType := ActionStringToAction[act.ActionType]
-
- switch actionType {
+ switch act.ActionType.ActionType {
case ActionNavigate:
err = p.NavigateURL(act, outData, baseURL)
case ActionScript:
@@ -401,12 +399,12 @@ func (p *Page) SelectInputElement(act *Action, out map[string]string /*TODO revi
return errors.Wrap(err, "could not scroll into view")
}
- selectedbool := false
+ selectedBool := false
if act.GetArg("selected") == "true" {
- selectedbool = true
+ selectedBool = true
}
by := act.GetArg("selector")
- if err := element.Select([]string{value}, selectedbool, selectorBy(by)); err != nil {
+ if err := element.Select([]string{value}, selectedBool, selectorBy(by)); err != nil {
return errors.Wrap(err, "could not select input")
}
return nil
@@ -511,7 +509,7 @@ func (p *Page) WaitEvent(act *Action, out map[string]string /*TODO review unused
protoEvent := &protoEvent{event: event}
// Uses another instance in order to be able to chain the timeout only to the wait operation
- pagec := p.page
+ pageCopy := p.page
timeout := act.GetArg("timeout")
if timeout != "" {
ts, err := strconv.Atoi(timeout)
@@ -519,11 +517,11 @@ func (p *Page) WaitEvent(act *Action, out map[string]string /*TODO review unused
return errors.Wrap(err, "could not get timeout")
}
if ts > 0 {
- pagec = p.page.Timeout(time.Duration(ts) * time.Second)
+ pageCopy = p.page.Timeout(time.Duration(ts) * time.Second)
}
}
// Just wait the event to happen
- pagec.WaitEvent(protoEvent)()
+ pageCopy.WaitEvent(protoEvent)()
return nil
}
diff --git a/v2/pkg/protocols/headless/engine/page_actions_test.go b/v2/pkg/protocols/headless/engine/page_actions_test.go
index 902b978ac..4c3414e47 100644
--- a/v2/pkg/protocols/headless/engine/page_actions_test.go
+++ b/v2/pkg/protocols/headless/engine/page_actions_test.go
@@ -28,7 +28,7 @@ func TestActionNavigate(t *testing.T) {