Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

removed expect-ct it's deprecated #15

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -246,7 +246,7 @@ Install it as follows.
1. Go to Extender, Extensions, and click on Add Extension. Select python and load the burpsecheaders.py file.
![Load the burpsecheaders.py file](./pics/burp1.png)
2. Once BurpSuite loads the plugin successfully, visit a website and observe that the plugin reports issues under the scanner tab.
![Scanner shows issues of the plugin](./pics/burp2.png)
![Scanner shows issues of the plugin](./pics/burp2.png)

Observe that the plugin highlights the offending header/directives/keywords in the response headers.
![BurpSuite highlights the insecure headers](./pics/burp3.png)
Expand Down Expand Up @@ -405,7 +405,7 @@ The HTTP Strict Transport Security (HSTS) header ensures that all communication

The header has the following directives:

- **max-age**: specifies the number of seconds the browser regards the host as a known HSTS Host.
- **max-age**: specifies the number of seconds the browser regards the host as a known HSTS Host.
- **includeSubDomains**: this optional directive indicates that the HSTS Policy applies to this HSTS Host as well as any subdomains of
the host's domain name.
- **preload**: the `preload` directive indicates that the domain can be preloaded in the browser as a known HSTS host.
Expand Down Expand Up @@ -681,7 +681,7 @@ The tool also identifies the following syntactical errors (`SyntaxChecker`) for
```
- **Missing Directive** (`MissingDirectiveChecker`): using a header without a required directive is an issue. The tool will thus mark the following as an error as max-age is missing.
```http
Expect-CT: enforce
Strict-Transport-Security: includeSubDomains
```
- **Empty Directives** (`EmptyDirectiveChecker`): using a directive without a required value is an issue. The tool will thus mark the following as an error.
```http
Expand Down
1 change: 0 additions & 1 deletion securityheaders/checkers/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,6 @@
from .xpoweredby import *
from .xxssprotection import *
from .other import *
from .expectct import *
from .xpcdp import *
from .setcookie import *

Expand Down
5 changes: 0 additions & 5 deletions securityheaders/checkers/expectct/__init__.py

This file was deleted.

9 changes: 0 additions & 9 deletions securityheaders/checkers/expectct/checker.py

This file was deleted.

17 changes: 0 additions & 17 deletions securityheaders/checkers/expectct/httpreporturi.py

This file was deleted.

22 changes: 0 additions & 22 deletions securityheaders/checkers/expectct/notenforce.py

This file was deleted.

29 changes: 0 additions & 29 deletions securityheaders/checkers/expectct/test_httpreporturi.py

This file was deleted.

51 changes: 0 additions & 51 deletions securityheaders/checkers/expectct/test_notenforce.py

This file was deleted.

5 changes: 3 additions & 2 deletions securityheaders/models/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,14 +27,15 @@
from .xwebkitcsp import *
from .xcsp import *
from .xdownloadoptions import *
from .expectct import *
from .xaspnetversion import *
from .xaspnetmvcversion import *
from .hpkp import *
from .xpcdp import *
from .setcookie import *

__all__ = ['annotations','csp','cors','clearsitedata','hsts','xcontenttypeoptions','xframeoptions','xxssprotection','featurepolicy','referrerpolicy','server','xpoweredby', 'expectct','xcsp','xwebkitcsp','xpcdp','xaspnetversion','xaspnetmvcversion','hpkp','xdownloadoptions']
__all__ = ['annotations', 'csp', 'cors', 'clearsitedata', 'hsts', 'xcontenttypeoptions', 'xframeoptions',
'xxssprotection', 'featurepolicy', 'referrerpolicy', 'server', 'xpoweredby', 'xcsp', 'xwebkitcsp', 'xpcdp',
'xaspnetversion', 'xaspnetmvcversion', 'hpkp', 'xdownloadoptions']
clazzes = list(Util.inheritors(Header))
clazzes.extend(Util.inheritors(Directive))
clazzes.extend(Util.inheritors(Keyword))
Expand Down
4 changes: 0 additions & 4 deletions securityheaders/models/expectct/__init__.py

This file was deleted.

34 changes: 0 additions & 34 deletions securityheaders/models/expectct/expectct.py

This file was deleted.

29 changes: 0 additions & 29 deletions securityheaders/models/expectct/expectctdirective.py

This file was deleted.

26 changes: 13 additions & 13 deletions securityheaders/securityheader.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
import ssl
import random
from functools import partial
from anytree import ContStyle, RenderTree
from anytree import ContStyle, RenderTree

try:
from multiprocessing import Pool, freeze_support
Expand Down Expand Up @@ -44,14 +44,14 @@ def _unpickle_method(func_name, obj, cls):
break
return func.__get__(obj, cls)

try:
try:
import copy_reg
except ModuleNotFoundError:
import copyreg as copy_reg

import types
copy_reg.pickle(types.MethodType, _pickle_method, _unpickle_method)


class SecurityHeaders(object):
def __init__(self):
Expand All @@ -75,7 +75,7 @@ def get_options(self):

def get_all_checker_names(self):
return CheckerFactory().getnames()

def get_all_header_names(self):
return sorted(ModelFactory().getheadernames())

Expand Down Expand Up @@ -137,7 +137,7 @@ def check_headers_with_map(self, headermap, options=None):
options['checks'] = []
if not 'unwanted' in options.keys():
options['unwanted'] = []
checks = CheckerFactory().getactualcheckers(options['checks'])
checks = CheckerFactory().getactualcheckers(options['checks'])
unwanted = CheckerFactory().getactualcheckers(options['unwanted'])
options['checks'] = [e for e in checks if e not in unwanted]
options['unwanted'] = None
Expand All @@ -149,7 +149,7 @@ def check_headers_with_map(self, headermap, options=None):
for check in options[checker].keys():
if leaf not in options.keys():
options[leaf]=dict()
options[leaf][check] = options[checker][check]
options[leaf][check] = options[checker][check]
return HeaderEvaluator().evaluate(headermap,options)

def check_headers_parallel(self, urls, options=None, callback=None):
Expand All @@ -164,7 +164,7 @@ def check_headers_parallel(self, urls, options=None, callback=None):
result = pool.apply_async(self.check_headers, args=(url, options.get('redirects'), options), callback=callback)
results.append(result)
pool.close()
pool.join()
pool.join()
return results
else:
raise Exception('no parallelism supported')
Expand All @@ -175,7 +175,7 @@ def check_headers(self, url, follow_redirects = 3, options=None):

Args:
url (str): Target URL in format: scheme://hostname/path/to/file
follow_redirects (Optional[str]): How deep we follow the redirects,
follow_redirects (Optional[str]): How deep we follow the redirects,
value 0 disables redirects.
"""
if not options:
Expand All @@ -193,7 +193,7 @@ def check_headers(self, url, follow_redirects = 3, options=None):
else:
url = 'https://' + url.strip()

parsed = urlparse(url)
parsed = urlparse(url)
hostname = parsed.netloc
if not hostname:
return []
Expand Down Expand Up @@ -222,7 +222,7 @@ def check_headers(self, url, follow_redirects = 3, options=None):
else:
""" Unknown protocol scheme """
return {}

conn.request('GET', path, None, headers)
res = conn.getresponse()
headers = res.getheaders()
Expand All @@ -231,14 +231,14 @@ def check_headers(self, url, follow_redirects = 3, options=None):
if (res.status >= 300 and res.status < 400 and follow_redirects > 0):
for header in headers:
if (header[0] == 'location'):
return self.check_headers((urlid, header[1]), follow_redirects - 1, options)
return self.check_headers((urlid, header[1]), follow_redirects - 1, options)

""" Loop through headers and evaluate the risk """
result = self.check_headers_with_map(headers, options)
for finding in result:
finding.url = url
finding.urlid = urlid

return result
except Exception as e:
return [Finding(None, FindingType.ERROR, str(e), FindingSeverity.ERROR, None, None,url , urlid)]