diff --git a/https_everywhere/_chrome_preload_hsts.py b/https_everywhere/_chrome_preload_hsts.py
index 69438fe..2290f99 100644
--- a/https_everywhere/_chrome_preload_hsts.py
+++ b/https_everywhere/_chrome_preload_hsts.py
@@ -2,7 +2,6 @@
 import os.path
 
 import requests
-
 from logging_helper import setup_logging
 
 from ._fetch import _storage_location
@@ -66,9 +65,7 @@ def _preload_including_subdomains(
         includeSubdomains = entry.get("include_subdomains")
 
         if not includeSubdomains:
-            logger.info(
-                "{}: Ignoring !include_subdomains entry: {!r}".format(name, entry)
-            )
+            logger.info("{}: Ignoring !include_subdomains entry: {!r}".format(name, entry))
             continue
 
         if remove_overlap and overlap_order_check:
diff --git a/https_everywhere/_mozilla_preload_hsts.py b/https_everywhere/_mozilla_preload_hsts.py
index 0945d53..cb3515c 100644
--- a/https_everywhere/_mozilla_preload_hsts.py
+++ b/https_everywhere/_mozilla_preload_hsts.py
@@ -1,7 +1,6 @@
 import os.path
 
 import requests
-
 from logging_helper import setup_logging
 
 from ._fetch import _storage_location
@@ -33,7 +32,7 @@ def _load_preload_data(filename):
         negative = set()
         lines = [line.strip() for line in f.readlines()]
         start = lines.index("%%")
-        lines = lines[start + 1 :]
+        lines = lines[start + 1:]
         end = lines.index("%%")
         lines = lines[:end]
         for line in lines:
diff --git a/https_everywhere/_rules.py b/https_everywhere/_rules.py
index f5f33f3..d6e305f 100644
--- a/https_everywhere/_rules.py
+++ b/https_everywhere/_rules.py
@@ -10,16 +10,16 @@
 from ._fetch import fetch_update
 from ._fixme import (
     # _FIXME_MULTIPLE_RULEST_PREFIXES,
-    _FIXME_REJECT_PATTERNS,
-    _FIXME_VERY_BAD_EXPANSION,
-    _FIXME_ODD_STARS,
-    _FIXME_LEADING_STAR_GLOBS,
     _FIXME_BROKEN_REGEX_MATCHES,
+    _FIXME_LEADING_STAR_GLOBS,
+    _FIXME_ODD_STARS,
+    _FIXME_REJECT_PATTERNS,
     _FIXME_SUBDOMAIN_PREFIXES,
+    _FIXME_VERY_BAD_EXPANSION,
 )
 
 try:
-    from ._unregex import expand_pattern, ExpansionError
+    from ._unregex import ExpansionError, expand_pattern
 except ImportError:
     expand_pattern, ExpansionError = None, None
 
@@ -190,8 +190,7 @@ def remove_www(self):
             and self.pattern_hostname.replace("www.", "") == self.replacement_hostname
         ) or (
             self.pattern_hostname.startswith(r"(?:www.)?")
-            and self.pattern_hostname.replace(r"(?:www.)?", "")
-            == self.replacement_hostname
+            and self.pattern_hostname.replace(r"(?:www.)?", "") == self.replacement_hostname
         )
 
     @property
@@ -214,9 +213,7 @@ def _is_rule_only_force_https(ruleset, rule):
         ]
     )
     if len(various_targets) == 1:
-        logger.debug(
-            "{} == {} == {}".format(rule.pattern, targets, rule.replacement)
-        )
+        logger.debug("{} == {} == {}".format(rule.pattern, targets, rule.replacement))
         return True
 
     logger.info("mismatch {}".format(sorted(various_targets)))
@@ -285,9 +282,7 @@ def _reduce_ruleset(ruleset):
     last_rule = _Rule(*rules[-1], ruleset=ruleset)
     if _is_rule_only_force_https(ruleset, last_rule):
         logger.warning(
-            "{} last rule of {} rules reduced to simple force https".format(
-                ruleset, len(rules)
-            )
+            "{} last rule of {} rules reduced to simple force https".format(ruleset, len(rules))
         )
         ruleset._rules[-1] = FORCE_HTTPS_RULE_COMPILED
 
@@ -309,16 +304,12 @@ def _reduce_ruleset(ruleset):
         except ExpansionError as e:
             # TypeError occurs if sre_yield 0.2.0 was installed
             logger.info(
-                "expansion failure in rule {} {}: {}".format(
-                    ruleset.targets, ruleset.rules, e
-                )
+                "expansion failure in rule {} {}: {}".format(ruleset.targets, ruleset.rules, e)
             )
             return
         except Exception as e:  # pragma: no cover
             logger.warning(
-                "unknown failure in rule {} {}: {}".format(
-                    ruleset.targets, ruleset.rules, e
-                )
+                "unknown failure in rule {} {}: {}".format(ruleset.targets, ruleset.rules, e)
             )
             raise
         assert rule.pattern_targets
@@ -328,9 +319,7 @@ def _reduce_ruleset(ruleset):
 
         for pat in pattern_targets:
             if pat.startswith(".") or pat.endswith("."):
-                logger.info(
-                    '{} expands to invalid hostname "{}"'.format(rule.pattern, pat)
-                )
+                logger.info('{} expands to invalid hostname "{}"'.format(rule.pattern, pat))
                 continue
 
             assert set(pat) - set("(|)") == set(pat)
@@ -425,10 +414,7 @@ def _reduce_ruleset(ruleset):
             target = targets[0]
 
             # None with pattern and replacement that are the same as target
-            assert (
-                target != rule.pattern_hostname
-                or target != rule.replacement_hostname
-            )
+            assert target != rule.pattern_hostname or target != rule.replacement_hostname
 
             if target == rule.pattern_hostname:
                 # ~120 cases
@@ -594,9 +580,7 @@ def _reduce_rules(rulesets, check=False, simplify=False):
                 continue
 
             parts = item.split(".")
-            assert not all(
-                part.isdigit() or part == "*" for part in parts
-            ), orig_ruleset
+            assert not all(part.isdigit() or part == "*" for part in parts), orig_ruleset
 
             # https://github.com/EFForg/https-everywhere/issues/18897
             if "voxmedia.com" not in item:
@@ -656,9 +640,7 @@ def _reduce_rules(rulesets, check=False, simplify=False):
             reduced_rules.append(rule)
 
         if not reduced_rules:
-            logger.warning(
-                "Rejecting ruleset {} as it has no usable rules".format(name)
-            )
+            logger.warning("Rejecting ruleset {} as it has no usable rules".format(name))
             continue
 
         rules = reduced_rules
@@ -800,9 +782,7 @@ def _get_ruleset(hostname, rulesets=None):
     parts = hostname.split(".")
 
     if len(parts) > 5:
-        subdomain_rule = "*.{}.{}.{}.{}".format(
-            parts[-4], parts[-3], parts[-2], parts[-1]
-        )
+        subdomain_rule = "*.{}.{}.{}.{}".format(parts[-4], parts[-3], parts[-2], parts[-1])
         ruleset = rulesets.get(subdomain_rule)
         if ruleset:
             return ruleset
@@ -856,9 +836,7 @@ def https_url_rewrite(url, rulesets=None):
             new_url = rule[0].sub(rule[1], url)
         except Exception as e:  # pragma: no cover
             logger.warning(
-                "failed during rule {} -> {} , input {}: {}".format(
-                    rule[0], rule[1], url, e
-                )
+                "failed during rule {} -> {} , input {}: {}".format(rule[0], rule[1], url, e)
             )
             raise
 
diff --git a/https_everywhere/_unregex.py b/https_everywhere/_unregex.py
index c69e141..faafb4a 100644
--- a/https_everywhere/_unregex.py
+++ b/https_everywhere/_unregex.py
@@ -1,16 +1,14 @@
 import sre_parse
 
-from logging_helper import setup_logging
-
+import sre_yield
 import urllib3
+from logging_helper import setup_logging
 from urllib3.util.url import parse_url as urlparse
 
-import sre_yield
-
 from ._fixme import (
+    _FIXME_EXTRA_REPLACEMENTS,
     _FIXME_SUBDOMAIN_PREFIXES,
     _FIXME_SUBDOMAIN_SUFFIXES,
-    _FIXME_EXTRA_REPLACEMENTS,
 )
 
 logger = setup_logging()
@@ -69,9 +67,7 @@ def expand_pattern(pattern, max_count=100):
         # TODO: build test case for this
         pass
 
-    pattern = pattern_without_subdomain_prefix.replace("*.", "~~").replace(
-        ".*", ",,"
-    )
+    pattern = pattern_without_subdomain_prefix.replace("*.", "~~").replace(".*", ",,")
 
     for match, replacement in _FIXME_EXTRA_REPLACEMENTS:
         pattern = pattern.replace(match, replacement)
@@ -85,9 +81,7 @@ def expand_pattern(pattern, max_count=100):
     try:
         rv = sre_yield.AllStrings(c, max_count=10, relaxed=True)[: max_count + 1]
     except TypeError:
-        raise ExpansionError(
-            "sre_yield 0.2.0 installed; please install master for expansion"
-        )
+        raise ExpansionError("sre_yield 0.2.0 installed; please install master for expansion")
 
     # https://github.com/google/sre_yield/issues/16
     assert rv.__len__() <= max_count + 1, (rv.__len__(), max_count, pattern, c)
diff --git a/https_everywhere/_util.py b/https_everywhere/_util.py
index 23b4705..aad381d 100644
--- a/https_everywhere/_util.py
+++ b/https_everywhere/_util.py
@@ -32,8 +32,6 @@ def _check_in(domains, hostname):
             return subdomain_rule
 
     if len(parts) > 4:
-        subdomain_rule = "{}.{}.{}.{}".format(
-            parts[-4], parts[-3], parts[-2], parts[-1]
-        )
+        subdomain_rule = "{}.{}.{}.{}".format(parts[-4], parts[-3], parts[-2], parts[-1])
         if subdomain_rule in domains:
             return subdomain_rule
diff --git a/https_everywhere/adapter.py b/https_everywhere/adapter.py
index 4d16a5c..b0fbefe 100644
--- a/https_everywhere/adapter.py
+++ b/https_everywhere/adapter.py
@@ -1,17 +1,15 @@
 from __future__ import unicode_literals
 
-from logging_helper import setup_logging
-
-import urllib3
-from urllib3.util.url import parse_url
-
 import requests
+import urllib3
+from logging_helper import setup_logging
 from requests.adapters import HTTPAdapter
 from requests.packages.urllib3.util.timeout import Timeout
+from urllib3.util.url import parse_url
 
-from ._rules import https_url_rewrite, _get_rulesets
 from ._chrome_preload_hsts import _preload_including_subdomains
 from ._mozilla_preload_hsts import _preload_remove_negative
+from ._rules import _get_rulesets, https_url_rewrite
 from ._util import _check_in
 
 PY2 = str != "".__class__
@@ -93,9 +91,7 @@ def send(self, request, *args, **kwargs):
             url = None
         elif isinstance(rv, requests.Response):
             logger.info(
-                "adapter responding to {} with {}: {!r}".format(
-                    request.url, rv.url, rv.headers
-                )
+                "adapter responding to {} with {}: {!r}".format(request.url, rv.url, rv.headers)
             )
             rv.request = request
             rv.url = request.url
@@ -258,9 +254,7 @@ def _follow_redirects_on_http(self, url):
                 previous_url = current_url
                 url = location
             else:
-                raise RuntimeError(
-                    "{} redirected to {}".format(current_url, location)
-                )
+                raise RuntimeError("{} redirected to {}".format(current_url, location))
 
     def send(self, request, *args, **kwargs):
         url = request.url
@@ -281,17 +275,13 @@ def send(self, request, *args, **kwargs):
             if not isinstance(redirect, str):
                 # Following redirects may provide a redirect response object
                 # This was the modwsgi scenario
-                logger.info(
-                    "upgrading {} to https with {}".format(url, redirect.url)
-                )
+                logger.info("upgrading {} to https with {}".format(url, redirect.url))
                 return redirect
             elif redirect != url:
                 if redirect.startswith("http://"):
                     tail = url[7:]
                 else:
-                    raise RuntimeError(
-                        "Unexpectedly {} redirected to {}".format(url, redirect)
-                    )
+                    raise RuntimeError("Unexpectedly {} redirected to {}".format(url, redirect))
 
                 logger.info("upgrading {} to https".format(url))
                 response = self._generate_redirect("https://" + tail)
@@ -336,18 +326,14 @@ def send(self, request, *args, **kwargs):
         url = request.url
 
         if not url.startswith("https://"):
-            response = super(SafeUpgradeHTTPSAdapter, self).send(
-                request, *args, **kwargs
-            )
+            response = super(SafeUpgradeHTTPSAdapter, self).send(request, *args, **kwargs)
             logger.debug("http response reason: {}".format(response.reason))
             if response.reason != _REASON:  # pragma: no cover
                 return response
 
            request.url = response.headers["location"]
            try:
-                response = super(SafeUpgradeHTTPSAdapter, self).send(
-                    request, *args, **kwargs
-                )
+                response = super(SafeUpgradeHTTPSAdapter, self).send(request, *args, **kwargs)
                 redirect = response.headers.get("location")
                 if not redirect or redirect != url:
                     return response
diff --git a/https_everywhere/session.py b/https_everywhere/session.py
index fb22dae..8abfde2 100644
--- a/https_everywhere/session.py
+++ b/https_everywhere/session.py
@@ -1,6 +1,6 @@
 from requests import Session
 
-from .adapter import HTTPSEverywhereAdapter, HTTPAdapter
+from .adapter import HTTPAdapter, HTTPSEverywhereAdapter
 
 
 class HTTPSEverywhereSession(Session):