Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

black, autopep8, isort #45

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 1 addition & 4 deletions https_everywhere/_chrome_preload_hsts.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@
import os.path

import requests

from logging_helper import setup_logging

from ._fetch import _storage_location
Expand Down Expand Up @@ -66,9 +65,7 @@ def _preload_including_subdomains(
includeSubdomains = entry.get("include_subdomains")

if not includeSubdomains:
logger.info(
"{}: Ignoring !include_subdomains entry: {!r}".format(name, entry)
)
logger.info("{}: Ignoring !include_subdomains entry: {!r}".format(name, entry))
continue

if remove_overlap and overlap_order_check:
Expand Down
3 changes: 1 addition & 2 deletions https_everywhere/_mozilla_preload_hsts.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
import os.path

import requests

from logging_helper import setup_logging

from ._fetch import _storage_location
Expand Down Expand Up @@ -33,7 +32,7 @@ def _load_preload_data(filename):
negative = set()
lines = [line.strip() for line in f.readlines()]
start = lines.index("%%")
lines = lines[start + 1 :]
lines = lines[start + 1:]
end = lines.index("%%")
lines = lines[:end]
for line in lines:
Expand Down
54 changes: 16 additions & 38 deletions https_everywhere/_rules.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,16 +10,16 @@
from ._fetch import fetch_update
from ._fixme import (
# _FIXME_MULTIPLE_RULEST_PREFIXES,
_FIXME_REJECT_PATTERNS,
_FIXME_VERY_BAD_EXPANSION,
_FIXME_ODD_STARS,
_FIXME_LEADING_STAR_GLOBS,
_FIXME_BROKEN_REGEX_MATCHES,
_FIXME_LEADING_STAR_GLOBS,
_FIXME_ODD_STARS,
_FIXME_REJECT_PATTERNS,
_FIXME_SUBDOMAIN_PREFIXES,
_FIXME_VERY_BAD_EXPANSION,
)

try:
from ._unregex import expand_pattern, ExpansionError
from ._unregex import ExpansionError, expand_pattern
except ImportError:
expand_pattern, ExpansionError = None, None

Expand Down Expand Up @@ -190,8 +190,7 @@ def remove_www(self):
and self.pattern_hostname.replace("www.", "") == self.replacement_hostname
) or (
self.pattern_hostname.startswith(r"(?:www.)?")
and self.pattern_hostname.replace(r"(?:www.)?", "")
== self.replacement_hostname
and self.pattern_hostname.replace(r"(?:www.)?", "") == self.replacement_hostname
)

@property
Expand All @@ -214,9 +213,7 @@ def _is_rule_only_force_https(ruleset, rule):
]
)
if len(various_targets) == 1:
logger.debug(
"{} == {} == {}".format(rule.pattern, targets, rule.replacement)
)
logger.debug("{} == {} == {}".format(rule.pattern, targets, rule.replacement))
return True

logger.info("mismatch {}".format(sorted(various_targets)))
Expand Down Expand Up @@ -285,9 +282,7 @@ def _reduce_ruleset(ruleset):
last_rule = _Rule(*rules[-1], ruleset=ruleset)
if _is_rule_only_force_https(ruleset, last_rule):
logger.warning(
"{} last rule of {} rules reduced to simple force https".format(
ruleset, len(rules)
)
"{} last rule of {} rules reduced to simple force https".format(ruleset, len(rules))
)
ruleset._rules[-1] = FORCE_HTTPS_RULE_COMPILED

Expand All @@ -309,16 +304,12 @@ def _reduce_ruleset(ruleset):
except ExpansionError as e:
# TypeError occurs if sre_yield 0.2.0 was installed
logger.info(
"expansion failure in rule {} {}: {}".format(
ruleset.targets, ruleset.rules, e
)
"expansion failure in rule {} {}: {}".format(ruleset.targets, ruleset.rules, e)
)
return
except Exception as e: # pragma: no cover
logger.warning(
"unknown failure in rule {} {}: {}".format(
ruleset.targets, ruleset.rules, e
)
"unknown failure in rule {} {}: {}".format(ruleset.targets, ruleset.rules, e)
)
raise
assert rule.pattern_targets
Expand All @@ -328,9 +319,7 @@ def _reduce_ruleset(ruleset):

for pat in pattern_targets:
if pat.startswith(".") or pat.endswith("."):
logger.info(
'{} expands to invalid hostname "{}"'.format(rule.pattern, pat)
)
logger.info('{} expands to invalid hostname "{}"'.format(rule.pattern, pat))
continue

assert set(pat) - set("(|)") == set(pat)
Expand Down Expand Up @@ -425,10 +414,7 @@ def _reduce_ruleset(ruleset):
target = targets[0]

# None with pattern and replacement that are the same as target
assert (
target != rule.pattern_hostname
or target != rule.replacement_hostname
)
assert target != rule.pattern_hostname or target != rule.replacement_hostname

if target == rule.pattern_hostname:
# ~120 cases
Expand Down Expand Up @@ -594,9 +580,7 @@ def _reduce_rules(rulesets, check=False, simplify=False):
continue

parts = item.split(".")
assert not all(
part.isdigit() or part == "*" for part in parts
), orig_ruleset
assert not all(part.isdigit() or part == "*" for part in parts), orig_ruleset

# https://github.com/EFForg/https-everywhere/issues/18897
if "voxmedia.com" not in item:
Expand Down Expand Up @@ -656,9 +640,7 @@ def _reduce_rules(rulesets, check=False, simplify=False):
reduced_rules.append(rule)

if not reduced_rules:
logger.warning(
"Rejecting ruleset {} as it has no usable rules".format(name)
)
logger.warning("Rejecting ruleset {} as it has no usable rules".format(name))
continue

rules = reduced_rules
Expand Down Expand Up @@ -800,9 +782,7 @@ def _get_ruleset(hostname, rulesets=None):
parts = hostname.split(".")

if len(parts) > 5:
subdomain_rule = "*.{}.{}.{}.{}".format(
parts[-4], parts[-3], parts[-2], parts[-1]
)
subdomain_rule = "*.{}.{}.{}.{}".format(parts[-4], parts[-3], parts[-2], parts[-1])
ruleset = rulesets.get(subdomain_rule)
if ruleset:
return ruleset
Expand Down Expand Up @@ -856,9 +836,7 @@ def https_url_rewrite(url, rulesets=None):
new_url = rule[0].sub(rule[1], url)
except Exception as e: # pragma: no cover
logger.warning(
"failed during rule {} -> {} , input {}: {}".format(
rule[0], rule[1], url, e
)
"failed during rule {} -> {} , input {}: {}".format(rule[0], rule[1], url, e)
)
raise

Expand Down
16 changes: 5 additions & 11 deletions https_everywhere/_unregex.py
Original file line number Diff line number Diff line change
@@ -1,16 +1,14 @@
import sre_parse

from logging_helper import setup_logging

import sre_yield
import urllib3
from logging_helper import setup_logging
from urllib3.util.url import parse_url as urlparse

import sre_yield

from ._fixme import (
_FIXME_EXTRA_REPLACEMENTS,
_FIXME_SUBDOMAIN_PREFIXES,
_FIXME_SUBDOMAIN_SUFFIXES,
_FIXME_EXTRA_REPLACEMENTS,
)

logger = setup_logging()
Expand Down Expand Up @@ -69,9 +67,7 @@ def expand_pattern(pattern, max_count=100):
# TODO: build test case for this
pass

pattern = pattern_without_subdomain_prefix.replace("*.", "~~").replace(
".*", ",,"
)
pattern = pattern_without_subdomain_prefix.replace("*.", "~~").replace(".*", ",,")

for match, replacement in _FIXME_EXTRA_REPLACEMENTS:
pattern = pattern.replace(match, replacement)
Expand All @@ -85,9 +81,7 @@ def expand_pattern(pattern, max_count=100):
try:
rv = sre_yield.AllStrings(c, max_count=10, relaxed=True)[: max_count + 1]
except TypeError:
raise ExpansionError(
"sre_yield 0.2.0 installed; please install master for expansion"
)
raise ExpansionError("sre_yield 0.2.0 installed; please install master for expansion")

# https://github.com/google/sre_yield/issues/16
assert rv.__len__() <= max_count + 1, (rv.__len__(), max_count, pattern, c)
Expand Down
4 changes: 1 addition & 3 deletions https_everywhere/_util.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,8 +32,6 @@ def _check_in(domains, hostname):
return subdomain_rule

if len(parts) > 4:
subdomain_rule = "{}.{}.{}.{}".format(
parts[-4], parts[-3], parts[-2], parts[-1]
)
subdomain_rule = "{}.{}.{}.{}".format(parts[-4], parts[-3], parts[-2], parts[-1])
if subdomain_rule in domains:
return subdomain_rule
34 changes: 10 additions & 24 deletions https_everywhere/adapter.py
Original file line number Diff line number Diff line change
@@ -1,17 +1,15 @@
from __future__ import unicode_literals

from logging_helper import setup_logging

import urllib3
from urllib3.util.url import parse_url

import requests
import urllib3
from logging_helper import setup_logging
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.timeout import Timeout
from urllib3.util.url import parse_url

from ._rules import https_url_rewrite, _get_rulesets
from ._chrome_preload_hsts import _preload_including_subdomains
from ._mozilla_preload_hsts import _preload_remove_negative
from ._rules import _get_rulesets, https_url_rewrite
from ._util import _check_in

PY2 = str != "".__class__
Expand Down Expand Up @@ -93,9 +91,7 @@ def send(self, request, *args, **kwargs):
url = None
elif isinstance(rv, requests.Response):
logger.info(
"adapter responding to {} with {}: {!r}".format(
request.url, rv.url, rv.headers
)
"adapter responding to {} with {}: {!r}".format(request.url, rv.url, rv.headers)
)
rv.request = request
rv.url = request.url
Expand Down Expand Up @@ -258,9 +254,7 @@ def _follow_redirects_on_http(self, url):
previous_url = current_url
url = location
else:
raise RuntimeError(
"{} redirected to {}".format(current_url, location)
)
raise RuntimeError("{} redirected to {}".format(current_url, location))

def send(self, request, *args, **kwargs):
url = request.url
Expand All @@ -281,17 +275,13 @@ def send(self, request, *args, **kwargs):
if not isinstance(redirect, str):
# Following redirects may provide a redirect response object
# This was the modwsgi scenario
logger.info(
"upgrading {} to https with {}".format(url, redirect.url)
)
logger.info("upgrading {} to https with {}".format(url, redirect.url))
return redirect
elif redirect != url:
if redirect.startswith("http://"):
tail = url[7:]
else:
raise RuntimeError(
"Unexpectedly {} redirected to {}".format(url, redirect)
)
raise RuntimeError("Unexpectedly {} redirected to {}".format(url, redirect))
logger.info("upgrading {} to https".format(url))

response = self._generate_redirect("https://" + tail)
Expand Down Expand Up @@ -336,18 +326,14 @@ def send(self, request, *args, **kwargs):
url = request.url

if not url.startswith("https://"):
response = super(SafeUpgradeHTTPSAdapter, self).send(
request, *args, **kwargs
)
response = super(SafeUpgradeHTTPSAdapter, self).send(request, *args, **kwargs)
logger.debug("http response reason: {}".format(response.reason))
if response.reason != _REASON: # pragma: no cover
return response
request.url = response.headers["location"]

try:
response = super(SafeUpgradeHTTPSAdapter, self).send(
request, *args, **kwargs
)
response = super(SafeUpgradeHTTPSAdapter, self).send(request, *args, **kwargs)
redirect = response.headers.get("location")
if not redirect or redirect != url:
return response
Expand Down
2 changes: 1 addition & 1 deletion https_everywhere/session.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from requests import Session

from .adapter import HTTPSEverywhereAdapter, HTTPAdapter
from .adapter import HTTPAdapter, HTTPSEverywhereAdapter


class HTTPSEverywhereSession(Session):
Expand Down