Rename URLMatcher -> URLPattern #1109

Merged · 2 commits · Aug 1, 2020
18 changes: 9 additions & 9 deletions httpx/_client.py
@@ -44,7 +44,7 @@
)
from ._utils import (
NetRCInfo,
- URLMatcher,
+ URLPattern,
enforce_http_url,
get_environment_proxies,
get_logger,
@@ -474,9 +474,9 @@ def __init__(
trust_env=trust_env,
)
self._proxies: typing.Dict[
- URLMatcher, typing.Optional[httpcore.SyncHTTPTransport]
+ URLPattern, typing.Optional[httpcore.SyncHTTPTransport]
] = {
- URLMatcher(key): None
+ URLPattern(key): None
if proxy is None
else self._init_proxy_transport(
proxy,
@@ -547,8 +547,8 @@ def _transport_for_url(self, request: Request) -> httpcore.SyncHTTPTransport:
enforce_http_url(request)

if self._proxies and not should_not_be_proxied(url):
- for matcher, transport in self._proxies.items():
- if matcher.matches(url):
+ for pattern, transport in self._proxies.items():
+ if pattern.matches(url):
return self._transport if transport is None else transport

return self._transport
@@ -998,9 +998,9 @@ def __init__(
trust_env=trust_env,
)
self._proxies: typing.Dict[
- URLMatcher, typing.Optional[httpcore.AsyncHTTPTransport]
+ URLPattern, typing.Optional[httpcore.AsyncHTTPTransport]
] = {
- URLMatcher(key): None
+ URLPattern(key): None
if proxy is None
else self._init_proxy_transport(
proxy,
@@ -1071,8 +1071,8 @@ def _transport_for_url(self, request: Request) -> httpcore.AsyncHTTPTransport:
enforce_http_url(request)

if self._proxies and not should_not_be_proxied(url):
- for matcher, transport in self._proxies.items():
- if matcher.matches(url):
+ for pattern, transport in self._proxies.items():
+ if pattern.matches(url):
return self._transport if transport is None else transport

return self._transport
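For readers skimming the diff: the `_transport_for_url` hunks above walk the pattern-to-transport mapping and return the transport for the first pattern that matches the request URL. Below is a minimal, hypothetical sketch of that lookup written against the (private) `URLPattern` helper; the string values stand in for real `httpcore` transports, and the explicit `sorted()` call reflects the most-specific-first ordering described in the `priority` docstring.

```python
# Sketch only: mimics the pattern -> transport lookup in _transport_for_url.
# The string values below are placeholders for httpcore transport instances.
import httpx
from httpx._utils import URLPattern  # private helper at the time of this PR

transports = {
    URLPattern("all://"): "default-proxy",
    URLPattern("http://example.com"): "example-proxy",
}

def transport_for_url(url: httpx.URL) -> str:
    # Check the most specific patterns first (URLPattern.__lt__ sorts that way),
    # then fall back to a default when nothing matches.
    for pattern in sorted(transports):
        if pattern.matches(url):
            return transports[pattern]
    return "no-proxy"

print(transport_for_url(httpx.URL("http://example.com/path")))  # example-proxy
print(transport_for_url(httpx.URL("https://other.org/")))       # default-proxy
```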
18 changes: 9 additions & 9 deletions httpx/_utils.py
@@ -434,24 +434,24 @@ def elapsed(self) -> timedelta:
return timedelta(seconds=self.end - self.start)


- class URLMatcher:
+ class URLPattern:
"""
A utility class currently used for making lookups against proxy keys...

# Wildcard matching...
>>> pattern = URLMatcher("all")
>>> pattern = URLPattern("all")
>>> pattern.matches(httpx.URL("http://example.com"))
True

# With scheme matching...
>>> pattern = URLMatcher("https")
>>> pattern = URLPattern("https")
>>> pattern.matches(httpx.URL("https://example.com"))
True
>>> pattern.matches(httpx.URL("http://example.com"))
False

# With domain matching...
>>> pattern = URLMatcher("https://example.com")
>>> pattern = URLPattern("https://example.com")
>>> pattern.matches(httpx.URL("https://example.com"))
True
>>> pattern.matches(httpx.URL("http://example.com"))
@@ -460,7 +460,7 @@ class URLMatcher:
False

# Wildcard scheme, with domain matching...
>>> pattern = URLMatcher("all://example.com")
>>> pattern = URLPattern("all://example.com")
>>> pattern.matches(httpx.URL("https://example.com"))
True
>>> pattern.matches(httpx.URL("http://example.com"))
@@ -469,7 +469,7 @@ class URLMatcher:
False

# With port matching...
>>> pattern = URLMatcher("https://example.com:1234")
>>> pattern = URLPattern("https://example.com:1234")
>>> pattern.matches(httpx.URL("https://example.com:1234"))
True
>>> pattern.matches(httpx.URL("https://example.com"))
@@ -500,7 +500,7 @@ def matches(self, other: "URL") -> bool:
@property
def priority(self) -> tuple:
"""
- The priority allows URLMatcher instances to be sortable, so that
+ The priority allows URLPattern instances to be sortable, so that
we can match from most specific to least specific.
"""
port_priority = -1 if self.port is not None else 0
@@ -511,11 +511,11 @@ def priority(self) -> tuple:
def __hash__(self) -> int:
return hash(self.pattern)

def __lt__(self, other: "URLMatcher") -> bool:
def __lt__(self, other: "URLPattern") -> bool:
return self.priority < other.priority

def __eq__(self, other: typing.Any) -> bool:
- return isinstance(other, URLMatcher) and self.pattern == other.pattern
+ return isinstance(other, URLPattern) and self.pattern == other.pattern


def warn_deprecated(message: str) -> None: # pragma: nocover
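The `priority` property touched in the last two hunks is what makes `URLPattern` instances sortable from most specific to least specific. A small illustration of that ordering, using the same pattern strings as the test further below (this assumes the private import path and that `.pattern` stores the original string):

```python
from httpx._utils import URLPattern

patterns = [
    URLPattern("all://"),
    URLPattern("http://"),
    URLPattern("http://example.com"),
    URLPattern("http://example.com:123"),
]

# sorted() only needs URLPattern.__lt__, which compares the priority tuples:
# an explicit port beats a bare host, a host beats a bare scheme, and a
# concrete scheme beats the "all" wildcard.
for pattern in sorted(patterns):
    print(pattern.pattern)
# Expected output (most specific first):
#   http://example.com:123
#   http://example.com
#   http://
#   all://
```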
8 changes: 4 additions & 4 deletions tests/client/test_proxies.py
@@ -2,7 +2,7 @@
import pytest

import httpx
- from httpx._utils import URLMatcher
+ from httpx._utils import URLPattern


def url_to_origin(url: str):
@@ -37,9 +37,9 @@ def test_proxies_parameter(proxies, expected_proxies):
client = httpx.AsyncClient(proxies=proxies)

for proxy_key, url in expected_proxies:
- matcher = URLMatcher(proxy_key)
- assert matcher in client._proxies
- proxy = client._proxies[matcher]
+ pattern = URLPattern(proxy_key)
+ assert pattern in client._proxies
+ proxy = client._proxies[pattern]
assert isinstance(proxy, httpcore.AsyncHTTPProxy)
assert proxy.proxy_origin == url_to_origin(url)

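The updated test exercises the mapping the clients build in `__init__`: each key of the user-supplied `proxies` dict is wrapped in a `URLPattern` and used as the dictionary key. A hedged, standalone sketch of that membership check (the `_proxies` attribute is private, and the proxy URL here is just an example value):

```python
import httpx
from httpx._utils import URLPattern

# Example proxy URL; nothing is connected at construction time.
client = httpx.AsyncClient(proxies={"all://": "http://127.0.0.1:8080"})

# Because URLPattern hashes and compares by its pattern string, an equal
# pattern built from the same key should be present in the internal map.
assert URLPattern("all://") in client._proxies
```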
24 changes: 12 additions & 12 deletions tests/test_utils.py
@@ -8,7 +8,7 @@
from httpx._utils import (
ElapsedTimer,
NetRCInfo,
- URLMatcher,
+ URLPattern,
get_ca_bundle_from_env,
get_environment_proxies,
guess_json_utf,
@@ -331,21 +331,21 @@ def test_not_same_origin():
],
)
def test_url_matches(pattern, url, expected):
- matcher = URLMatcher(pattern)
- assert matcher.matches(httpx.URL(url)) == expected
+ pattern = URLPattern(pattern)
+ assert pattern.matches(httpx.URL(url)) == expected


- def test_matcher_priority():
+ def test_pattern_priority():
matchers = [
URLMatcher("all://"),
URLMatcher("http://"),
URLMatcher("http://example.com"),
URLMatcher("http://example.com:123"),
URLPattern("all://"),
URLPattern("http://"),
URLPattern("http://example.com"),
URLPattern("http://example.com:123"),
]
random.shuffle(matchers)
assert sorted(matchers) == [
URLMatcher("http://example.com:123"),
URLMatcher("http://example.com"),
URLMatcher("http://"),
URLMatcher("all://"),
URLPattern("http://example.com:123"),
URLPattern("http://example.com"),
URLPattern("http://"),
URLPattern("all://"),
]