Skip to content

Commit

Permalink
Use a common "master_host" test fixture
Browse files Browse the repository at this point in the history
  • Loading branch information
kristjanvalur committed May 4, 2023
1 parent 126e5fd commit ebf044b
Show file tree
Hide file tree
Showing 6 changed files with 22 additions and 64 deletions.
2 changes: 1 addition & 1 deletion tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -441,7 +441,7 @@ def mock_cluster_resp_slaves(request, **kwargs):
def master_host(request):
url = request.config.getoption("--redis-url")
parts = urlparse(url)
yield parts.hostname, parts.port
return parts.hostname, (parts.port or 6379)


@pytest.fixture()
Expand Down
8 changes: 0 additions & 8 deletions tests/test_asyncio/conftest.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
import random
from contextlib import asynccontextmanager as _asynccontextmanager
from typing import Union
from urllib.parse import urlparse

import pytest
import pytest_asyncio
Expand Down Expand Up @@ -209,13 +208,6 @@ async def mock_cluster_resp_slaves(create_redis, **kwargs):
return _gen_cluster_mock_resp(r, response)


@pytest_asyncio.fixture(scope="session")
def master_host(request):
    """Session fixture: hostname component of the ``--redis-url`` option."""
    redis_url = request.config.getoption("--redis-url")
    return urlparse(redis_url).hostname


async def wait_for_command(
client: redis.Redis, monitor: Monitor, command: str, key: Union[str, None] = None
):
Expand Down
20 changes: 4 additions & 16 deletions tests/test_asyncio/test_cluster.py
Original file line number Diff line number Diff line change
Expand Up @@ -102,18 +102,6 @@ async def pipe(
await writer.drain()


@pytest.fixture
def redis_addr(request):
    """Return ``(host, port)`` parsed from the ``--redis-url`` test option.

    Uses ``urlparse``'s ``hostname``/``port`` accessors instead of manually
    splitting the netloc on ``":"``, which would mis-handle IPv6 literals
    (``redis://[::1]:6379``) and userinfo components.  Falls back to the
    default Redis port 6379 when the URL carries no explicit port.
    """
    redis_url = request.config.getoption("--redis-url")
    parts = urlparse(redis_url)
    assert parts.scheme == "redis"
    # urlparse.port is already an int (or None when absent).
    return parts.hostname, (parts.port or 6379)


@pytest_asyncio.fixture()
async def slowlog(r: RedisCluster) -> None:
"""
Expand Down Expand Up @@ -874,15 +862,16 @@ async def test_default_node_is_replaced_after_exception(self, r):
# Rollback to the old default node
r.replace_default_node(curr_default_node)

async def test_address_remap(self, create_redis, redis_addr):
async def test_address_remap(self, create_redis, master_host):
"""Test that we can create a rediscluster object with
a host-port remapper and map connections through proxy objects
"""

# we remap the first n nodes
offset = 1000
n = 6
ports = [redis_addr[1] + i for i in range(n)]
hostname, master_port = master_host
ports = [master_port + i for i in range(n)]

def address_remap(address):
# remap first three nodes to our local proxy
Expand All @@ -895,8 +884,7 @@ def address_remap(address):

# create the proxies
proxies = [
NodeProxy(("127.0.0.1", port + offset), (redis_addr[0], port))
for port in ports
NodeProxy(("127.0.0.1", port + offset), (hostname, port)) for port in ports
]
await asyncio.gather(*[p.start() for p in proxies])
try:
Expand Down
33 changes: 12 additions & 21 deletions tests/test_asyncio/test_cwe_404.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
import asyncio
import contextlib
import urllib.parse

import pytest

Expand All @@ -9,23 +8,14 @@
from redis.asyncio.connection import async_timeout


@pytest.fixture
def redis_addr(request):
    """Return ``(host, port)`` (both strings) from the ``--redis-url`` option.

    Fixes the inconsistent return type of the original: it returned a *list*
    when the netloc contained a ``":"`` but a *tuple* otherwise.  Now a tuple
    is returned on both paths, with the port kept as a string (as callers of
    this fixture expect) and defaulting to ``"6379"``.
    """
    redis_url = request.config.getoption("--redis-url")
    scheme, netloc = urllib.parse.urlparse(redis_url)[:2]
    assert scheme == "redis"
    host, _, port = netloc.partition(":")
    return host, (port or "6379")


class DelayProxy:
def __init__(self, addr, redis_addr, delay: float = 0.0):
    """Create a delaying TCP proxy in front of a Redis server.

    Args:
        addr: ``(host, port)`` pair the proxy listens on.
        redis_addr: ``(host, port)`` of the backend the proxy forwards to.
        delay: seconds of artificial latency to inject (0.0 = passthrough).
    """
    self.addr = addr
    self.redis_addr = redis_addr
    self.delay = delay
    # NOTE(review): presumably used to gate/signal data forwarding in
    # handle()/set_delay() — confirm against the full class body.
    self.send_event = asyncio.Event()
    # Both are created in start(): `server` by asyncio.start_server() and
    # `task` as the serve_forever() task; stop() cancels/closes them.
    self.server = None
    self.task = None

async def __aenter__(self):
await self.start()
Expand All @@ -42,7 +32,7 @@ async def start(self):
self.server = await asyncio.start_server(
self.handle, *self.addr, reuse_address=True
)
self.ROUTINE = asyncio.create_task(self.server.serve_forever())
self.task = asyncio.create_task(self.server.serve_forever())

@contextlib.contextmanager
def set_delay(self, delay: float = 0.0):
Expand Down Expand Up @@ -71,9 +61,9 @@ async def handle(self, reader, writer):

async def stop(self):
# clean up enough so that we can reuse the looper
self.ROUTINE.cancel()
self.task.cancel()
try:
await self.ROUTINE
await self.task
except asyncio.CancelledError:
pass
loop = self.server.get_loop()
Expand All @@ -100,11 +90,11 @@ async def pipe(

@pytest.mark.onlynoncluster
@pytest.mark.parametrize("delay", argvalues=[0.05, 0.5, 1, 2])
async def test_standalone(delay, redis_addr):
async def test_standalone(delay, master_host):

# create a tcp socket proxy that relays data to Redis and back,
# inserting 0.1 seconds of delay
async with DelayProxy(addr=("127.0.0.1", 5380), redis_addr=redis_addr) as dp:
async with DelayProxy(addr=("127.0.0.1", 5380), redis_addr=master_host) as dp:

for b in [True, False]:
# note that we connect to proxy, rather than to Redis directly
Expand Down Expand Up @@ -141,8 +131,8 @@ async def op(r):
@pytest.mark.xfail(reason="cancel does not cause disconnect")
@pytest.mark.onlynoncluster
@pytest.mark.parametrize("delay", argvalues=[0.05, 0.5, 1, 2])
async def test_standalone_pipeline(delay, redis_addr):
async with DelayProxy(addr=("127.0.0.1", 5380), redis_addr=redis_addr) as dp:
async def test_standalone_pipeline(delay, master_host):
async with DelayProxy(addr=("127.0.0.1", 5380), redis_addr=master_host) as dp:
for b in [True, False]:
async with Redis(
host="127.0.0.1", port=5380, single_connection_client=b
Expand Down Expand Up @@ -191,12 +181,13 @@ async def op(pipe):


@pytest.mark.onlycluster
async def test_cluster(request, redis_addr):
async def test_cluster(master_host):

delay = 0.1
cluster_port = 6372
remap_base = 7372
n_nodes = 6
hostname, _ = master_host

def remap(address):
host, port = address
Expand All @@ -206,7 +197,7 @@ def remap(address):
for i in range(n_nodes):
port = cluster_port + i
remapped = remap_base + i
forward_addr = redis_addr[0], port
forward_addr = hostname, port
proxy = DelayProxy(addr=("127.0.0.1", remapped), redis_addr=forward_addr)
proxies.append(proxy)

Expand Down
2 changes: 1 addition & 1 deletion tests/test_asyncio/test_sentinel.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@

@pytest_asyncio.fixture(scope="module")
def master_ip(master_host):
yield socket.gethostbyname(master_host)
yield socket.gethostbyname(master_host[0])


class SentinelTestClient:
Expand Down
21 changes: 4 additions & 17 deletions tests/test_cluster.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@
from queue import LifoQueue, Queue
from time import sleep
from unittest.mock import DEFAULT, Mock, call, patch
from urllib.parse import urlparse

import pytest

Expand Down Expand Up @@ -125,18 +124,6 @@ def close(self):
self.server.shutdown()


@pytest.fixture
def redis_addr(request):
    """Return ``(host, port)`` parsed from the ``--redis-url`` test option.

    Uses ``urlparse``'s ``hostname``/``port`` accessors instead of manually
    splitting the netloc on ``":"``, which would mis-handle IPv6 literals
    (``redis://[::1]:6379``) and userinfo components.  Falls back to the
    default Redis port 6379 when the URL carries no explicit port.
    """
    redis_url = request.config.getoption("--redis-url")
    parts = urlparse(redis_url)
    assert parts.scheme == "redis"
    # urlparse.port is already an int (or None when absent).
    return parts.hostname, (parts.port or 6379)


@pytest.fixture()
def slowlog(request, r):
"""
Expand Down Expand Up @@ -907,15 +894,16 @@ def raise_connection_error():
assert "myself" not in nodes.get(curr_default_node.name).get("flags")
assert r.get_default_node() != curr_default_node

def test_address_remap(self, request, redis_addr):
def test_address_remap(self, request, master_host):
"""Test that we can create a rediscluster object with
a host-port remapper and map connections through proxy objects
"""

# we remap the first n nodes
offset = 1000
n = 6
ports = [redis_addr[1] + i for i in range(n)]
hostname, master_port = master_host
ports = [master_port + i for i in range(n)]

def address_remap(address):
# remap first three nodes to our local proxy
Expand All @@ -928,8 +916,7 @@ def address_remap(address):

# create the proxies
proxies = [
NodeProxy(("127.0.0.1", port + offset), (redis_addr[0], port))
for port in ports
NodeProxy(("127.0.0.1", port + offset), (hostname, port)) for port in ports
]
for p in proxies:
p.start()
Expand Down

0 comments on commit ebf044b

Please sign in to comment.