This repository was archived by the owner on Apr 26, 2024. It is now read-only.

Commit 93a25e7

Add redis
1 parent c0ff5b3 commit 93a25e7

5 files changed: 146 additions, 9 deletions

mypy.ini

Lines changed: 3 additions & 0 deletions
@@ -75,3 +75,6 @@ ignore_missing_imports = True
 
 [mypy-jwt.*]
 ignore_missing_imports = True
+
+[mypy-txredisapi]
+ignore_missing_imports = True

synapse/app/homeserver.py

Lines changed: 6 additions & 0 deletions
@@ -60,6 +60,8 @@
 from synapse.module_api import ModuleApi
 from synapse.python_dependencies import check_requirements
 from synapse.replication.http import REPLICATION_PREFIX, ReplicationRestResource
+from synapse.replication.tcp.protocol import RedisFactory  # noqa: F401
+from synapse.replication.tcp.resource import ReplicationStreamer  # noqa: F401
 from synapse.replication.tcp.resource import ReplicationStreamProtocolFactory
 from synapse.rest import ClientRestResource
 from synapse.rest.admin import AdminRestResource
@@ -282,6 +284,10 @@ def start_listening(self, listeners):
                 )
                 for s in services:
                     reactor.addSystemEventTrigger("before", "shutdown", s.stopListening)
+
+                # factory = RedisFactory(self, ReplicationStreamer(self))
+                # self.get_reactor().connectTCP("redis", 6379, factory)
+
             elif listener["type"] == "metrics":
                 if not self.get_config().enable_metrics:
                     logger.warning(
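The commented-out lines above sketch how the master process would publish replication traffic over Redis: the ReplicationStreamer (the master-side source of stream updates) is handed to a RedisFactory, which is then pointed at a Redis server. A minimal sketch of that wiring, assuming the hard-coded "redis" host and port 6379 from this commit and a hypothetical helper name:

# Sketch only: the wiring that the commented-out lines above would perform on
# the master. start_redis_replication is a hypothetical helper, and the
# "redis"/6379 host and port are the placeholders hard-coded in this commit.
from synapse.replication.tcp.protocol import RedisFactory
from synapse.replication.tcp.resource import ReplicationStreamer


def start_redis_replication(hs):
    # On the master, the handler given to RedisFactory is the ReplicationStreamer,
    # i.e. the component that produces the stream updates to be published.
    factory = RedisFactory(hs, ReplicationStreamer(hs))
    hs.get_reactor().connectTCP("redis", 6379, factory)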

synapse/python_dependencies.py

Lines changed: 1 addition & 0 deletions
@@ -75,6 +75,7 @@
     "Jinja2>=2.9",
     "bleach>=1.4.3",
     "typing-extensions>=3.7.4",
+    "txredisapi",
 ]
 
 CONDITIONAL_REQUIREMENTS = {
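txredisapi is a Twisted-native Redis client; the Redis transport added in this commit relies on its SubscriberProtocol/SubscriberFactory classes for receiving and on lazyConnection for publishing. A self-contained sketch of that pub/sub pattern, with an illustrative host, channel name, and message that are not part of the commit:

import txredisapi as redis
from twisted.internet import reactor


class EchoSubscriber(redis.SubscriberProtocol):
    def connectionMade(self):
        # Subscribe as soon as the connection to Redis is up.
        self.subscribe("example-channel")

    def messageReceived(self, pattern, channel, message):
        print("received %r on %r" % (message, channel))


class EchoSubscriberFactory(redis.SubscriberFactory):
    protocol = EchoSubscriber


if __name__ == "__main__":
    # One connection subscribes to the channel...
    reactor.connectTCP("localhost", 6379, EchoSubscriberFactory())
    # ...while a separate lazy connection publishes to it shortly afterwards.
    publisher = redis.lazyConnection("localhost", 6379)
    reactor.callLater(1, publisher.publish, "example-channel", "hello")
    reactor.run()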

synapse/replication/tcp/client.py

Lines changed: 8 additions & 4 deletions
@@ -22,7 +22,7 @@
 from twisted.internet.protocol import ReconnectingClientFactory
 
 from synapse.replication.slave.storage._base import BaseSlavedStore
-from synapse.replication.tcp.protocol import (
+from synapse.replication.tcp.protocol import (  # RedisFactory,
     AbstractReplicationClientHandler,
     ClientReplicationStreamProtocol,
 )
@@ -107,11 +107,15 @@ def start_replication(self, hs):
         using TCP.
         """
         client_name = hs.config.worker_name
-        self.factory = ReplicationClientFactory(hs, client_name, self)
         host = hs.config.worker_replication_host
         port = hs.config.worker_replication_port
+
+        self.factory = ReplicationClientFactory(hs, client_name, self)
         hs.get_reactor().connectTCP(host, port, self.factory)
 
+        # self.factory = RedisFactory(hs, self)
+        # hs.get_reactor().connectTCP("redis", 6379, self.factory)
+
     def new_connection(self, connection):
         self.connection = connection
         if connection:
@@ -122,7 +126,7 @@ def new_connection(self, connection):
     def lost_connection(self, connection):
         self.connection = None
 
-    def on_user_sync(self, conn_id, user_id, is_syncing, last_sync_ms):
+    async def on_user_sync(self, conn_id, user_id, is_syncing, last_sync_ms):
         pass
 
     def federation_ack(self, token):
@@ -249,7 +253,7 @@ def finished_connecting(self):
         """Called when we have successfully subscribed and caught up to all
         streams we're interested in.
         """
-        logger.info("Finished connecting to server")
+        logger.debug("Finished connecting to server")
 
         # We don't reset the delay any earlier as otherwise if there is a
         # problem during start up we'll end up tight looping connecting to the
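For comparison with the master-side wiring in homeserver.py, the commented-out lines here would have the worker subscribe via Redis instead of opening a direct TCP connection to the master's replication port; the handler passed in is the ReplicationClientHandler itself. A rough sketch of start_replication with that path enabled, keeping this commit's hard-coded "redis"/6379 placeholders:

class ReplicationClientHandler:
    # Sketch only: what start_replication() would look like with the Redis path
    # enabled in place of the direct TCP connection to the master.
    def start_replication(self, hs):
        from synapse.replication.tcp.protocol import RedisFactory

        # The worker's own handler (self) receives the commands arriving over
        # Redis; "redis" and 6379 are the placeholders used in this commit.
        self.factory = RedisFactory(hs, self)
        hs.get_reactor().connectTCP("redis", 6379, self.factory)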

synapse/replication/tcp/protocol.py

Lines changed: 128 additions & 5 deletions
@@ -55,11 +55,13 @@
 
 from six import iteritems
 
+import txredisapi as redis
 from prometheus_client import Counter
 
 from twisted.protocols.basic import LineOnlyReceiver
 from twisted.python.failure import Failure
 
+from synapse.logging.context import PreserveLoggingContext
 from synapse.metrics import LaterGauge
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.replication.tcp.commands import (
@@ -420,6 +422,8 @@ class CommandHandler:
     def __init__(self, hs, handler):
         self.handler = handler
 
+        self.is_master = hs.config.worker.worker_app is None
+
         self.clock = hs.get_clock()
 
         self.streams = {
@@ -458,11 +462,22 @@ def lost_connection(self, connection):
         self.handler.lost_connection(connection)
 
     async def on_USER_SYNC(self, cmd: UserSyncCommand):
+        if not self.connection:
+            raise Exception("Not connected")
+
         await self.handler.on_user_sync(
             self.connection.conn_id, cmd.user_id, cmd.is_syncing, cmd.last_sync_ms
         )
 
     async def on_REPLICATE(self, cmd: ReplicateCommand):
+        # We only want to announce positions by the writer of the streams.
+        # Currently this is just the master process.
+        if not self.is_master:
+            return
+
+        if not self.connection:
+            raise Exception("Not connected")
+
         for stream_name, stream in self.streams.items():
             current_token = stream.current_token()
             self.connection.send_command(PositionCommand(stream_name, current_token))
@@ -483,15 +498,14 @@ async def on_SYNC(self, cmd: SyncCommand):
         self.handler.on_sync(cmd.data)
 
     async def on_RDATA(self, cmd: RdataCommand):
+
         stream_name = cmd.stream_name
         inbound_rdata_count.labels(stream_name).inc()
 
         try:
             row = STREAMS_MAP[stream_name].parse_row(cmd.row)
         except Exception:
-            logger.exception(
-                "[%s] Failed to parse RDATA: %r %r", self.id(), stream_name, cmd.row
-            )
+            logger.exception("[%s] Failed to parse RDATA: %r", stream_name, cmd.row)
             raise
 
         if cmd.token is None or stream_name in self.streams_connecting:
@@ -519,7 +533,7 @@ async def on_POSITION(self, cmd: PositionCommand):
             return
 
         # Fetch all updates between then and now.
-        limited = True
+        limited = cmd.token != current_token
         while limited:
             updates, current_token, limited = await stream.get_updates_since(
                 current_token, cmd.token
@@ -582,7 +596,7 @@ def lost_connection(self, connection):
         raise NotImplementedError()
 
     @abc.abstractmethod
-    def on_user_sync(
+    async def on_user_sync(
         self, conn_id: str, user_id: str, is_syncing: bool, last_sync_ms: int
     ):
         """A client has started/stopped syncing on a worker.
@@ -794,3 +808,112 @@ def transport_kernel_read_buffer_size(protocol, read=True):
 inbound_rdata_count = Counter(
     "synapse_replication_tcp_protocol_inbound_rdata_count", "", ["stream_name"]
 )
+
+
+class RedisSubscriber(redis.SubscriberProtocol):
+    def connectionMade(self):
+        logger.info("MADE CONNECTION")
+        self.subscribe(self.stream_name)
+        self.send_command(ReplicateCommand("ALL"))
+
+        self.handler.new_connection(self)
+
+    def messageReceived(self, pattern, channel, message):
+        if message.strip() == "":
+            # Ignore blank lines
+            return
+
+        line = message
+        cmd_name, rest_of_line = line.split(" ", 1)
+
+        cmd_cls = COMMAND_MAP[cmd_name]
+        try:
+            cmd = cmd_cls.from_line(rest_of_line)
+        except Exception as e:
+            logger.exception(
+                "[%s] failed to parse line %r: %r", self.id(), cmd_name, rest_of_line
+            )
+            self.send_error(
+                "failed to parse line for %r: %r (%r):" % (cmd_name, e, rest_of_line)
+            )
+            return
+
+        # Now lets try and call on_<CMD_NAME> function
+        run_as_background_process(
+            "replication-" + cmd.get_logcontext_id(), self.handle_command, cmd
+        )
+
+    async def handle_command(self, cmd: Command):
+        """Handle a command we have received over the replication stream.
+
+        By default delegates to on_<COMMAND>, which should return an awaitable.
+
+        Args:
+            cmd: received command
+        """
+        # First call any command handlers on this instance. These are for TCP
+        # specific handling.
+        cmd_func = getattr(self, "on_%s" % (cmd.NAME,), None)
+        if cmd_func:
+            await cmd_func(cmd)
+
+        # Then call out to the handler.
+        cmd_func = getattr(self.handler, "on_%s" % (cmd.NAME,), None)
+        if cmd_func:
+            await cmd_func(cmd)
+
+    def connectionLost(self, reason):
+        logger.info("LOST CONNECTION")
+        self.handler.lost_connection(self)
+
+    def send_command(self, cmd):
+        """Send a command if connection has been established.
+
+        Args:
+            cmd (Command)
+        """
+        string = "%s %s" % (cmd.NAME, cmd.to_line())
+        if "\n" in string:
+            raise Exception("Unexpected newline in command: %r", string)
+
+        encoded_string = string.encode("utf-8")
+
+        async def _send():
+            with PreserveLoggingContext():
+                await self.redis_connection.publish(self.stream_name, encoded_string)
+
+        run_as_background_process("send-cmd", _send)
+
+    def stream_update(self, stream_name, token, data):
+        """Called when a new update is available to stream to clients.
+
+        We need to check if the client is interested in the stream or not
+        """
+        self.send_command(RdataCommand(stream_name, token, data))
+
+    def send_sync(self, data):
+        self.send_command(SyncCommand(data))
+
+    def send_remote_server_up(self, server: str):
+        self.send_command(RemoteServerUpCommand(server))
+
+
+class RedisFactory(redis.SubscriberFactory):
+
+    maxDelay = 5
+    continueTrying = True
+    protocol = RedisSubscriber
+
+    def __init__(self, hs, handler):
+        super(RedisFactory, self).__init__()
+
+        self.handler = CommandHandler(hs, handler)
+        self.stream_name = hs.hostname
+
+    def buildProtocol(self, addr):
+        p = super(RedisFactory, self).buildProtocol(addr)
+        p.handler = self.handler
+        p.redis_connection = redis.lazyConnection("redis")
+        p.conn_id = random_string(5)  # TODO: FIXME
+        p.stream_name = self.stream_name
+        return p
0 commit comments
