
Commit

Clean up newline quote marks around the codebase (#6362)
* commit '3916e1b97':
  Clean up newline quote marks around the codebase (#6362)
  update macOS installation instructions
anoadragon453 committed Mar 18, 2020
2 parents 2001e42 + 3916e1b commit 11f3cb0
Showing 26 changed files with 52 additions and 47 deletions.
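
The "quote marks" in question are adjacent Python string literals left over from previously wrapped lines being rejoined: Python concatenates them implicitly, so the strings behaved correctly but carried a stray quote-space-quote seam in the middle. A minimal sketch of the pattern, using one of the strings touched in this commit:

```
# Before: two adjacent literals; Python concatenates them at compile time,
# leaving an awkward `" "` seam mid-string.
sql = "SELECT stream_id FROM federation_stream_position" " WHERE type = ?"

# After: one literal, byte-for-byte identical at runtime.
assert sql == "SELECT stream_id FROM federation_stream_position WHERE type = ?"
```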
12 changes: 10 additions & 2 deletions INSTALL.md
@@ -133,9 +133,9 @@ sudo yum install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
sudo yum groupinstall "Development Tools"
```

#### Mac OS X
#### macOS

Installing prerequisites on Mac OS X:
Installing prerequisites on macOS:

```
xcode-select --install
@@ -144,6 +144,14 @@ sudo pip install virtualenv
brew install pkg-config libffi
```

On macOS Catalina (10.15) you may need to explicitly install OpenSSL
via brew and inform `pip` about it so that `psycopg2` builds:

```
brew install openssl@1.1
export LDFLAGS=-L/usr/local/Cellar/openssl\@1.1/1.1.1d/lib/
```
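
Note that the `LDFLAGS` path above pins a specific Homebrew keg version (1.1.1d); if a different OpenSSL 1.1 patch release is installed, the path will differ. A version-agnostic sketch, assuming `brew --prefix` is available to resolve the keg location:

```
export LDFLAGS="-L$(brew --prefix openssl@1.1)/lib"
pip install psycopg2   # re-try the build to confirm the library path is picked up
```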

#### OpenSUSE

Installing prerequisites on openSUSE:
1 change: 1 addition & 0 deletions changelog.d/6362.misc
@@ -0,0 +1 @@
Clean up some unnecessary quotation marks around the codebase.
2 changes: 1 addition & 1 deletion synapse/app/federation_sender.py
@@ -69,7 +69,7 @@ def __init__(self, db_conn, hs):
self.federation_out_pos_startup = self._get_federation_out_pos(db_conn)

def _get_federation_out_pos(self, db_conn):
sql = "SELECT stream_id FROM federation_stream_position" " WHERE type = ?"
sql = "SELECT stream_id FROM federation_stream_position WHERE type = ?"
sql = self.database_engine.convert_param_style(sql)

txn = db_conn.cursor()
2 changes: 1 addition & 1 deletion synapse/appservice/api.py
@@ -185,7 +185,7 @@ def _get():

if not _is_valid_3pe_metadata(info):
logger.warning(
"query_3pe_protocol to %s did not return a" " valid result", uri
"query_3pe_protocol to %s did not return a valid result", uri
)
return None

2 changes: 1 addition & 1 deletion synapse/config/appservice.py
@@ -134,7 +134,7 @@ def _load_appservice(hostname, as_info, config_filename):
for regex_obj in as_info["namespaces"][ns]:
if not isinstance(regex_obj, dict):
raise ValueError(
"Expected namespace entry in %s to be an object," " but got %s",
"Expected namespace entry in %s to be an object, but got %s",
ns,
regex_obj,
)
2 changes: 1 addition & 1 deletion synapse/config/room_directory.py
@@ -170,7 +170,7 @@ def __init__(self, option_name, rule):
self.action = action
else:
raise ConfigError(
"%s rules can only have action of 'allow'" " or 'deny'" % (option_name,)
"%s rules can only have action of 'allow' or 'deny'" % (option_name,)
)

self._alias_matches_all = alias == "*"
6 changes: 3 additions & 3 deletions synapse/config/server.py
@@ -229,7 +229,7 @@ def read_config(self, config, **kwargs):
self.federation_ip_range_blacklist.update(["0.0.0.0", "::"])
except Exception as e:
raise ConfigError(
"Invalid range(s) provided in " "federation_ip_range_blacklist: %s" % e
"Invalid range(s) provided in federation_ip_range_blacklist: %s" % e
)

if self.public_baseurl is not None:
@@ -992,14 +992,14 @@ def add_arguments(parser):
"--print-pidfile",
action="store_true",
default=None,
help="Print the path to the pidfile just" " before daemonizing",
help="Print the path to the pidfile just before daemonizing",
)
server_group.add_argument(
"--manhole",
metavar="PORT",
dest="manhole",
type=int,
help="Turn on the twisted telnet manhole" " service on the given port.",
help="Turn on the twisted telnet manhole service on the given port.",
)


4 changes: 2 additions & 2 deletions synapse/federation/persistence.py
@@ -44,7 +44,7 @@ def have_responded(self, origin, transaction):
response code and response body.
"""
if not transaction.transaction_id:
raise RuntimeError("Cannot persist a transaction with no " "transaction_id")
raise RuntimeError("Cannot persist a transaction with no transaction_id")

return self.store.get_received_txn_response(transaction.transaction_id, origin)

@@ -56,7 +56,7 @@ def set_response(self, origin, transaction, code, response):
Deferred
"""
if not transaction.transaction_id:
raise RuntimeError("Cannot persist a transaction with no " "transaction_id")
raise RuntimeError("Cannot persist a transaction with no transaction_id")

return self.store.set_received_txn_response(
transaction.transaction_id, origin, code, response
4 changes: 2 additions & 2 deletions synapse/federation/sender/transaction_manager.py
@@ -84,7 +84,7 @@ def send_new_transaction(self, destination, pending_pdus, pending_edus):
txn_id = str(self._next_txn_id)

logger.debug(
"TX [%s] {%s} Attempting new transaction" " (pdus: %d, edus: %d)",
"TX [%s] {%s} Attempting new transaction (pdus: %d, edus: %d)",
destination,
txn_id,
len(pdus),
@@ -103,7 +103,7 @@ def send_new_transaction(self, destination, pending_pdus, pending_edus):
self._next_txn_id += 1

logger.info(
"TX [%s] {%s} Sending transaction [%s]," " (PDUs: %d, EDUs: %d)",
"TX [%s] {%s} Sending transaction [%s], (PDUs: %d, EDUs: %d)",
destination,
txn_id,
transaction.transaction_id,
2 changes: 1 addition & 1 deletion synapse/handlers/directory.py
@@ -119,7 +119,7 @@ def create_association(
if not service.is_interested_in_alias(room_alias.to_string()):
raise SynapseError(
400,
"This application service has not reserved" " this kind of alias.",
"This application service has not reserved this kind of alias.",
errcode=Codes.EXCLUSIVE,
)
else:
2 changes: 1 addition & 1 deletion synapse/http/servlet.py
@@ -96,7 +96,7 @@ def parse_boolean_from_args(args, name, default=None, required=False):
return {b"true": True, b"false": False}[args[name][0]]
except Exception:
message = (
"Boolean query parameter %r must be one of" " ['true', 'false']"
"Boolean query parameter %r must be one of ['true', 'false']"
) % (name,)
raise SynapseError(400, message)
else:
5 changes: 2 additions & 3 deletions synapse/push/httppusher.py
@@ -246,7 +246,7 @@ def _unsafe_process(self):
# fixed, we don't suddenly deliver a load
# of old notifications.
logger.warning(
"Giving up on a notification to user %s, " "pushkey %s",
"Giving up on a notification to user %s, pushkey %s",
self.user_id,
self.pushkey,
)
@@ -299,8 +299,7 @@ def _process_one(self, push_action):
# for sanity, we only remove the pushkey if it
# was the one we actually sent...
logger.warning(
("Ignoring rejected pushkey %s because we" " didn't send it"),
pk,
("Ignoring rejected pushkey %s because we didn't send it"), pk,
)
else:
logger.info("Pushkey %s was rejected: removing", pk)
4 changes: 2 additions & 2 deletions synapse/push/mailer.py
@@ -43,7 +43,7 @@


MESSAGE_FROM_PERSON_IN_ROOM = (
"You have a message on %(app)s from %(person)s " "in the %(room)s room..."
"You have a message on %(app)s from %(person)s in the %(room)s room..."
)
MESSAGE_FROM_PERSON = "You have a message on %(app)s from %(person)s..."
MESSAGES_FROM_PERSON = "You have messages on %(app)s from %(person)s..."
@@ -55,7 +55,7 @@
"You have messages on %(app)s from %(person)s and others..."
)
INVITE_FROM_PERSON_TO_ROOM = (
"%(person)s has invited you to join the " "%(room)s room on %(app)s..."
"%(person)s has invited you to join the %(room)s room on %(app)s..."
)
INVITE_FROM_PERSON = "%(person)s has invited you to chat on %(app)s..."

2 changes: 1 addition & 1 deletion synapse/rest/media/v1/preview_url_resource.py
@@ -122,7 +122,7 @@ async def _async_render_GET(self, request):
pattern = entry[attrib]
value = getattr(url_tuple, attrib)
logger.debug(
"Matching attrib '%s' with value '%s' against" " pattern '%s'",
"Matching attrib '%s' with value '%s' against pattern '%s'",
attrib,
value,
pattern,
2 changes: 1 addition & 1 deletion synapse/server_notices/consent_server_notices.py
@@ -54,7 +54,7 @@ def __init__(self, hs):
)
if "body" not in self._server_notice_content:
raise ConfigError(
"user_consent server_notice_consent must contain a 'body' " "key."
"user_consent server_notice_consent must contain a 'body' key."
)

self._consent_uri_builder = ConsentURIBuilder(hs.config)
2 changes: 1 addition & 1 deletion synapse/storage/_base.py
@@ -851,7 +851,7 @@ def _simple_upsert_txn_native_upsert(
allvalues.update(values)
latter = "UPDATE SET " + ", ".join(k + "=EXCLUDED." + k for k in values)

sql = ("INSERT INTO %s (%s) VALUES (%s) " "ON CONFLICT (%s) DO %s") % (
sql = ("INSERT INTO %s (%s) VALUES (%s) ON CONFLICT (%s) DO %s") % (
table,
", ".join(k for k in allvalues),
", ".join("?" for _ in allvalues),
2 changes: 1 addition & 1 deletion synapse/storage/data_stores/main/deviceinbox.py
@@ -380,7 +380,7 @@ def _add_messages_to_local_device_inbox_txn(
devices = list(messages_by_device.keys())
if len(devices) == 1 and devices[0] == "*":
# Handle wildcard device_ids.
sql = "SELECT device_id FROM devices" " WHERE user_id = ?"
sql = "SELECT device_id FROM devices WHERE user_id = ?"
txn.execute(sql, (user_id,))
message_json = json.dumps(messages_by_device["*"])
for row in txn:
6 changes: 3 additions & 3 deletions synapse/storage/data_stores/main/end_to_end_keys.py
@@ -138,9 +138,9 @@ def _get_e2e_device_keys_txn(
result.setdefault(user_id, {})[device_id] = None

# get signatures on the device
signature_sql = (
"SELECT * " " FROM e2e_cross_signing_signatures " " WHERE %s"
) % (" OR ".join("(" + q + ")" for q in signature_query_clauses))
signature_sql = ("SELECT * FROM e2e_cross_signing_signatures WHERE %s") % (
" OR ".join("(" + q + ")" for q in signature_query_clauses)
)

txn.execute(signature_sql, signature_query_params)
rows = self.cursor_to_dict(txn)
8 changes: 3 additions & 5 deletions synapse/storage/data_stores/main/events.py
@@ -713,9 +713,7 @@ def _update_outliers_txn(self, txn, events_and_contexts):

metadata_json = encode_json(event.internal_metadata.get_dict())

sql = (
"UPDATE event_json SET internal_metadata = ?" " WHERE event_id = ?"
)
sql = "UPDATE event_json SET internal_metadata = ? WHERE event_id = ?"
txn.execute(sql, (metadata_json, event.event_id))

# Add an entry to the ex_outlier_stream table to replicate the
@@ -732,7 +730,7 @@ def _update_outliers_txn(self, txn, events_and_contexts):
},
)

sql = "UPDATE events SET outlier = ?" " WHERE event_id = ?"
sql = "UPDATE events SET outlier = ? WHERE event_id = ?"
txn.execute(sql, (False, event.event_id))

# Update the event_backward_extremities table now that this
@@ -1482,7 +1480,7 @@ def _purge_history_txn(self, txn, room_id, token_str, delete_local_events):

# We do joins against events_to_purge for e.g. calculating state
# groups to purge, etc., so lets make an index.
txn.execute("CREATE INDEX events_to_purge_id" " ON events_to_purge(event_id)")
txn.execute("CREATE INDEX events_to_purge_id ON events_to_purge(event_id)")

txn.execute("SELECT event_id, should_delete FROM events_to_purge")
event_rows = txn.fetchall()
2 changes: 1 addition & 1 deletion synapse/storage/data_stores/main/filtering.py
@@ -55,7 +55,7 @@ def _do_txn(txn):
if filter_id_response is not None:
return filter_id_response[0]

sql = "SELECT MAX(filter_id) FROM user_filters " "WHERE user_id = ?"
sql = "SELECT MAX(filter_id) FROM user_filters WHERE user_id = ?"
txn.execute(sql, (user_localpart,))
max_id = txn.fetchone()[0]
if max_id is None:
6 changes: 3 additions & 3 deletions synapse/storage/data_stores/main/media_repository.py
@@ -337,7 +337,7 @@ def delete_url_cache(self, media_ids):
if len(media_ids) == 0:
return

sql = "DELETE FROM local_media_repository_url_cache" " WHERE media_id = ?"
sql = "DELETE FROM local_media_repository_url_cache WHERE media_id = ?"

def _delete_url_cache_txn(txn):
txn.executemany(sql, [(media_id,) for media_id in media_ids])
@@ -365,11 +365,11 @@ def delete_url_cache_media(self, media_ids):
return

def _delete_url_cache_media_txn(txn):
sql = "DELETE FROM local_media_repository" " WHERE media_id = ?"
sql = "DELETE FROM local_media_repository WHERE media_id = ?"

txn.executemany(sql, [(media_id,) for media_id in media_ids])

sql = "DELETE FROM local_media_repository_thumbnails" " WHERE media_id = ?"
sql = "DELETE FROM local_media_repository_thumbnails WHERE media_id = ?"

txn.executemany(sql, [(media_id,) for media_id in media_ids])

4 changes: 1 addition & 3 deletions synapse/storage/data_stores/main/registration.py
@@ -399,9 +399,7 @@ def get_users_by_id_case_insensitive(self, user_id):
"""

def f(txn):
sql = (
"SELECT name, password_hash FROM users" " WHERE lower(name) = lower(?)"
)
sql = "SELECT name, password_hash FROM users WHERE lower(name) = lower(?)"
txn.execute(sql, (user_id,))
return dict(txn)

2 changes: 1 addition & 1 deletion synapse/storage/data_stores/main/stream.py
@@ -616,7 +616,7 @@ def get_max_topological_token(self, room_id, stream_key):

def _get_max_topological_txn(self, txn, room_id):
txn.execute(
"SELECT MAX(topological_ordering) FROM events" " WHERE room_id = ?",
"SELECT MAX(topological_ordering) FROM events WHERE room_id = ?",
(room_id,),
)

4 changes: 1 addition & 3 deletions synapse/storage/data_stores/main/tags.py
@@ -83,9 +83,7 @@ def get_all_updated_tags_txn(txn):
)

def get_tag_content(txn, tag_ids):
sql = (
"SELECT tag, content" " FROM room_tags" " WHERE user_id=? AND room_id=?"
)
sql = "SELECT tag, content FROM room_tags WHERE user_id=? AND room_id=?"
results = []
for stream_id, user_id, room_id in tag_ids:
txn.execute(sql, (user_id, room_id))
2 changes: 1 addition & 1 deletion synapse/storage/prepare_database.py
@@ -447,7 +447,7 @@ def _apply_module_schema_files(cur, database_engine, modname, names_and_streams)
# Mark as done.
cur.execute(
database_engine.convert_param_style(
"INSERT INTO applied_module_schemas (module_name, file)" " VALUES (?,?)"
"INSERT INTO applied_module_schemas (module_name, file) VALUES (?,?)"
),
(modname, name),
)
9 changes: 6 additions & 3 deletions synapse/streams/config.py
@@ -88,9 +88,12 @@ def from_request(cls, request, raise_invalid_params=True, default_limit=None):
raise SynapseError(400, "Invalid request.")

def __repr__(self):
return (
"PaginationConfig(from_tok=%r, to_tok=%r," " direction=%r, limit=%r)"
) % (self.from_token, self.to_token, self.direction, self.limit)
return ("PaginationConfig(from_tok=%r, to_tok=%r, direction=%r, limit=%r)") % (
self.from_token,
self.to_token,
self.direction,
self.limit,
)

def get_source_config(self, source_name):
keyname = "%s_key" % source_name
