
Commit 6b7ce1d

Remove some unused database functions. (#8085)

1 parent 894dae7 commit 6b7ce1d

7 files changed: +19 -244 lines changed

changelog.d/8085.misc

Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
+Remove some unused database functions.

synapse/storage/databases/main/event_federation.py

Lines changed: 0 additions & 13 deletions
@@ -257,11 +257,6 @@ def _get_auth_chain_difference_txn(
         # Return all events where not all sets can reach them.
         return {eid for eid, n in event_to_missing_sets.items() if n}
 
-    def get_oldest_events_in_room(self, room_id):
-        return self.db_pool.runInteraction(
-            "get_oldest_events_in_room", self._get_oldest_events_in_room_txn, room_id
-        )
-
     def get_oldest_events_with_depth_in_room(self, room_id):
         return self.db_pool.runInteraction(
             "get_oldest_events_with_depth_in_room",
@@ -303,14 +298,6 @@ async def get_max_depth_of(self, event_ids: List[str]) -> int:
         else:
             return max(row["depth"] for row in rows)
 
-    def _get_oldest_events_in_room_txn(self, txn, room_id):
-        return self.db_pool.simple_select_onecol_txn(
-            txn,
-            table="event_backward_extremities",
-            keyvalues={"room_id": room_id},
-            retcol="event_id",
-        )
-
     def get_prev_events_for_room(self, room_id: str):
         """
         Gets a subset of the current forward extremities in the given room.

synapse/storage/databases/main/events_worker.py

Lines changed: 1 addition & 169 deletions
@@ -43,7 +43,7 @@
 from synapse.storage.database import DatabasePool
 from synapse.storage.util.id_generators import StreamIdGenerator
 from synapse.types import get_domain_from_id
-from synapse.util.caches.descriptors import Cache, cached, cachedInlineCallbacks
+from synapse.util.caches.descriptors import Cache, cachedInlineCallbacks
 from synapse.util.iterutils import batch_iter
 from synapse.util.metrics import Measure
 
@@ -137,42 +137,6 @@ def get_received_ts(self, event_id):
             desc="get_received_ts",
         )
 
-    def get_received_ts_by_stream_pos(self, stream_ordering):
-        """Given a stream ordering get an approximate timestamp of when it
-        happened.
-
-        This is done by simply taking the received ts of the first event that
-        has a stream ordering greater than or equal to the given stream pos.
-        If none exists returns the current time, on the assumption that it must
-        have happened recently.
-
-        Args:
-            stream_ordering (int)
-
-        Returns:
-            Deferred[int]
-        """
-
-        def _get_approximate_received_ts_txn(txn):
-            sql = """
-                SELECT received_ts FROM events
-                WHERE stream_ordering >= ?
-                LIMIT 1
-            """
-
-            txn.execute(sql, (stream_ordering,))
-            row = txn.fetchone()
-            if row and row[0]:
-                ts = row[0]
-            else:
-                ts = self.clock.time_msec()
-
-            return ts
-
-        return self.db_pool.runInteraction(
-            "get_approximate_received_ts", _get_approximate_received_ts_txn
-        )
-
     @defer.inlineCallbacks
     def get_event(
         self,
@@ -923,36 +887,6 @@ def have_seen_events_txn(txn, chunk):
             )
         return results
 
-    def _get_total_state_event_counts_txn(self, txn, room_id):
-        """
-        See get_total_state_event_counts.
-        """
-        # We join against the events table as that has an index on room_id
-        sql = """
-            SELECT COUNT(*) FROM state_events
-            INNER JOIN events USING (room_id, event_id)
-            WHERE room_id=?
-        """
-        txn.execute(sql, (room_id,))
-        row = txn.fetchone()
-        return row[0] if row else 0
-
-    def get_total_state_event_counts(self, room_id):
-        """
-        Gets the total number of state events in a room.
-
-        Args:
-            room_id (str)
-
-        Returns:
-            Deferred[int]
-        """
-        return self.db_pool.runInteraction(
-            "get_total_state_event_counts",
-            self._get_total_state_event_counts_txn,
-            room_id,
-        )
-
     def _get_current_state_event_counts_txn(self, txn, room_id):
         """
         See get_current_state_event_counts.
@@ -1222,97 +1156,6 @@ def get_deltas_for_stream_id_txn(txn, stream_id):
 
         return rows, to_token, True
 
-    @cached(num_args=5, max_entries=10)
-    def get_all_new_events(
-        self,
-        last_backfill_id,
-        last_forward_id,
-        current_backfill_id,
-        current_forward_id,
-        limit,
-    ):
-        """Get all the new events that have arrived at the server either as
-        new events or as backfilled events"""
-        have_backfill_events = last_backfill_id != current_backfill_id
-        have_forward_events = last_forward_id != current_forward_id
-
-        if not have_backfill_events and not have_forward_events:
-            return defer.succeed(AllNewEventsResult([], [], [], [], []))
-
-        def get_all_new_events_txn(txn):
-            sql = (
-                "SELECT e.stream_ordering, e.event_id, e.room_id, e.type,"
-                " state_key, redacts"
-                " FROM events AS e"
-                " LEFT JOIN redactions USING (event_id)"
-                " LEFT JOIN state_events USING (event_id)"
-                " WHERE ? < stream_ordering AND stream_ordering <= ?"
-                " ORDER BY stream_ordering ASC"
-                " LIMIT ?"
-            )
-            if have_forward_events:
-                txn.execute(sql, (last_forward_id, current_forward_id, limit))
-                new_forward_events = txn.fetchall()
-
-                if len(new_forward_events) == limit:
-                    upper_bound = new_forward_events[-1][0]
-                else:
-                    upper_bound = current_forward_id
-
-                sql = (
-                    "SELECT event_stream_ordering, event_id, state_group"
-                    " FROM ex_outlier_stream"
-                    " WHERE ? > event_stream_ordering"
-                    " AND event_stream_ordering >= ?"
-                    " ORDER BY event_stream_ordering DESC"
-                )
-                txn.execute(sql, (last_forward_id, upper_bound))
-                forward_ex_outliers = txn.fetchall()
-            else:
-                new_forward_events = []
-                forward_ex_outliers = []
-
-            sql = (
-                "SELECT -e.stream_ordering, e.event_id, e.room_id, e.type,"
-                " state_key, redacts"
-                " FROM events AS e"
-                " LEFT JOIN redactions USING (event_id)"
-                " LEFT JOIN state_events USING (event_id)"
-                " WHERE ? > stream_ordering AND stream_ordering >= ?"
-                " ORDER BY stream_ordering DESC"
-                " LIMIT ?"
-            )
-            if have_backfill_events:
-                txn.execute(sql, (-last_backfill_id, -current_backfill_id, limit))
-                new_backfill_events = txn.fetchall()
-
-                if len(new_backfill_events) == limit:
-                    upper_bound = new_backfill_events[-1][0]
-                else:
-                    upper_bound = current_backfill_id
-
-                sql = (
-                    "SELECT -event_stream_ordering, event_id, state_group"
-                    " FROM ex_outlier_stream"
-                    " WHERE ? > event_stream_ordering"
-                    " AND event_stream_ordering >= ?"
-                    " ORDER BY event_stream_ordering DESC"
-                )
-                txn.execute(sql, (-last_backfill_id, -upper_bound))
-                backward_ex_outliers = txn.fetchall()
-            else:
-                new_backfill_events = []
-                backward_ex_outliers = []
-
-            return AllNewEventsResult(
-                new_forward_events,
-                new_backfill_events,
-                forward_ex_outliers,
-                backward_ex_outliers,
-            )
-
-        return self.db_pool.runInteraction("get_all_new_events", get_all_new_events_txn)
-
     async def is_event_after(self, event_id1, event_id2):
         """Returns True if event_id1 is after event_id2 in the stream
         """
@@ -1357,14 +1200,3 @@ def get_next_event_to_expire_txn(txn):
         return self.db_pool.runInteraction(
             desc="get_next_event_to_expire", func=get_next_event_to_expire_txn
        )
-
-
-AllNewEventsResult = namedtuple(
-    "AllNewEventsResult",
-    [
-        "new_forward_events",
-        "new_backfill_events",
-        "forward_ex_outliers",
-        "backward_ex_outliers",
-    ],
-)
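
For reference, the removed get_received_ts_by_stream_pos documented a simple approximation: take the received_ts of the first event at or past the given stream ordering, and fall back to "now" when nothing is found. A minimal standalone sketch of that idea, assuming a plain sqlite3 connection and an events table with stream_ordering and received_ts columns (illustrative only, not part of this change):

import sqlite3
import time


def approximate_received_ts(conn: sqlite3.Connection, stream_ordering: int) -> int:
    """Approximate when a stream position happened, in milliseconds.

    Illustrative sketch only; the connection and table layout are assumed.
    """
    # Take the received_ts of the first event at or after the given position.
    row = conn.execute(
        "SELECT received_ts FROM events"
        " WHERE stream_ordering >= ?"
        " ORDER BY stream_ordering ASC"
        " LIMIT 1",
        (stream_ordering,),
    ).fetchone()
    if row and row[0]:
        return row[0]
    # Nothing at or after that position: assume it happened recently.
    return int(time.time() * 1000)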

synapse/storage/databases/main/presence.py

Lines changed: 0 additions & 21 deletions
@@ -157,24 +157,3 @@ async def get_presence_for_users(self, user_ids):
 
     def get_current_presence_token(self):
         return self._presence_id_gen.get_current_token()
-
-    def allow_presence_visible(self, observed_localpart, observer_userid):
-        return self.db_pool.simple_insert(
-            table="presence_allow_inbound",
-            values={
-                "observed_user_id": observed_localpart,
-                "observer_user_id": observer_userid,
-            },
-            desc="allow_presence_visible",
-            or_ignore=True,
-        )
-
-    def disallow_presence_visible(self, observed_localpart, observer_userid):
-        return self.db_pool.simple_delete_one(
-            table="presence_allow_inbound",
-            keyvalues={
-                "observed_user_id": observed_localpart,
-                "observer_user_id": observer_userid,
-            },
-            desc="disallow_presence_visible",
-        )

synapse/storage/databases/main/registration.py

Lines changed: 0 additions & 37 deletions
@@ -1345,43 +1345,6 @@ def validate_threepid_session_txn(txn):
             "validate_threepid_session_txn", validate_threepid_session_txn
         )
 
-    def upsert_threepid_validation_session(
-        self,
-        medium,
-        address,
-        client_secret,
-        send_attempt,
-        session_id,
-        validated_at=None,
-    ):
-        """Upsert a threepid validation session
-        Args:
-            medium (str): The medium of the 3PID
-            address (str): The address of the 3PID
-            client_secret (str): A unique string provided by the client to
-                help identify this validation attempt
-            send_attempt (int): The latest send_attempt on this session
-            session_id (str): The id of this validation session
-            validated_at (int|None): The unix timestamp in milliseconds of
-                when the session was marked as valid
-        """
-        insertion_values = {
-            "medium": medium,
-            "address": address,
-            "client_secret": client_secret,
-        }
-
-        if validated_at:
-            insertion_values["validated_at"] = validated_at
-
-        return self.db_pool.simple_upsert(
-            table="threepid_validation_session",
-            keyvalues={"session_id": session_id},
-            values={"last_send_attempt": send_attempt},
-            insertion_values=insertion_values,
-            desc="upsert_threepid_validation_session",
-        )
-
     def start_or_continue_validation_session(
         self,
         medium,
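
For reference, the removed upsert_threepid_validation_session deferred to simple_upsert keyed on session_id: last_send_attempt is always updated, while the 3PID details only take effect when the row is first inserted. A rough standalone sketch of that behaviour, assuming a sqlite3 connection, a UNIQUE constraint on session_id, and SQLite's ON CONFLICT upsert syntax (not Synapse's actual simple_upsert implementation):

import sqlite3
from typing import Optional


def upsert_validation_session(
    conn: sqlite3.Connection,
    medium: str,
    address: str,
    client_secret: str,
    send_attempt: int,
    session_id: str,
    validated_at: Optional[int] = None,
) -> None:
    # Illustrative sketch only: insert a new session row, or just bump
    # last_send_attempt when a row with this session_id already exists.
    # As with the removed insertion_values above, validated_at and the 3PID
    # details are only written on first insert.
    conn.execute(
        """
        INSERT INTO threepid_validation_session
            (session_id, medium, address, client_secret, last_send_attempt, validated_at)
        VALUES (?, ?, ?, ?, ?, ?)
        ON CONFLICT (session_id) DO UPDATE SET
            last_send_attempt = excluded.last_send_attempt
        """,
        (session_id, medium, address, client_secret, send_attempt, validated_at),
    )
    conn.commit()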

synapse/storage/databases/main/room.py

Lines changed: 0 additions & 4 deletions
@@ -35,10 +35,6 @@
 logger = logging.getLogger(__name__)
 
 
-OpsLevel = collections.namedtuple(
-    "OpsLevel", ("ban_level", "kick_level", "redact_level")
-)
-
 RatelimitOverride = collections.namedtuple(
     "RatelimitOverride", ("messages_per_second", "burst_count")
 )

Lines changed: 17 additions & 0 deletions
@@ -0,0 +1,17 @@
+/* Copyright 2020 The Matrix.org Foundation C.I.C.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+-- This table is no longer used.
+DROP TABLE IF EXISTS presence_allow_inbound;
