This repository was archived by the owner on Apr 26, 2024. It is now read-only.
Master #15881 (Closed)

Changes from all commits
31 commits
5cfdae4
Fixing SQL Query
hi-anshul May 3, 2023
cd4359a
Inline SQL queries using boolean parameters #15515
hi-anshul May 3, 2023
3857478
Update purge_events.py
hi-anshul May 3, 2023
2ea3b58
Merge branch 'develop' into develop
hi-anshul May 3, 2023
44aedd8
Merge branch 'matrix-org:develop' into develop
hi-anshul May 22, 2023
7d60307
Updated all Inline SQL queries using boolean parameters as supported …
hi-anshul May 22, 2023
36c4012
Updated all Inline SQL queries using boolean parameters as supported …
hi-anshul May 22, 2023
e5c5d35
Merge branch 'develop' into develop
hi-anshul May 22, 2023
07e90d4
Merge branch 'matrix-org:develop' into develop
hi-anshul Jun 16, 2023
c29c05f
Merge branch 'matrix-org:develop' into develop
hi-anshul Jun 19, 2023
be3fbc1
Merge branch 'matrix-org:develop' into develop
hi-anshul Jun 19, 2023
8737a24
Updated Inline SQL queries using boolean parameters as supported in S…
hi-anshul Jun 19, 2023
24f4de0
Merge branch 'develop' into develop
hi-anshul Jun 20, 2023
7eb4f91
Update 15515.misc
hi-anshul Jun 21, 2023
e5a668d
Merge branch 'matrix-org:develop' into develop
hi-anshul Jun 21, 2023
aa97aa2
Merge branch 'develop' into develop
hi-anshul Jun 21, 2023
f733e25
Update event_federation.py
hi-anshul Jun 23, 2023
7ae7fef
Update stream.py
hi-anshul Jun 23, 2023
b33a4d2
Merge branch 'matrix-org:develop' into develop
hi-anshul Jun 23, 2023
9345361
Bump authlib from 1.2.0 to 1.2.1 (#15864)
dependabot[bot] Jul 3, 2023
aea94ca
Bump importlib-metadata from 6.6.0 to 6.7.0 (#15865)
dependabot[bot] Jul 3, 2023
411ba44
Bump types-pyopenssl from 23.2.0.0 to 23.2.0.1 (#15866)
dependabot[bot] Jul 3, 2023
a587de9
Bump sentry-sdk from 1.25.1 to 1.26.0 (#15867)
dependabot[bot] Jul 3, 2023
53aa26e
Add a timeout that aborts any Postgres statement taking more than 1 h…
reivilibre Jul 3, 2023
cd8b73a
Fix the `devenv up` configuration which was ignoring the config overr…
reivilibre Jul 3, 2023
07d7cbf
devices: use combined ANY clause for faster cleanup (#15861)
pacien Jul 3, 2023
670d590
Pin `pydantic` to <2.0.0 (#15862)
PaarthShah Jul 4, 2023
861752b
Merge branch 'master' into develop
reivilibre Jul 4, 2023
c8e8189
Add not_user_type param to the list accounts admin API (#15844)
weeman1337 Jul 4, 2023
152e015
Merge branch 'matrix-org:develop' into develop
hi-anshul Jul 5, 2023
6a636ad
Updated Inline SQL queries using boolean parameters as supported in S…
hi-anshul Jul 5, 2023
1 change: 1 addition & 0 deletions .gitignore
@@ -34,6 +34,7 @@ __pycache__/
/logs
/media_store/
/uploads
/homeserver-config-overrides.d

# For direnv users
/.envrc
1 change: 1 addition & 0 deletions changelog.d/15515.misc
@@ -0,0 +1 @@
Updated Inline SQL queries using boolean parameters as supported in SQLite 3.27.
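The entry above describes the core pattern of #15515: boolean values that were previously bound as `?` parameters are now written into the SQL as `TRUE`/`FALSE` literals, which Postgres and sufficiently recent SQLite both accept. A minimal before/after sketch, not Synapse code, assuming the interpreter's bundled SQLite is new enough to understand the boolean keywords:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE events (event_id TEXT, outlier BOOLEAN)")
    conn.execute("INSERT INTO events VALUES ('$ev1', 1), ('$ev2', 0)")

    # Before: the boolean is bound as a query parameter.
    rows_param = conn.execute(
        "SELECT event_id FROM events WHERE outlier = ?", (False,)
    ).fetchall()

    # After: the boolean is written as an inline literal, so the query takes
    # one fewer parameter and reads the same on Postgres and SQLite.
    rows_literal = conn.execute(
        "SELECT event_id FROM events WHERE outlier = FALSE"
    ).fetchall()

    assert rows_param == rows_literal == [("$ev2",)]

Inlining the literal is why several argument tuples shrink by one element in the file diffs further down.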
1 change: 1 addition & 0 deletions changelog.d/15844.feature
@@ -0,0 +1 @@
Add `not_user_type` param to the list accounts admin API.
1 change: 1 addition & 0 deletions changelog.d/15853.misc
@@ -0,0 +1 @@
Add a timeout that aborts any Postgres statement taking more than 1 hour.
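For context, Postgres enforces this kind of limit through its `statement_timeout` setting, which can be applied per session. The sketch below only illustrates the general mechanism; the connection string is a placeholder and the exact way Synapse applies the setting is not shown in this diff:

    import psycopg2

    # Hypothetical connection string, for illustration only.
    conn = psycopg2.connect("dbname=synapse user=synapse_user")

    with conn.cursor() as cur:
        # Abort any statement on this session that runs for more than one hour;
        # Postgres interprets a bare integer value as milliseconds.
        one_hour_ms = 60 * 60 * 1000
        cur.execute("SET statement_timeout TO %s", (one_hour_ms,))
    conn.commit()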
1 change: 1 addition & 0 deletions changelog.d/15854.misc
@@ -0,0 +1 @@
Fix the `devenv up` configuration which was ignoring the config overrides.
1 change: 1 addition & 0 deletions changelog.d/15861.misc
@@ -0,0 +1 @@
Optimised cleanup of old entries in device_lists_stream.
3 changes: 3 additions & 0 deletions changelog.d/15862.bugfix
@@ -0,0 +1,3 @@
Pin `pydantic` to ^=1.7.4 to avoid backwards-incompatible API changes from the 2.0.0 release.
Resolves https://github.com/matrix-org/synapse/issues/15858.
Contributed by @PaarthShah.
3 changes: 3 additions & 0 deletions docs/admin_api/user_admin_api.md
@@ -242,6 +242,9 @@ The following parameters should be set in the URL:

- `dir` - Direction of media order. Either `f` for forwards or `b` for backwards.
Setting this value to `b` will reverse the above sort order. Defaults to `f`.
- `not_user_type` - Exclude certain user types, such as bot users, from the request.
Can be provided multiple times. Possible values are `bot`, `support` or "empty string".
"empty string" here means to exclude users without a type.

Caution. The database only has indexes on the columns `name` and `creation_ts`.
This means that if a different sort order is used (`is_guest`, `admin`,
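To illustrate the `not_user_type` parameter documented in the hunk above, here is a hypothetical request against the v2 list accounts admin API; the homeserver URL and access token are placeholders, and the response fields follow the admin API documentation:

    import requests

    BASE_URL = "https://homeserver.example.com"  # placeholder
    ADMIN_TOKEN = "syt_admin_access_token"       # placeholder

    # Exclude bot accounts and accounts without a user type; the parameter is
    # repeated once per value, with "" standing for "no user type set".
    response = requests.get(
        f"{BASE_URL}/_synapse/admin/v2/users",
        headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
        params=[("not_user_type", "bot"), ("not_user_type", "")],
    )
    response.raise_for_status()
    for user in response.json()["users"]:
        print(user["name"], user.get("user_type"))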
2 changes: 1 addition & 1 deletion flake.nix
@@ -178,7 +178,7 @@
EOF
'';
# Start synapse when `devenv up` is run.
processes.synapse.exec = "poetry run python -m synapse.app.homeserver -c homeserver.yaml --config-directory homeserver-config-overrides.d";
processes.synapse.exec = "poetry run python -m synapse.app.homeserver -c homeserver.yaml -c homeserver-config-overrides.d";

# Define the perl modules we require to run SyTest.
#
26 changes: 13 additions & 13 deletions poetry.lock

Some generated files are not rendered by default.

9 changes: 9 additions & 0 deletions synapse/rest/admin/users.py
@@ -28,6 +28,7 @@
parse_integer,
parse_json_object_from_request,
parse_string,
parse_strings_from_args,
)
from synapse.http.site import SynapseRequest
from synapse.rest.admin._base import (
@@ -64,6 +65,9 @@ class UsersRestServletV2(RestServlet):
The parameter `guests` can be used to exclude guest users.
The parameter `deactivated` can be used to include deactivated users.
The parameter `order_by` can be used to order the result.
The parameter `not_user_type` can be used to exclude certain user types.
Possible values are `bot`, `support` or "empty string".
"empty string" here means to exclude users without a type.
"""

def __init__(self, hs: "HomeServer"):
@@ -131,6 +135,10 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:

direction = parse_enum(request, "dir", Direction, default=Direction.FORWARDS)

# twisted.web.server.Request.args is incorrectly defined as Optional[Any]
args: Dict[bytes, List[bytes]] = request.args # type: ignore
not_user_types = parse_strings_from_args(args, "not_user_type")

users, total = await self.store.get_users_paginate(
start,
limit,
@@ -141,6 +149,7 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
order_by,
direction,
approved,
not_user_types,
)

# If support for MSC3866 is not enabled, don't show the approval flag.
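The servlet change reads a repeatable query parameter through `parse_strings_from_args`. The helper below is a simplified stand-in written for illustration only; the real Synapse helper also handles defaults, required parameters and allowed values:

    from typing import Dict, List, Optional

    def parse_strings_from_args_sketch(
        args: Dict[bytes, List[bytes]], name: str
    ) -> Optional[List[str]]:
        # Decode every occurrence of ?name=... from Twisted-style request args.
        values = args.get(name.encode("ascii"))
        if values is None:
            return None
        return [v.decode("utf-8") for v in values]

    # e.g. GET /_synapse/admin/v2/users?not_user_type=bot&not_user_type=
    twisted_args = {b"not_user_type": [b"bot", b""]}
    assert parse_strings_from_args_sketch(twisted_args, "not_user_type") == ["bot", ""]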
37 changes: 37 additions & 0 deletions synapse/storage/databases/main/__init__.py
@@ -19,6 +19,7 @@

from synapse.api.constants import Direction
from synapse.config.homeserver import HomeServerConfig
from synapse.storage._base import make_in_list_sql_clause
from synapse.storage.database import (
DatabasePool,
LoggingDatabaseConnection,
@@ -170,6 +171,7 @@ async def get_users_paginate(
order_by: str = UserSortOrder.NAME.value,
direction: Direction = Direction.FORWARDS,
approved: bool = True,
not_user_types: Optional[List[str]] = None,
) -> Tuple[List[JsonDict], int]:
"""Function to retrieve a paginated list of users from
users list. This will return a json list of users and the
@@ -185,6 +187,7 @@ async def get_users_paginate(
order_by: the sort order of the returned list
direction: sort ascending or descending
approved: whether to include approved users
not_user_types: list of user types to exclude
Returns:
A tuple of a list of mappings from user to information and a count of total users.
"""
@@ -222,6 +225,40 @@ def get_users_paginate_txn(
# be already existing users that we consider as already approved.
filters.append("approved IS FALSE")

if not_user_types:
if len(not_user_types) == 1 and not_user_types[0] == "":
# Only exclude NULL type users
filters.append("user_type IS NOT NULL")
else:
not_user_types_has_empty = False
not_user_types_without_empty = []

for not_user_type in not_user_types:
if not_user_type == "":
not_user_types_has_empty = True
else:
not_user_types_without_empty.append(not_user_type)

not_user_type_clause, not_user_type_args = make_in_list_sql_clause(
self.database_engine,
"u.user_type",
not_user_types_without_empty,
)

if not_user_types_has_empty:
# NULL values should be excluded.
# They evaluate to false > nothing to do here.
filters.append("NOT %s" % (not_user_type_clause))
else:
# NULL values should *not* be excluded.
# Add a special predicate to the query.
filters.append(
"(NOT %s OR %s IS NULL)"
% (not_user_type_clause, "u.user_type")
)

args.extend(not_user_type_args)

where_clause = "WHERE " + " AND ".join(filters) if len(filters) > 0 else ""

sql_base = f"""
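To make the branching in `get_users_paginate_txn` easier to follow, the sketch below mirrors its logic with a simplified stand-in for `make_in_list_sql_clause` (the real helper emits `= ANY (?)` on Postgres; a plain `IN` list is used here):

    from typing import List, Tuple

    def in_list_clause_sketch(column: str, values: List[str]) -> Tuple[str, List[str]]:
        # Simplified stand-in for make_in_list_sql_clause (SQLite-style IN list).
        placeholders = ", ".join("?" for _ in values)
        return f"{column} IN ({placeholders})", list(values)

    def build_not_user_type_filter(not_user_types: List[str]) -> Tuple[str, List[str]]:
        # Mirrors the branching in get_users_paginate_txn, for illustration.
        if len(not_user_types) == 1 and not_user_types[0] == "":
            # Only exclude users whose user_type is unset (NULL).
            return "user_type IS NOT NULL", []

        has_empty = "" in not_user_types
        named_types = [t for t in not_user_types if t != ""]
        clause, args = in_list_clause_sketch("u.user_type", named_types)

        if has_empty:
            # NULL never satisfies NOT (col IN ...), so NULL-typed users are
            # already excluded and no extra predicate is needed.
            return f"NOT {clause}", args
        # Keep NULL-typed users: they should not be swept up by the NOT IN test.
        return f"(NOT {clause} OR u.user_type IS NULL)", args

    clause, args = build_not_user_type_filter(["bot", ""])
    assert clause == "NOT u.user_type IN (?)" and args == ["bot"]

    clause, args = build_not_user_type_filter(["bot", "support"])
    assert clause == "(NOT u.user_type IN (?, ?) OR u.user_type IS NULL)"
    assert args == ["bot", "support"]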
14 changes: 9 additions & 5 deletions synapse/storage/databases/main/devices.py
@@ -1950,12 +1950,16 @@ def _add_device_change_to_stream_txn(

# Delete older entries in the table, as we really only care about
# when the latest change happened.
txn.execute_batch(
"""
cleanup_obsolete_stmt = """
DELETE FROM device_lists_stream
WHERE user_id = ? AND device_id = ? AND stream_id < ?
""",
[(user_id, device_id, min_stream_id) for device_id in device_ids],
WHERE user_id = ? AND stream_id < ? AND %s
"""
device_ids_clause, device_ids_args = make_in_list_sql_clause(
txn.database_engine, "device_id", device_ids
)
txn.execute(
cleanup_obsolete_stmt % (device_ids_clause,),
[user_id, min_stream_id] + device_ids_args,
)

self.db_pool.simple_insert_many_txn(
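The devices change replaces one DELETE per device with a single statement whose device predicate covers every device ID. A runnable sketch of the combined shape against an in-memory SQLite table (illustrative only; Synapse builds the predicate with `make_in_list_sql_clause`, which becomes `= ANY (?)` on Postgres and is approximated here by a plain `IN` list):

    import sqlite3
    from typing import List

    conn = sqlite3.connect(":memory:")
    conn.execute(
        "CREATE TABLE device_lists_stream (user_id TEXT, device_id TEXT, stream_id INTEGER)"
    )
    rows = [
        ("@alice:test", "DEV1", 1),
        ("@alice:test", "DEV1", 5),
        ("@alice:test", "DEV2", 2),
        ("@bob:test", "DEV1", 1),
    ]
    conn.executemany("INSERT INTO device_lists_stream VALUES (?, ?, ?)", rows)

    def cleanup_combined(txn, user_id: str, device_ids: List[str], min_stream_id: int) -> None:
        # One DELETE covering every device, mirroring the shape of the new query.
        placeholders = ", ".join("?" for _ in device_ids)
        txn.execute(
            "DELETE FROM device_lists_stream"
            " WHERE user_id = ? AND stream_id < ?"
            f" AND device_id IN ({placeholders})",
            [user_id, min_stream_id, *device_ids],
        )

    cleanup_combined(conn, "@alice:test", ["DEV1", "DEV2"], min_stream_id=5)
    remaining = conn.execute(
        "SELECT user_id, device_id, stream_id FROM device_lists_stream ORDER BY stream_id"
    ).fetchall()
    # Only @alice's entries below stream_id 5 for DEV1/DEV2 were removed.
    assert remaining == [("@bob:test", "DEV1", 1), ("@alice:test", "DEV1", 5)]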
3 changes: 1 addition & 2 deletions synapse/storage/databases/main/event_federation.py
@@ -831,7 +831,7 @@ def get_backfill_points_in_room_txn(
* because the schema change is in a background update, it's not
* necessarily safe to assume that it will have been completed.
*/
AND edge.is_state is ? /* False */
AND edge.is_state is FALSE
/**
* We only want backwards extremities that are older than or at
* the same position of the given `current_depth` (where older
@@ -874,7 +874,6 @@ def get_backfill_points_in_room_txn(
sql,
(
room_id,
False,
current_depth,
self._clock.time_msec(),
BACKFILL_EVENT_EXPONENTIAL_BACKOFF_MAXIMUM_DOUBLING_STEPS,
12 changes: 6 additions & 6 deletions synapse/storage/databases/main/events.py
@@ -1455,8 +1455,8 @@ def _update_outliers_txn(
},
)

sql = "UPDATE events SET outlier = ? WHERE event_id = ?"
txn.execute(sql, (False, event.event_id))
sql = "UPDATE events SET outlier = FALSE WHERE event_id = ?"
txn.execute(sql, (event.event_id,))

# Update the event_backward_extremities table now that this
# event isn't an outlier any more.
@@ -1549,13 +1549,13 @@ def event_dict(event: EventBase) -> JsonDict:
for event, _ in events_and_contexts
if not event.internal_metadata.is_redacted()
]
sql = "UPDATE redactions SET have_censored = ? WHERE "
sql = "UPDATE redactions SET have_censored = FALSE WHERE "
clause, args = make_in_list_sql_clause(
self.database_engine,
"redacts",
unredacted_events,
)
txn.execute(sql + clause, [False] + args)
txn.execute(sql + clause, args)

self.db_pool.simple_insert_many_txn(
txn,
@@ -2318,14 +2318,14 @@ def _update_backward_extremeties(
" SELECT 1 FROM events"
" LEFT JOIN event_edges edge"
" ON edge.event_id = events.event_id"
" WHERE events.event_id = ? AND events.room_id = ? AND (events.outlier = ? OR edge.event_id IS NULL)"
" WHERE events.event_id = ? AND events.room_id = ? AND (events.outlier = FALSE OR edge.event_id IS NULL)"
" )"
)

txn.execute_batch(
query,
[
(e_id, ev.room_id, e_id, ev.room_id, e_id, ev.room_id, False)
(e_id, ev.room_id, e_id, ev.room_id, e_id, ev.room_id)
for ev in events
for e_id in ev.prev_event_ids()
if not ev.internal_metadata.is_outlier()
11 changes: 5 additions & 6 deletions synapse/storage/databases/main/purge_events.py
@@ -249,12 +249,11 @@ def _purge_history_txn(
# Mark all state and own events as outliers
logger.info("[purge] marking remaining events as outliers")
txn.execute(
"UPDATE events SET outlier = ?"
" WHERE event_id IN ("
" SELECT event_id FROM events_to_purge "
" WHERE NOT should_delete"
")",
(True,),
"UPDATE events SET outlier = TRUE"
" WHERE event_id IN ("
" SELECT event_id FROM events_to_purge "
" WHERE NOT should_delete"
")"
)

# synapse tries to take out an exclusive lock on room_depth whenever it
6 changes: 3 additions & 3 deletions synapse/storage/databases/main/push_rule.py
@@ -560,19 +560,19 @@ def _upsert_push_rule_txn(
if isinstance(self.database_engine, PostgresEngine):
sql = """
INSERT INTO push_rules_enable (id, user_name, rule_id, enabled)
VALUES (?, ?, ?, ?)
VALUES (?, ?, ?, TRUE)
ON CONFLICT DO NOTHING
"""
elif isinstance(self.database_engine, Sqlite3Engine):
sql = """
INSERT OR IGNORE INTO push_rules_enable (id, user_name, rule_id, enabled)
VALUES (?, ?, ?, ?)
VALUES (?, ?, ?, TRUE)
"""
else:
raise RuntimeError("Unknown database engine")

new_enable_id = self._push_rules_enable_id_gen.get_next()
txn.execute(sql, (new_enable_id, user_id, rule_id, 1))
txn.execute(sql, (new_enable_id, user_id, rule_id))

async def delete_push_rule(self, user_id: str, rule_id: str) -> None:
"""
4 changes: 2 additions & 2 deletions synapse/storage/databases/main/registration.py
@@ -454,9 +454,9 @@ def select_users_txn(
) -> List[Tuple[str, int]]:
sql = (
"SELECT user_id, expiration_ts_ms FROM account_validity"
" WHERE email_sent = ? AND (expiration_ts_ms - ?) <= ?"
" WHERE email_sent = FALSE AND (expiration_ts_ms - ?) <= ?"
)
values = [False, now_ms, renew_at]
values = [now_ms, renew_at]
txn.execute(sql, values)
return cast(List[Tuple[str, int]], txn.fetchall())

8 changes: 4 additions & 4 deletions synapse/storage/databases/main/room.py
@@ -936,11 +936,11 @@ def _get_media_mxcs_in_room_txn(
JOIN event_json USING (room_id, event_id)
WHERE room_id = ?
%(where_clause)s
AND contains_url = ? AND outlier = ?
AND contains_url = TRUE AND outlier = FALSE
ORDER BY stream_ordering DESC
LIMIT ?
"""
txn.execute(sql % {"where_clause": ""}, (room_id, True, False, 100))
txn.execute(sql % {"where_clause": ""}, (room_id, 100))

local_media_mxcs = []
remote_media_mxcs = []
@@ -1086,9 +1086,9 @@ def _quarantine_media_txn(

# set quarantine
if quarantined_by is not None:
sql += "AND safe_from_quarantine = ?"
sql += "AND safe_from_quarantine = FALSE"
txn.executemany(
sql, [(quarantined_by, media_id, False) for media_id in local_mxcs]
sql, [(quarantined_by, media_id) for media_id in local_mxcs]
)
# remove from quarantine
else:
2 changes: 1 addition & 1 deletion synapse/storage/databases/main/stream.py
@@ -1475,7 +1475,7 @@ def _paginate_room_events_txn(
event.topological_ordering, event.stream_ordering
FROM events AS event
%(join_clause)s
WHERE event.outlier = ? AND event.room_id = ? AND %(bounds)s
WHERE event.outlier = FALSE AND event.room_id = ? AND %(bounds)s
ORDER BY event.topological_ordering %(order)s,
event.stream_ordering %(order)s LIMIT ?
""" % {