Support HS unbinding 3pids #67
Changes from 25 commits
[Changed file 1]
@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-

# Copyright 2014 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -28,7 +29,18 @@ def __init__(self, syd):
        logger.info("Using DB file %s", dbFilePath)

        self.db = sqlite3.connect(dbFilePath)
        curVer = self._getSchemaVersion()

        # We always run the schema files if the version is zero: either the db is
        # completely empty and schema-less, or it has the v0 schema, which is safe
        # to replay the schema files over. The files in the sql directory are the
        # v0 schema, so a new installation will start as v0 and then be upgraded
        # to the current version.
        if curVer == 0:
            self._createSchema()
        self._upgradeSchema()

    def _createSchema(self):
        logger.info("Running schema files...")
        schemaDir = os.path.dirname(__file__)

        c = self.db.cursor()
@@ -47,3 +59,89 @@ def __init__(self, syd):

        c.close()
        self.db.commit()

    def _upgradeSchema(self):
        curVer = self._getSchemaVersion()

        if curVer < 1:
            cur = self.db.cursor()

            # add autoincrement to the primary key of local_threepid_associations to ensure ids are never re-used,
            # allow the mxid column to be null to represent the deletion of a binding,
            # and remove not null constraints on ts, notBefore and notAfter (again, for when a binding has been
            # deleted and these wouldn't be very meaningful)
            logger.info("Migrating schema from v0 to v1")
            cur.execute("DROP INDEX IF EXISTS medium_address")
            cur.execute("DROP INDEX IF EXISTS local_threepid_medium_address")
            cur.execute("ALTER TABLE local_threepid_associations RENAME TO old_local_threepid_associations")
            cur.execute(
                "CREATE TABLE local_threepid_associations (id integer primary key autoincrement, "
                "medium varchar(16) not null, "
                "address varchar(256) not null, "
                "mxid varchar(256), "
                "ts integer, "
                "notBefore bigint, "
                "notAfter bigint)"
            )

Review comment: triple-quotes are totally a thing btw
Review comment: (for reference, doesn't need changing now)
Reply: oh true, although istr synapse used separate single line quotes. whatever.

            cur.execute(
                "INSERT INTO local_threepid_associations (medium, address, mxid, ts, notBefore, notAfter) "
                "SELECT medium, address, mxid, ts, notBefore, notAfter FROM old_local_threepid_associations"
            )
            cur.execute(
                "CREATE UNIQUE INDEX local_threepid_medium_address on local_threepid_associations(medium, address)"
            )
            cur.execute("DROP TABLE old_local_threepid_associations")

            # same autoincrement for global_threepid_associations (fields stay non-nullable because we don't need
            # entries in this table for deletions; we can just delete the rows)
            cur.execute("DROP INDEX IF EXISTS global_threepid_medium_address")
            cur.execute("DROP INDEX IF EXISTS global_threepid_medium_lower_address")
            cur.execute("DROP INDEX IF EXISTS global_threepid_originServer_originId")
            cur.execute("DROP INDEX IF EXISTS medium_lower_address")
            cur.execute("DROP INDEX IF EXISTS threepid_originServer_originId")
            cur.execute("ALTER TABLE global_threepid_associations RENAME TO old_global_threepid_associations")
            cur.execute(
                "CREATE TABLE IF NOT EXISTS global_threepid_associations "
                "(id integer primary key autoincrement, "
                "medium varchar(16) not null, "
                "address varchar(256) not null, "
                "mxid varchar(256) not null, "
                "ts integer not null, "
                "notBefore bigint not null, "
                "notAfter integer not null, "
                "originServer varchar(255) not null, "
                "originId integer not null, "
                "sgAssoc text not null)"
            )
            cur.execute(
                "INSERT INTO global_threepid_associations "
                "(medium, address, mxid, ts, notBefore, notAfter, originServer, originId, sgAssoc) "
                "SELECT medium, address, mxid, ts, notBefore, notAfter, originServer, originId, sgAssoc "
                "FROM old_global_threepid_associations"
            )
            cur.execute("CREATE INDEX global_threepid_medium_address on global_threepid_associations (medium, address)")
            cur.execute(
                "CREATE INDEX global_threepid_medium_lower_address on "
                "global_threepid_associations (medium, lower(address))"
            )
            cur.execute(
                "CREATE UNIQUE INDEX global_threepid_originServer_originId on "
                "global_threepid_associations (originServer, originId)"
            )
            cur.execute("DROP TABLE old_global_threepid_associations")
            self.db.commit()
            logger.info("v0 -> v1 schema migration complete")
            self._setSchemaVersion(1)

    def _getSchemaVersion(self):
        cur = self.db.cursor()
        res = cur.execute("PRAGMA user_version")
        row = cur.fetchone()
        return row[0]

    def _setSchemaVersion(self, ver):
        cur = self.db.cursor()
        # NB. pragma doesn't support variable substitution so we
        # do it in python (as a decimal so we don't risk SQL injection)
        res = cur.execute("PRAGMA user_version = %d" % (ver,))
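Not part of the diff: a minimal sketch of the PRAGMA user_version pattern used above, run with the stdlib sqlite3 module against a throwaway in-memory database. It also illustrates why _setSchemaVersion formats the version as a decimal: PRAGMA statements don't accept bound parameters.

# Illustrative sketch only: PRAGMA user_version with plain sqlite3.
import sqlite3

db = sqlite3.connect(":memory:")
cur = db.cursor()

print(cur.execute("PRAGMA user_version").fetchone()[0])  # 0 on a fresh database

# PRAGMA doesn't accept "?" placeholders, hence the integer formatting.
cur.execute("PRAGMA user_version = %d" % (1,))
print(cur.execute("PRAGMA user_version").fetchone()[0])  # 1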
[Changed file 2]
@@ -19,6 +19,10 @@
from sydent.threepid import ThreepidAssociation, threePidAssocFromDict

import json
import logging


logger = logging.getLogger(__name__)


class LocalAssociationStore:
@@ -60,6 +64,39 @@ def getAssociationsAfterId(self, afterId, limit):

        return (assocs, maxId)

    def removeAssociation(self, threepid, mxid):
        cur = self.sydent.db.cursor()

        # Check to see if we have any matching associations first.
        # We use a REPLACE INTO because we need the resulting row to have
        # a new ID (such that we know it's a new change that needs to be
        # replicated), so there's no need to insert a deletion row if there's
        # nothing to delete.
        cur.execute(
            "SELECT COUNT(*) FROM local_threepid_associations "
            "WHERE medium = ? AND address = ? AND mxid = ?",
            (threepid['medium'], threepid['address'], mxid)
        )

Review comment: why do we bother with this, ooi?
Reply: Because it's a REPLACE INTO, so we don't need to insert an empty record if there's nothing there to start with.

        row = cur.fetchone()
        if row[0] > 0:
            ts = time_msec()
            cur.execute(
                "REPLACE INTO local_threepid_associations "
                "('medium', 'address', 'mxid', 'ts', 'notBefore', 'notAfter') "
                "values (?, ?, NULL, ?, null, null)",
                (threepid['medium'], threepid['address'], ts),
            )
            logger.info(
                "Deleting local assoc for %s/%s/%s replaced %d rows",
                threepid['medium'], threepid['address'], mxid, cur.rowcount,
            )
            self.sydent.db.commit()
        else:
            logger.info(
                "No local assoc found for %s/%s/%s",
                threepid['medium'], threepid['address'], mxid,
            )

class GlobalAssociationStore:
    def __init__(self, sydent):
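Not part of the diff: a minimal sketch of the tombstone behaviour LocalAssociationStore.removeAssociation relies on, against a throwaway in-memory copy of the v1 local_threepid_associations table. The point is that REPLACE INTO hits the unique (medium, address) index, drops the old binding, and writes a deletion row with mxid NULL and a fresh autoincrement id, which is what lets the deletion replicate. The address and mxid values are made up.

# Illustrative sketch only: REPLACE INTO as a deletion tombstone.
import sqlite3

db = sqlite3.connect(":memory:")
cur = db.cursor()
cur.execute(
    "CREATE TABLE local_threepid_associations "
    "(id integer primary key autoincrement, medium varchar(16) not null, "
    "address varchar(256) not null, mxid varchar(256), ts integer, "
    "notBefore bigint, notAfter bigint)"
)
cur.execute(
    "CREATE UNIQUE INDEX local_threepid_medium_address "
    "ON local_threepid_associations(medium, address)"
)

# An existing binding (id 1).
cur.execute(
    "INSERT INTO local_threepid_associations (medium, address, mxid, ts) VALUES (?, ?, ?, ?)",
    ("email", "bob@example.com", "@bob:example.org", 1000),
)

# "Delete" it: the unique index makes REPLACE remove the old row and write a
# tombstone with mxid NULL and a new autoincrement id.
cur.execute(
    "REPLACE INTO local_threepid_associations (medium, address, mxid, ts) VALUES (?, ?, NULL, ?)",
    ("email", "bob@example.com", 2000),
)
print(cur.execute("SELECT id, mxid FROM local_threepid_associations").fetchall())
# [(2, None)] -- one tombstone row, carrying a fresh id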
@@ -164,3 +201,16 @@ def lastIdFromServer(self, server):
            return None

        return row[0]

    def removeAssociation(self, medium, address):
        cur = self.sydent.db.cursor()
        cur.execute(
            "DELETE FROM global_threepid_associations WHERE "
            "medium = ? AND address = ?",
            (medium, address),
        )
        logger.info(
            "Deleted %d rows from global associations for %s/%s",
            cur.rowcount, medium, address,
        )
        self.sydent.db.commit()
[Changed file 3]

@@ -0,0 +1,184 @@
# -*- coding: utf-8 -*-

# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import time

from twisted.internet import defer
from twisted.names.error import DNSNameError
import twisted.names.client
import twisted.names.dns
from unpaddedbase64 import decode_base64
import signedjson.sign
import signedjson.key
from signedjson.sign import SignatureVerifyException

from sydent.http.httpclient import FederationHttpClient


logger = logging.getLogger(__name__)

class NoAuthenticationError(Exception):
    """
    Raised when no signature is provided that could be authenticated
    """
    pass

Review comment: could this have a docstring describing what it represents please
Reply: done

class Verifier(object):
    """
    Verifies signed json blobs from Matrix Homeservers by finding the
    homeserver's address, contacting it, requesting its keys and
    verifying that the signature on the json blob matches.
    """
    def __init__(self, sydent):
        self.sydent = sydent
        # Cache of server keys. These are cached until the 'valid_until_ts' time
        # in the result.
        self.cache = {
            # server_name: <result from keys query>,
        }

    @defer.inlineCallbacks
    def _getKeysForServer(self, server_name):
        """Get the signing key data from a home server.
        """
        if server_name in self.cache:
            cached = self.cache[server_name]
            now = int(time.time() * 1000)
            if cached['valid_until_ts'] > now:
                defer.returnValue(self.cache[server_name]['verify_keys'])

        client = FederationHttpClient(self.sydent)
        result = yield client.get_json("https://%s/_matrix/key/v2/server/" % server_name)
        if 'verify_keys' not in result:
            raise SignatureVerifyException("No key found in response")

        if 'valid_until_ts' in result:
            # Don't cache anything without a valid_until_ts or we wouldn't
            # know when to expire it.
            logger.info("Got keys for %s: caching until %s", server_name, result['valid_until_ts'])
            self.cache[server_name] = result

        defer.returnValue(result['verify_keys'])
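For context (not part of the diff), the response _getKeysForServer consumes comes from the Matrix server key API and looks roughly like the sketch below. The server name, timestamp, key id and key material are fabricated.

# Illustrative shape of a /_matrix/key/v2/server response (fabricated values).
example_key_response = {
    "server_name": "example.org",
    "valid_until_ts": 1546300800000,  # cache expiry used above
    "verify_keys": {
        "ed25519:abc123": {"key": "VGhpcyBpcyBub3QgYSByZWFsIGtleQ"},
    },
    "old_verify_keys": {},
    "signatures": {"example.org": {"ed25519:abc123": "bm90IGEgcmVhbCBzaWduYXR1cmU"}},
}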
    @defer.inlineCallbacks
    def verifyServerSignedJson(self, signed_json, acceptable_server_names=None):
        """Given a signed json object, try to verify any one
        of the signatures on it.

        XXX: This contains a fairly noddy version of the home server
        SRV lookup and signature verification. It only looks at
        the first SRV result. It does no caching (it just fetches the
        signature each time) and does not contact any other servers
        to do perspectives checks.

        :param acceptable_server_names: If provided and not None,
            only signatures from servers in this list will be accepted.
        :type acceptable_server_names: list of strings

        :return: a tuple of the server name and key name that was
            successfully verified. If the json cannot be verified,
            raises SignatureVerifyException.
        """
        if 'signatures' not in signed_json:
            raise SignatureVerifyException("Signature missing")
        for server_name, sigs in signed_json['signatures'].items():
            if acceptable_server_names is not None:
                if server_name not in acceptable_server_names:
                    continue

            server_keys = yield self._getKeysForServer(server_name)
            for key_name, sig in sigs.items():
                if key_name in server_keys:
                    if 'key' not in server_keys[key_name]:
                        logger.warn("Ignoring key %s with no 'key'", key_name)
                        continue
                    key_bytes = decode_base64(server_keys[key_name]['key'])
                    verify_key = signedjson.key.decode_verify_key_bytes(key_name, key_bytes)
                    logger.info("verifying sig from key %r", key_name)
                    signedjson.sign.verify_signed_json(signed_json, server_name, verify_key)
                    logger.info("Verified signature with key %s from %s", key_name, server_name)
                    defer.returnValue((server_name, key_name))
            logger.warn(
                "No matching key found for signature block %r in server keys %r",
                signed_json['signatures'], server_keys,
            )
        logger.warn(
            "Unable to verify any signatures from block %r. Acceptable server names: %r",
            signed_json['signatures'], acceptable_server_names,
        )
        raise SignatureVerifyException("No matching signature found")

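Not part of the diff: a self-contained sketch of the verification step verifyServerSignedJson performs, using a locally generated signing key in place of one fetched from a homeserver. The key version, server name and payload are made up.

# Illustrative sketch: sign a blob locally, then verify it the same way the code
# above does (base64 key bytes -> decode_verify_key_bytes -> verify_signed_json).
import signedjson.key
import signedjson.sign
from unpaddedbase64 import encode_base64, decode_base64

signing_key = signedjson.key.generate_signing_key("1")  # hypothetical key version "1"
signed = signedjson.sign.sign_json(
    {"mxid": "@bob:example.org"}, "example.org", signing_key
)

# Pretend this came back from /_matrix/key/v2/server as base64-encoded key bytes.
key_b64 = encode_base64(signedjson.key.get_verify_key(signing_key).encode())

verify_key = signedjson.key.decode_verify_key_bytes("ed25519:1", decode_base64(key_b64))
signedjson.sign.verify_signed_json(signed, "example.org", verify_key)
# raises signedjson.sign.SignatureVerifyException if the signature doesn't match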
    @defer.inlineCallbacks
    def authenticate_request(self, request, content):
        """Authenticates a Matrix federation request based on the X-Matrix header.

        XXX: Copied largely from synapse

        :param request: The request object to authenticate
        :param content: The content of the request, if any
        :type content: bytes or None

        :return: The origin (server name) from the verified X-Matrix header.
            Raises NoAuthenticationError if no Authorization header is given,
            or SignatureVerifyException if the signature cannot be verified.
        """

Review comment: could do with some docstring regardless...
Reply: done

Review comment: could you document the return val please
Reply: done

        json_request = {
            "method": request.method,
            "uri": request.uri,
            "destination_is": self.sydent.server_name,
            "signatures": {},
        }

        if content is not None:
            json_request["content"] = content

        origin = None

        def parse_auth_header(header_str):
            try:
                params = header_str.split(" ")[1].split(",")
                param_dict = dict(kv.split("=") for kv in params)

                def strip_quotes(value):
                    if value.startswith("\""):
                        return value[1:-1]
                    else:
                        return value

                origin = strip_quotes(param_dict["origin"])
                key = strip_quotes(param_dict["key"])
                sig = strip_quotes(param_dict["sig"])
                return (origin, key, sig)
            except Exception:
                raise SignatureVerifyException("Malformed Authorization header")

        auth_headers = request.requestHeaders.getRawHeaders(b"Authorization")

        if not auth_headers:
            raise NoAuthenticationError("Missing Authorization headers")

Review comment: what is the logic behind when we raise a SignatureVerifyException and when a NoAuthenticationError?
Reply: Mostly just me thinking that you might want separate errors for them to be slightly more helpful (eg. for if you spelt Authorization with an s or something). Granted, with the way we're using it here we could just have the same exception type with a different message.

        for auth in auth_headers:
            if auth.startswith("X-Matrix"):
                (origin, key, sig) = parse_auth_header(auth)
                json_request["origin"] = origin
                json_request["signatures"].setdefault(origin, {})[key] = sig

        if not json_request["signatures"]:
            raise NoAuthenticationError("Missing Authorization headers")

Review comment: this is misleading
Reply: oops, thanks

        yield self.verifyServerSignedJson(json_request, [origin])

        logger.info("Verified request from HS %s", origin)

        defer.returnValue(origin)
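Not part of the diff: a sketch of what an X-Matrix Authorization header value looks like and what parse_auth_header above pulls out of it. The origin, key id and signature below are fabricated.

# Illustrative only: a made-up X-Matrix header value and the fields the parsing
# above extracts from it.
header = 'X-Matrix origin=example.org,key="ed25519:abc123",sig="bm90IGEgcmVhbCBzaWduYXR1cmU"'

params = header.split(" ")[1].split(",")
param_dict = dict(kv.split("=") for kv in params)
print(param_dict["origin"])          # example.org
print(param_dict["key"].strip('"'))  # ed25519:abc123
print(param_dict["sig"].strip('"'))  # bm90IGEgcmVhbCBzaWduYXR1cmU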
Review comment: ok, so the files on disk are the v0 schema, and new installations are made to be v1 by first running the v0 schema and then doing the upgrades? that's fine, but saying so in the comment would help
Reply: done