This repository has been archived by the owner on Apr 26, 2024. It is now read-only.

Federation fixes. #47

Merged: 21 commits, Feb 5, 2015
Commits (changes shown from all 21 commits):
941f591
Don't fail an entire request if one of the returned events fails a si…
erikjohnston Feb 2, 2015
40c6fe1
Don't bother requesting PDUs with bad signatures from the same server
erikjohnston Feb 2, 2015
e7ca813
Try to ensure we don't persist an event we have already persisted. In…
erikjohnston Feb 3, 2015
51969f9
Return rejected events if asked for it over federation.
erikjohnston Feb 3, 2015
0f48e22
PEP8
erikjohnston Feb 3, 2015
4ff2273
Add FIXME note.
erikjohnston Feb 3, 2015
06c34bf
Give exception better message
erikjohnston Feb 3, 2015
fed2925
Spelling
erikjohnston Feb 3, 2015
77a076b
Set combinations is | and not +
erikjohnston Feb 3, 2015
6efd4d1
Don't completely die if get auth_chain or querying auth_chain request…
erikjohnston Feb 3, 2015
0dd3aea
Keep around the old (buggy) version of the prune_event function so th…
erikjohnston Feb 3, 2015
7b810e1
Add new FederationBase
erikjohnston Feb 3, 2015
8dae5c8
Remove unused imports
erikjohnston Feb 3, 2015
9bace3a
Actually, the old prune_event function was non-deterministic, so no p…
erikjohnston Feb 3, 2015
7dd1c5c
Neaten the handling of state and auth_chain up a bit
erikjohnston Feb 3, 2015
3c39f42
New line
erikjohnston Feb 3, 2015
02be8da
Add doc to get_event
erikjohnston Feb 3, 2015
c0462db
Rearrange persist_event so that do all the queries that need to be do…
erikjohnston Feb 4, 2015
f275ba4
Fix state resolution to remember join_rules is a type of auth event.
erikjohnston Feb 4, 2015
03d415a
Brief comment on why we do some things on every call to persist_event…
erikjohnston Feb 4, 2015
650e32d
Change context.auth_events to what the auth_events would be based on …
erikjohnston Feb 4, 2015
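
A note on commit 77a076b ("Set combinations is | and not +"): Python set objects do not support the + operator, so combining two sets has to use the union operator |. A minimal illustration of the failure mode and the fix; the variable names and event IDs below are made up for the example and are not taken from this PR:

```python
# Sets do not support "+"; uncommenting the first line raises:
#   TypeError: unsupported operand type(s) for +: 'set' and 'set'
state_ids = {"$event_a:example.com", "$event_b:example.com"}
auth_ids = {"$event_b:example.com", "$event_c:example.com"}

# combined = state_ids + auth_ids   # TypeError
combined = state_ids | auth_ids     # set union, the operator the fix switches to

assert combined == {
    "$event_a:example.com",
    "$event_b:example.com",
    "$event_c:example.com",
}
```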
14 changes: 6 additions & 8 deletions synapse/api/auth.py
@@ -102,8 +102,6 @@ def check_joined_room(self, room_id, user_id):
def check_host_in_room(self, room_id, host):
curr_state = yield self.state.get_current_state(room_id)

logger.debug("Got curr_state %s", curr_state)

for event in curr_state:
if event.type == EventTypes.Member:
try:
@@ -360,7 +358,7 @@ def is_server_admin(self, user):
def add_auth_events(self, builder, context):
yield run_on_reactor()

auth_ids = self.compute_auth_events(builder, context)
auth_ids = self.compute_auth_events(builder, context.current_state)

auth_events_entries = yield self.store.add_event_hashes(
auth_ids
@@ -374,26 +372,26 @@ def add_auth_events(self, builder, context):
if v.event_id in auth_ids
}

def compute_auth_events(self, event, context):
def compute_auth_events(self, event, current_state):
if event.type == EventTypes.Create:
return []

auth_ids = []

key = (EventTypes.PowerLevels, "", )
power_level_event = context.current_state.get(key)
power_level_event = current_state.get(key)

if power_level_event:
auth_ids.append(power_level_event.event_id)

key = (EventTypes.JoinRules, "", )
join_rule_event = context.current_state.get(key)
join_rule_event = current_state.get(key)

key = (EventTypes.Member, event.user_id, )
member_event = context.current_state.get(key)
member_event = current_state.get(key)

key = (EventTypes.Create, "", )
create_event = context.current_state.get(key)
create_event = current_state.get(key)
if create_event:
auth_ids.append(create_event.event_id)

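The change above decouples compute_auth_events from the event context: it now takes the state map directly, a dict keyed by (event type, state key) tuples as the lookups in the diff show, so add_auth_events passes context.current_state and other callers can supply any such map. A rough sketch of calling the new signature; the state-event variables and the auth/builder objects are placeholders for illustration, not code from this PR:

```python
from synapse.api.constants import EventTypes

# Placeholder state events, e.g. previously fetched from the store or a
# remote server; keyed the same way the lookups in compute_auth_events are.
current_state = {
    (EventTypes.Create, ""): create_event,
    (EventTypes.PowerLevels, ""): power_levels_event,
    (EventTypes.JoinRules, ""): join_rules_event,
    (EventTypes.Member, builder.user_id): member_event,
}

# Old call:  auth_ids = auth.compute_auth_events(builder, context)
# New call passes the state map itself:
auth_ids = auth.compute_auth_events(builder, current_state)
```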
2 changes: 1 addition & 1 deletion synapse/events/__init__.py
@@ -77,7 +77,7 @@ def membership(self):
return self.content["membership"]

def is_state(self):
return hasattr(self, "state_key")
return hasattr(self, "state_key") and self.state_key is not None

def get_dict(self):
d = dict(self._event_dict)
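The one-line change above tightens is_state(): an event whose state_key attribute exists but is set to None is no longer treated as a state event, while an empty-string state_key remains valid. A self-contained illustration of the difference, not code from the PR:

```python
class _Stub(object):
    """Minimal stand-in for an event, just enough to show the check."""

    def is_state(self):
        return hasattr(self, "state_key") and self.state_key is not None


e = _Stub()
assert not e.is_state()   # no state_key attribute at all

e.state_key = None
assert not e.is_state()   # the old hasattr-only check would have returned True here

e.state_key = ""
assert e.is_state()       # an empty string is still a valid state_key
```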
118 changes: 118 additions & 0 deletions synapse/federation/federation_base.py
@@ -0,0 +1,118 @@
# -*- coding: utf-8 -*-
# Copyright 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


from twisted.internet import defer

from synapse.events.utils import prune_event

from syutil.jsonutil import encode_canonical_json

from synapse.crypto.event_signing import check_event_content_hash

from synapse.api.errors import SynapseError

import logging


logger = logging.getLogger(__name__)


class FederationBase(object):
@defer.inlineCallbacks
def _check_sigs_and_hash_and_fetch(self, origin, pdus, outlier=False):
"""Takes a list of PDUs and checks the signatures and hashs of each
one. If a PDU fails its signature check then we check if we have it in
the database and if not then request if from the originating server of
that PDU.

If a PDU fails its content hash check then it is redacted.

The given list of PDUs are not modified, instead the function returns
a new list.

Args:
pdus (list)
outlier (bool)

Returns:
Deferred : A list of PDUs that have valid signatures and hashes.
"""
signed_pdus = []
for pdu in pdus:
try:
new_pdu = yield self._check_sigs_and_hash(pdu)
signed_pdus.append(new_pdu)
except SynapseError:
# FIXME: We should handle signature failures more gracefully.

# Check local db.
new_pdu = yield self.store.get_event(
pdu.event_id,
allow_rejected=True
)
if new_pdu:
signed_pdus.append(new_pdu)
continue

# Check pdu.origin
if pdu.origin != origin:
new_pdu = yield self.get_pdu(
destinations=[pdu.origin],
event_id=pdu.event_id,
outlier=outlier,
)

if new_pdu:
signed_pdus.append(new_pdu)
continue

logger.warn("Failed to find copy of %s with valid signature")

defer.returnValue(signed_pdus)

@defer.inlineCallbacks
def _check_sigs_and_hash(self, pdu):
"""Throws a SynapseError if the PDU does not have the correct
signatures.

Returns:
FrozenEvent: Either the given event or a redacted copy of it if it
failed the content hash check.
"""
# Check signatures are correct.
redacted_event = prune_event(pdu)
redacted_pdu_json = redacted_event.get_pdu_json()

try:
yield self.keyring.verify_json_for_server(
pdu.origin, redacted_pdu_json
)
except SynapseError:
logger.warn(
"Signature check failed for %s redacted to %s",
encode_canonical_json(pdu.get_pdu_json()),
encode_canonical_json(redacted_pdu_json),
)
raise

if not check_event_content_hash(pdu):
logger.warn(
"Event content has been tampered, redacting %s, %s",
pdu.event_id, encode_canonical_json(pdu.get_dict())
)
defer.returnValue(redacted_event)

defer.returnValue(pdu)
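
A note on the design of the new base class: _check_sigs_and_hash_and_fetch calls self.store.get_event, self.keyring.verify_json_for_server and self.get_pdu, so FederationBase is effectively a mixin that assumes the concrete class (FederationClient below) provides a store, a keyring and a PDU-fetching method. A minimal sketch of that implicit contract; the subclass, its constructor and the _fetch_from_remote helper are hypothetical, not how Synapse actually wires these objects together:

```python
from twisted.internet import defer

from synapse.federation.federation_base import FederationBase


class ExampleFederationClient(FederationBase):
    """Hypothetical subclass, only to spell out what FederationBase expects."""

    def __init__(self, store, keyring):
        self.store = store      # must expose get_event(event_id, allow_rejected=...)
        self.keyring = keyring  # must expose verify_json_for_server(server_name, json)

    @defer.inlineCallbacks
    def get_pdu(self, destinations, event_id, outlier=False):
        # Last-resort fallback used by _check_sigs_and_hash_and_fetch when a
        # signature check fails and the event is not in the local store.
        pdu = yield self._fetch_from_remote(destinations, event_id, outlier)
        defer.returnValue(pdu)
```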
99 changes: 31 additions & 68 deletions synapse/federation/federation_client.py
@@ -16,25 +16,19 @@

from twisted.internet import defer

from .federation_base import FederationBase
from .units import Edu

from synapse.util.logutils import log_function
from synapse.events import FrozenEvent
from synapse.events.utils import prune_event

from syutil.jsonutil import encode_canonical_json

from synapse.crypto.event_signing import check_event_content_hash

from synapse.api.errors import SynapseError

import logging


logger = logging.getLogger(__name__)


class FederationClient(object):
class FederationClient(FederationBase):
@log_function
def send_pdu(self, pdu, destinations):
"""Informs the replication layer about a new PDU generated within the
@@ -224,17 +218,17 @@ def get_state_for_room(self, destination, room_id, event_id):
for p in result.get("auth_chain", [])
]

for i, pdu in enumerate(pdus):
pdus[i] = yield self._check_sigs_and_hash(pdu)

# FIXME: We should handle signature failures more gracefully.
signed_pdus = yield self._check_sigs_and_hash_and_fetch(
destination, pdus, outlier=True
)

for i, pdu in enumerate(auth_chain):
auth_chain[i] = yield self._check_sigs_and_hash(pdu)
signed_auth = yield self._check_sigs_and_hash_and_fetch(
destination, auth_chain, outlier=True
)

# FIXME: We should handle signature failures more gracefully.
signed_auth.sort(key=lambda e: e.depth)

defer.returnValue((pdus, auth_chain))
defer.returnValue((signed_pdus, signed_auth))

@defer.inlineCallbacks
@log_function
@@ -248,14 +242,13 @@ def get_event_auth(self, destination, room_id, event_id):
for p in res["auth_chain"]
]

for i, pdu in enumerate(auth_chain):
auth_chain[i] = yield self._check_sigs_and_hash(pdu)

# FIXME: We should handle signature failures more gracefully.
signed_auth = yield self._check_sigs_and_hash_and_fetch(
destination, auth_chain, outlier=True
)

auth_chain.sort(key=lambda e: e.depth)
signed_auth.sort(key=lambda e: e.depth)

defer.returnValue(auth_chain)
defer.returnValue(signed_auth)

@defer.inlineCallbacks
def make_join(self, destination, room_id, user_id):
@@ -291,21 +284,19 @@ def send_join(self, destination, pdu):
for p in content.get("auth_chain", [])
]

for i, pdu in enumerate(state):
state[i] = yield self._check_sigs_and_hash(pdu)

# FIXME: We should handle signature failures more gracefully.

for i, pdu in enumerate(auth_chain):
auth_chain[i] = yield self._check_sigs_and_hash(pdu)
signed_state = yield self._check_sigs_and_hash_and_fetch(
destination, state, outlier=True
)

# FIXME: We should handle signature failures more gracefully.
signed_auth = yield self._check_sigs_and_hash_and_fetch(
destination, auth_chain, outlier=True
)

auth_chain.sort(key=lambda e: e.depth)

defer.returnValue({
"state": state,
"auth_chain": auth_chain,
"state": signed_state,
"auth_chain": signed_auth,
})

@defer.inlineCallbacks
@@ -353,12 +344,18 @@ def query_auth(self, destination, room_id, event_id, local_auth):
)

auth_chain = [
(yield self._check_sigs_and_hash(self.event_from_pdu_json(e)))
self.event_from_pdu_json(e)
for e in content["auth_chain"]
]

signed_auth = yield self._check_sigs_and_hash_and_fetch(
destination, auth_chain, outlier=True
)

signed_auth.sort(key=lambda e: e.depth)

ret = {
"auth_chain": auth_chain,
"auth_chain": signed_auth,
"rejects": content.get("rejects", []),
"missing": content.get("missing", []),
}
@@ -373,37 +370,3 @@ def event_from_pdu_json(self, pdu_json, outlier=False):
event.internal_metadata.outlier = outlier

return event

@defer.inlineCallbacks
def _check_sigs_and_hash(self, pdu):
"""Throws a SynapseError if the PDU does not have the correct
signatures.

Returns:
FrozenEvent: Either the given event or it redacted if it failed the
content hash check.
"""
# Check signatures are correct.
redacted_event = prune_event(pdu)
redacted_pdu_json = redacted_event.get_pdu_json()

try:
yield self.keyring.verify_json_for_server(
pdu.origin, redacted_pdu_json
)
except SynapseError:
logger.warn(
"Signature check failed for %s redacted to %s",
encode_canonical_json(pdu.get_pdu_json()),
encode_canonical_json(redacted_pdu_json),
)
raise

if not check_event_content_hash(pdu):
logger.warn(
"Event content has been tampered, redacting %s, %s",
pdu.event_id, encode_canonical_json(pdu.get_dict())
)
defer.returnValue(redacted_event)

defer.returnValue(pdu)