From a5e39505b5f0b6bc87487e6c9804d0c17e6244c5 Mon Sep 17 00:00:00 2001 From: qstokkink Date: Sat, 3 Mar 2018 22:22:35 +0100 Subject: [PATCH 1/3] Added AllChannel2.0 --- .../Core/Libtorrent/LibtorrentDownloadImpl.py | 3 + .../Test/Community/Allchannel2/__init__.py | 0 .../Community/Allchannel2/test_community.py | 102 +++++++ .../Community/Allchannel2/test_structures.py | 144 ++++++++++ Tribler/Test/data/channels/testcase0/0 | 1 + Tribler/community/allchannel2/__init__.py | 0 Tribler/community/allchannel2/community.py | 238 ++++++++++++++++ Tribler/community/allchannel2/payload.py | 16 ++ Tribler/community/allchannel2/structures.py | 261 ++++++++++++++++++ 9 files changed, 765 insertions(+) create mode 100644 Tribler/Test/Community/Allchannel2/__init__.py create mode 100644 Tribler/Test/Community/Allchannel2/test_community.py create mode 100644 Tribler/Test/Community/Allchannel2/test_structures.py create mode 100644 Tribler/Test/data/channels/testcase0/0 create mode 100644 Tribler/community/allchannel2/__init__.py create mode 100644 Tribler/community/allchannel2/community.py create mode 100644 Tribler/community/allchannel2/payload.py create mode 100644 Tribler/community/allchannel2/structures.py diff --git a/Tribler/Core/Libtorrent/LibtorrentDownloadImpl.py b/Tribler/Core/Libtorrent/LibtorrentDownloadImpl.py index 5a9ea1f570d..dbae90f9847 100644 --- a/Tribler/Core/Libtorrent/LibtorrentDownloadImpl.py +++ b/Tribler/Core/Libtorrent/LibtorrentDownloadImpl.py @@ -151,6 +151,7 @@ def __init__(self, session, tdef): self.deferreds_handle = [] self.deferred_added = Deferred() self.deferred_removed = Deferred() + self.deferred_finished = Deferred() self.handle_check_lc = self.register_task("handle_check", LoopingCall(self.check_handle)) @@ -663,6 +664,8 @@ def reset_priorities(): if self.endbuffsize: self.set_byte_priority([(self.get_vod_fileindex(), 0, -1)], 1) self.endbuffsize = 0 + if not self.deferred_finished.called: + self.deferred_finished.callback(self) def update_lt_status(self, lt_status): """ Update libtorrent stats and check if the download should be stopped.""" diff --git a/Tribler/Test/Community/Allchannel2/__init__.py b/Tribler/Test/Community/Allchannel2/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/Tribler/Test/Community/Allchannel2/test_community.py b/Tribler/Test/Community/Allchannel2/test_community.py new file mode 100644 index 00000000000..39fe810d0b8 --- /dev/null +++ b/Tribler/Test/Community/Allchannel2/test_community.py @@ -0,0 +1,102 @@ +import os +import shutil + +from twisted.internet.defer import succeed + +from Tribler.community.allchannel2.community import AllChannel2Community +from Tribler.pyipv8.ipv8.keyvault.crypto import ECCrypto +from Tribler.pyipv8.ipv8.peer import Peer +import Tribler.Test as super_module +from Tribler.pyipv8.ipv8.test.base import TestBase +from Tribler.pyipv8.ipv8.test.util import twisted_wrapper + + +class FakeDownloadConfig(object): + + def __init__(self, download_dir): + self.download_dir = download_dir + + def get_dest_dir(self): + return self.download_dir + + +class FakeDownload(object): + + def __init__(self, download_dir): + self.deferred_finished = succeed(self) + self.dlconfig = FakeDownloadConfig(download_dir) + + +class FakeSession(object): + + def __init__(self, download_dir): + self.magnet_links = {} + self.download_dir = download_dir + + def start_download_from_uri(self, magnetlink, _): + self.magnet_links[magnetlink] = '' + return succeed(FakeDownload(self.download_dir)) + + +class 
TestAllChannel2(TestBase): + + def setUp(self): + super(TestAllChannel2, self).setUp() + + mocked_community = AllChannel2Community + key = ECCrypto().generate_key(u"very-low") + mocked_community.master_peer = Peer(key) + self.initialize(mocked_community, 2) + + data_dir = os.path.abspath(os.path.join(os.path.dirname(super_module.__file__), 'data', 'channels')) + for node in self.nodes: + node.overlay.working_directory = data_dir + + def tearDown(self): + super(TestAllChannel2, self).tearDown() + + for node in self.nodes: + channel_dir = os.path.abspath(os.path.join(node.overlay.working_directory, node.overlay.my_channel_name)) + if os.path.isdir(channel_dir): + shutil.rmtree(channel_dir) + + def test_write_channel(self): + """ + Check if we can add a magnet link to our channel and write it to file. + """ + magnet_link = 'a'*20 + self.nodes[0].overlay.add_magnetlink(magnet_link) + channel_name = self.nodes[0].overlay.my_channel_name + + self.assertListEqual([channel_name], self.nodes[0].overlay.get_channels()) + self.assertListEqual([magnet_link], self.nodes[0].overlay.get_magnetlinks(channel_name)) + self.assertEqual('\xc9\x18[5\x1c\x99=\xe9\x17\xd7|\x0ee\xf6E=ia\xb5W', + self.nodes[0].overlay.my_channel_info_hash) + + def test_read_channel(self): + """ + Check if we can read a channel from disk. + """ + channel_name = "testcase0" + magnet_link = 'a' * 20 + self.nodes[0].overlay.load_channel(channel_name) + + self.assertListEqual([channel_name], self.nodes[0].overlay.get_channels()) + self.assertListEqual([magnet_link], self.nodes[0].overlay.get_magnetlinks(channel_name)) + + @twisted_wrapper + def test_share_channel(self): + """ + Check if peers start downloading each others channel after introducing. + """ + magnet_link = 'a' * 20 + for node in self.nodes: + node.overlay.tribler_session = FakeSession(node.overlay.working_directory) + node.overlay.add_magnetlink(magnet_link) + + yield self.introduce_nodes() + yield self.deliver_messages() + + for node in self.nodes: + self.assertListEqual(['magnet:?xt=urn:btih:\xc9\x18[5\x1c\x99=\xe9\x17\xd7|\x0ee\xf6E=ia\xb5W'], + node.overlay.tribler_session.magnet_links.keys()) diff --git a/Tribler/Test/Community/Allchannel2/test_structures.py b/Tribler/Test/Community/Allchannel2/test_structures.py new file mode 100644 index 00000000000..a716a726f7d --- /dev/null +++ b/Tribler/Test/Community/Allchannel2/test_structures.py @@ -0,0 +1,144 @@ +import unittest + +from Tribler.community.allchannel2.structures import Chunk, ChunkedTable + + +class TestChunk(unittest.TestCase): + + def setUp(self): + self.chunk = Chunk() + + def test_empty(self): + """ + Test if a Chunk is initially empty. + """ + self.assertDictEqual(self.chunk.data, {}) + + def test_add(self): + """ + Test if we can add a key value pair to the Chunk. + """ + self.assertTrue(self.chunk.add("key", "value")) + self.assertDictEqual(self.chunk.data, {"key": "value"}) + + def test_remove(self): + """ + Test if we can remove a key value pair from the Chunk. + """ + self.assertTrue(self.chunk.add("key", "value")) + self.chunk.remove("key") + self.assertDictEqual(self.chunk.data, {}) + + def test_add_full(self): + """ + Test if we cannot add blocks to an already full Chunk. + """ + self.chunk.max_length = 0 + self.assertFalse(self.chunk.add("key", "value")) + + def test_serialize_empty(self): + """ + Test if we can serialize and deserialize an empty Chunk. 
+ """ + data = self.chunk.serialize() + chunk = Chunk.unserialize(data) + self.assertDictEqual(self.chunk.data, chunk.data) + + def test_serialize(self): + """ + Test if we can serialize and deserialize a Chunk. + """ + self.chunk.data = {"key": "value"} + data = self.chunk.serialize() + chunk = Chunk.unserialize(data) + self.assertDictEqual(self.chunk.data, chunk.data) + + +class TestChunkedTable(unittest.TestCase): + + def setUp(self): + self.chunked_table = ChunkedTable() + + def test_empty(self): + """ + Test if a ChunkedTable is initially empty. + """ + self.assertDictEqual(self.chunked_table.chunklist, {}) + + def test_add(self): + """ + Test if we can add a key value pair to the ChunkedTable. + """ + self.chunked_table.add("key", "value") + self.assertEqual(len(self.chunked_table.chunklist.keys()), 1) + self.assertDictEqual(self.chunked_table.get_all(), {"key": "value"}) + + def test_add_second(self): + """ + Test if we can add a key value pair to the ChunkedTable in an existing Chunk. + """ + self.chunked_table.add("key", "value") + self.chunked_table.add("key2", "value2") + self.assertEqual(len(self.chunked_table.chunklist.keys()), 1) + self.assertDictEqual(self.chunked_table.get_all(), {"key": "value", "key2": "value2"}) + + def test_add_second_spill(self): + """ + Test if we can add a key value pair to the ChunkedTable if the existing Chunk is full. + """ + self.chunked_table.add("key", "value") + self.chunked_table.chunklist.values()[0].max_length = 0 + self.chunked_table.add("key2", "value2") + self.assertEqual(len(self.chunked_table.chunklist.keys()), 2) + self.assertDictEqual(self.chunked_table.get_all(), {"key": "value", "key2": "value2"}) + + def test_remove(self): + """ + Test if we can remove a key value pair from the ChunkedTable. + """ + self.chunked_table.add("key", "value") + self.chunked_table.remove("key") + self.assertEqual(len(self.chunked_table.chunklist.keys()), 0) + self.assertDictEqual(self.chunked_table.get_all(), {}) + + def test_remove_hole(self): + """ + Test if we do not remove middle Chunks in the ChunkTable. + """ + fake_chunklist = {0: Chunk(), 1:Chunk(), 2:Chunk()} + fake_chunklist[0].add("key0", "value0") + fake_chunklist[1].add("key1", "value1") + fake_chunklist[2].add("key2", "value2") + self.chunked_table.chunklist = fake_chunklist + self.chunked_table.remove("key1") + self.assertEqual(len(self.chunked_table.chunklist.keys()), 3) + self.assertDictEqual(self.chunked_table.get_all(), {"key0": "value0", "key2": "value2"}) + + def test_remove_hole_recover(self): + """ + Test if we remove all trailing empty Chunks in the ChunkTable. + """ + fake_chunklist = {0: Chunk(), 1:Chunk(), 2:Chunk()} + fake_chunklist[0].add("key0", "value0") + fake_chunklist[2].add("key2", "value2") + self.chunked_table.chunklist = fake_chunklist + self.chunked_table.remove("key2") + self.assertEqual(len(self.chunked_table.chunklist.keys()), 1) + self.assertDictEqual(self.chunked_table.get_all(), {"key0": "value0"}) + + def test_serialize_empty(self): + """ + Test if we can serialize and deserialize an empty ChunkedTable. + """ + data = self.chunked_table.serialize() + chunked_table = ChunkedTable.unserialize(data) + self.assertDictEqual(self.chunked_table.get_all(), chunked_table.get_all()) + + def test_serialize(self): + """ + Test if we can serialize and deserialize a ChunkedTable. 
+ """ + self.chunked_table.add("key", "value") + data = self.chunked_table.serialize() + chunked_table = ChunkedTable.unserialize(data) + self.assertDictEqual(self.chunked_table.get_all(), chunked_table.get_all()) diff --git a/Tribler/Test/data/channels/testcase0/0 b/Tribler/Test/data/channels/testcase0/0 new file mode 100644 index 00000000000..8ad4e7c7949 --- /dev/null +++ b/Tribler/Test/data/channels/testcase0/0 @@ -0,0 +1 @@ +d20:aaaaaaaaaaaaaaaaaaaa0:e \ No newline at end of file diff --git a/Tribler/community/allchannel2/__init__.py b/Tribler/community/allchannel2/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/Tribler/community/allchannel2/community.py b/Tribler/community/allchannel2/community.py new file mode 100644 index 00000000000..b328e9c0698 --- /dev/null +++ b/Tribler/community/allchannel2/community.py @@ -0,0 +1,238 @@ +import os + +from Tribler.community.allchannel2.payload import ChannelPayload +from Tribler.community.allchannel2.structures import Channel +from Tribler.Core.DownloadConfig import DefaultDownloadStartupConfig +from Tribler.pyipv8.ipv8.deprecated.community import Community +from Tribler.pyipv8.ipv8.deprecated.payload_headers import BinMemberAuthenticationPayload, GlobalTimeDistributionPayload +from Tribler.pyipv8.ipv8.peer import Peer + + +def infohash_to_magnet(infohash): + """ + Tranform an info hash to a magnet link. + + :param infohash: the infohash to convert + :return: the magnet link belonging to this info hash + """ + return "magnet:?xt=urn:btih:" + infohash + + +class AllChannel2Community(Community): + """ + AllChannel 2.0 + + My channel administration usage: + 1. __init__() + 2. load_channels(): performs heavy disk I/O + 3. add_magnetlink()/remove_magnetlink(): change the contents of my channel + 4. fetch and seed my_channel_torrent (automatically committed) + """ + master_peer = Peer(("307e301006072a8648ce3d020106052b81040024036a000400d2aaf87af3a1743286" + + "fe9a9b13abffe35fe1fc7fd5e40e5d587b01cbefa4eece1c9c8fca4767684acbe8b2d" + + "050c09eb7f2a9d000083c293d9ed9cc562edfa330d178e0516b068de4509193cd38b3" + + "bb22476177ede8f944010e9fb8843e15ef4fe829bc569de649").decode('hex')) + + def __init__(self, my_peer, endpoint, network, working_directory="./channels", tribler_session=None): + """ + Initialize the AllChannel2 Community. + + :param my_peer: the Peer representing my peer + :param endpoint: the Endpoint object to use + :param network: the Network object to use + :param working_directory: the folder where all of the channels are stored + """ + super(AllChannel2Community, self).__init__(my_peer, endpoint, network) + self.working_directory = working_directory + self.channels = {} + self.my_channel_name = self.my_peer.mid.encode("hex") + self.tribler_session = tribler_session + + # Internals, do not touch! + self._my_channel_info_hash = None + self._my_channel_torrent = None + self.decode_map.update({chr(1): self.on_channel}) + + def load_channels(self): + """ + Load all known Channels from the working directory. + + :returns: None + """ + channel_directories = [folder for folder in os.listdir(self.working_directory) + if os.path.isdir(os.path.join(self.working_directory, folder))] + for folder in channel_directories: + self.load_channel(folder) + if not self.my_channel_name in self.channels: + os.makedirs(os.path.abspath(os.path.join(self.working_directory, self.my_channel_name))) + + def load_channel(self, channel): + """ + Load a single channel from the folder structure. 
+ + :param channel: the channel name + :returns: None + """ + real_path = os.path.abspath(os.path.join(self.working_directory, channel)) + if os.path.isdir(real_path): + channel_instance = Channel(channel, self.working_directory, allow_edit=(channel == self.my_channel_name)) + channel_instance.load() + self.channels[channel] = channel_instance + + def _commit_my_channel(self): + """ + Commit the channel based on my_peer. + + :returns: None + """ + my_channel = self.channels.get(self.my_channel_name, + Channel(self.my_channel_name, self.working_directory, True)) + my_channel.commit() + try: + self._my_channel_torrent, self._my_channel_info_hash = my_channel.make_torrent() + except RuntimeError: + self.logger.warning("Tried to make torrent, but the Channel was empty!") + self.channels[self.my_channel_name] = my_channel + + def _dirty_cache(self): + """ + Turn my channel dirty. + This invalidates the info hash and torrent file, which will have to be recreated. + + :returns: None + """ + self._my_channel_info_hash = None + self._my_channel_torrent = None + + @property + def my_channel_info_hash(self): + """ + The info hash representing my channel. + + :return: (20 byte str) info hash of my channel + """ + if not self._my_channel_info_hash: + self._commit_my_channel() + return self._my_channel_info_hash + + @property + def my_channel_magnet_link(self): + """ + The magnet link representing my channel. + + :return: the (stripped) magnet link of my channel + """ + if not self._my_channel_info_hash: + self._commit_my_channel() + return infohash_to_magnet(self._my_channel_info_hash) + + @property + def my_channel_torrent(self): + """ + The torrent file representing my channel. + + :return: the filename of the torrent for my channel + """ + if not self._my_channel_torrent: + self._commit_my_channel() + return self._my_channel_torrent + + def add_magnetlink(self, magnetlink): + """ + Add a magnet link to my channel. + + :param magnetlink: the (20 byte str) magnet link to add + :returns: None + """ + if not self.my_channel_name in self.channels: + self._commit_my_channel() + self.channels[self.my_channel_name].add_magnetlink(magnetlink) + self._dirty_cache() + + def remove_magnetlink(self, magnetlink): + """ + Remove a magnet link from my channel. + + :param magnetlink: the (20 byte str) magnet link to add + :returns: None + """ + if not self.my_channel_name in self.channels: + self._commit_my_channel() + self.channels[self.my_channel_name].remove_magnetlink(magnetlink) + self._dirty_cache() + + def get_channels(self): + """ + Get all known channels. + + :return: the names of the channels we know about (including our own) + """ + return self.channels.keys() + + def get_magnetlinks(self, channel): + """ + Get all the magnet links from a specific channel name. + + :param channel: the channel name + :return: the magnet links belonging to that channel + """ + channel_instance = self.channels.get(channel, None) + return channel_instance.get_magnetlinks() if channel_instance else [] + + def create_channel_message(self): + """ + Create a channel message for my channel. 
+ + :return: the channel message + """ + global_time = self.claim_global_time() + payload = ChannelPayload(self.my_channel_info_hash).to_pack_list() + auth = BinMemberAuthenticationPayload(self.my_peer.public_key.key_to_bin()).to_pack_list() + dist = GlobalTimeDistributionPayload(global_time).to_pack_list() + + return self._ez_pack(self._prefix, 1, [auth, dist, payload]) + + def download_finished(self, download): + """ + Callback for when a Channel download finished. + Load in the Channel data. + + :param download: the LibtorrentDownloadImpl instance + :returns: None + """ + real_path = download.dlconfig.get_dest_dir() + rel_path = os.path.relpath(self.working_directory, real_path) + self.load_channel(rel_path) + + def on_channel(self, source_address, data): + """ + Callback for when a ChannelPayload message is received. + """ + auth, _, payload = self._ez_unpack_auth(ChannelPayload, data) + channel = Peer(auth.public_key_bin).mid.encode('hex') + # If we don't know about this channel, respond with our own + if channel not in self.channels: + if self.my_channel_info_hash: + packet = self.create_channel_message() + self.endpoint.send(source_address, packet) + # And start downloading it, if we are hooked up to a Tribler session + if self.tribler_session: + download_config = DefaultDownloadStartupConfig.getInstance() + dest_dir = os.path.abspath(os.path.join(self.working_directory, channel)) + download_config.set_dest_dir(dest_dir) + add_deferred = self.tribler_session.start_download_from_uri(infohash_to_magnet(payload.info_hash), + download_config) + add_deferred.addCallback(lambda download: + download.deferred_finished).addCallback(self.download_finished) + + def on_introduction_response(self, source_address, data): + """ + Callback for when an introduction response is received. + + We extend the functionality by sharing our channel with the other side. + """ + super(AllChannel2Community, self).on_introduction_response(source_address, data) + + if self.my_channel_info_hash: + packet = self.create_channel_message() + self.endpoint.send(source_address, packet) diff --git a/Tribler/community/allchannel2/payload.py b/Tribler/community/allchannel2/payload.py new file mode 100644 index 00000000000..b0e95c1b2c0 --- /dev/null +++ b/Tribler/community/allchannel2/payload.py @@ -0,0 +1,16 @@ +from Tribler.pyipv8.ipv8.deprecated.payload import Payload + + +class ChannelPayload(Payload): + + format_list = ['20s'] + + def __init__(self, info_hash): + self.info_hash = info_hash + + def to_pack_list(self): + return [('20s', self.info_hash)] + + @classmethod + def from_unpack_list(cls, *args): + return cls(args[0]) diff --git a/Tribler/community/allchannel2/structures.py b/Tribler/community/allchannel2/structures.py new file mode 100644 index 00000000000..84d4222f713 --- /dev/null +++ b/Tribler/community/allchannel2/structures.py @@ -0,0 +1,261 @@ +import os + +from libtorrent import add_files, bdecode, bencode, create_torrent, file_storage, set_piece_hashes + + +PIECE_SIZE = 16*1024*1024 # 16 MB: Holds 762600 magnetlinks without metadata + + +class Chunk(object): + """ + A `PIECE_SIZE` sized file filled with magnet links/metadata. + """ + + def __init__(self): + """ + Initialize the Chunk: it is still in memory. + The value of `serialize()` can be written to disk. + """ + super(Chunk, self).__init__() + self.data = {} + self.current_length = 0 + self.max_length = PIECE_SIZE - 2 # 16MB - len('d') len('e') + + def add(self, key, value): + """ + Add a key, value (magnetlink, metadata) store to the Chunk. 
+ This may fail if the key/value is too big to fit into this Chunk. + + :param key: the key / magnetlink + :param value: the value / metadata + :returns: whether the store was added successfully + """ + key_len = len(key) + value_len = len(value) + combined_len = len(str(key_len)) + len(str(value_len)) + key_len + value_len + 4 + + if self.current_length + combined_len <= self.max_length: + self.data[key] = value + self.current_length += combined_len + return True + return False + + def remove(self, key): + """ + Remove a key, value store by key. + They key does not need to exist in this Chunk. + + :param key: the key to remove by + :returns: None + """ + self.data.pop(key, None) + + def serialize(self): + """ + Create a serialized form of this Chunk. + :return: the bencoding of this Chunk + """ + return bencode(self.data) + + @classmethod + def unserialize(cls, data): + """ + Read in a Chunk from a file. + + :param data: the file contents + :return: a Chunk object derived from the input data + """ + out = cls() + for key, value in bdecode(data).iteritems(): + out.add(key, value) + return out + + +class ChunkedTable(object): + """ + Table managing Chunk objects. + May incomplete if the ChunkedTable was created from an incomplete torrent download. + """ + + def __init__(self): + """ + Initialize the ChunkedTable. + """ + super(ChunkedTable, self).__init__() + self.chunklist = {} + + def add(self, key, value): + """ + Add a key, value (magnetlink, metadata) store to the Chunk. + This may fail if the key/value is too big to fit into this Chunk. + + :param key: the key / magnetlink + :param value: the value / metadata + :returns: whether the store was added successfully + """ + for chunk in self.chunklist.values(): + if chunk.add(key, value): + return + chunk = Chunk() + if not chunk.add(key, value): + return False # key value pair too large for any container + self.chunklist[len(self.chunklist)] = chunk + + def remove(self, key): + """ + Remove a key, value store by key. + They key does not need to exist in this Chunk. + + :param key: the key to remove by + :returns: None + """ + empty_chunks = {} + high_id = 0 + for chunk_id, chunk in self.chunklist.iteritems(): + if chunk_id > high_id: + high_id = chunk_id + chunk.remove(key) + if not chunk.data.keys(): + empty_chunks[chunk_id] = chunk + # Get the sequential highest-order first list of empty chunks + if empty_chunks: + empty_chunk_id_list = sorted(empty_chunks.keys(), reverse=True) + index = 0 + empty_count = len(empty_chunks.keys()) + for i in xrange(high_id, -1, -1): + if index >= empty_count: + break + if empty_chunk_id_list[index] == i: + self.chunklist.pop(i) + else: + break + index += 1 + + def serialize(self): + """ + Create a map of Chunk serializations. Maps (chunkid -> Chunk). + + :returns: the serialized Chunk mapping + """ + out = {} + for i in range(len(self.chunklist)): + out[str(i)] = self.chunklist[i].serialize() + return out + + @classmethod + def unserialize(cls, mapping): + """ + Read in a ChunkedTable from a map of filenames to file contents. + + :param mapping: the serialized Chunkforms per chunk id + :returns: the ChunkedTable corresponding to the input map + """ + chunk_table = ChunkedTable() + for i in mapping.keys(): + chunk_table.chunklist[int(i)] = Chunk.unserialize(mapping[i]) + return chunk_table + + def get_all(self): + """ + Get all key/value stores in each of the Chunks. 
+ + :return: the complete dictionary of data in the Chunks + """ + out = {} + for chunk in self.chunklist.values(): + out.update(chunk.data) + return out + + +class Channel(object): + + def __init__(self, name, directory=".", allow_edit=False): + """ + Create a new Channel. + + :param name: the name of the Channel + :param directory: the directory to store the Channel + :param allow_edit: allow addition/removal of magnetlinks (only for the Channel owner) + """ + super(Channel, self).__init__() + + self.name = name + self.allow_edit = allow_edit + self.channel_directory = os.path.abspath(os.path.join(directory, name)) + if not os.path.isdir(self.channel_directory): + os.makedirs(self.channel_directory) + self.chunked_table = ChunkedTable() + + def add_magnetlink(self, magnetlink): + """ + Add a magnetlink to this channel. + + TODO Future work: add metadata (always "" for now) + + :param magnetlink: the magnetlink to add + :returns: None + """ + self.chunked_table.add(magnetlink, "") + + def remove_magnetlink(self, magnetlink): + """ + Remove a magnetlink from this channel. + + :param magnetlink: the magnetlink to remove + :returns: None + """ + self.chunked_table.remove(magnetlink) + to_remove = set(os.listdir(self.channel_directory)) - set(self.chunked_table.chunklist.keys()) + for filename in to_remove: + real_file = os.path.abspath(os.path.join(self.channel_directory, filename)) + os.remove(real_file) + + def get_magnetlinks(self): + """ + Get all known magnetlinks in this Channel. + + :return: the list of magnetlinks + """ + return self.chunked_table.get_all().keys() + + def commit(self): + """ + Commit the added and/or removed magnetlinks to the file structure. + + :returns: None + """ + for filename, content in self.chunked_table.serialize().iteritems(): + with open(os.path.join(self.channel_directory, filename), 'w') as f: + f.write(content) + + def make_torrent(self): + """ + Create a torrent from the last committed file stucture. + + :return: the resulting torrent file name, the info hash + """ + fs = file_storage() + add_files(fs, self.channel_directory) + flags = 19 + t = create_torrent(fs, piece_size=PIECE_SIZE, flags=flags) + t.set_priv(False) + set_piece_hashes(t, os.path.dirname(self.channel_directory)) + torrent_name = os.path.join(self.channel_directory, self.name + ".torrent") + generated = t.generate() + with open(torrent_name, 'w') as f: + f.write(bencode(generated)) + return torrent_name, generated['info']['root hash'] + + def load(self): + """ + Load the channel from the last committed file structure. 
+ + :returns: None + """ + files = os.listdir(self.channel_directory) + data = {} + for filename in files: + if filename.isdigit(): + with open(os.path.join(self.channel_directory, filename), 'r') as f: + data[filename] = f.read() + self.chunked_table = ChunkedTable.unserialize(data) From 5e74ee96e58767a4fe035189fc497028348b5212 Mon Sep 17 00:00:00 2001 From: qstokkink Date: Thu, 10 May 2018 09:54:16 +0200 Subject: [PATCH 2/3] Added AllChannel2 to LaunchManyCore --- Tribler/Core/APIImplementation/LaunchManyCore.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/Tribler/Core/APIImplementation/LaunchManyCore.py b/Tribler/Core/APIImplementation/LaunchManyCore.py index 60d591faadc..f100772fc99 100644 --- a/Tribler/Core/APIImplementation/LaunchManyCore.py +++ b/Tribler/Core/APIImplementation/LaunchManyCore.py @@ -242,6 +242,20 @@ def load_ipv8_overlays(self): if not self.session.config.get_dispersy_enabled(): self.ipv8.strategies.append((RandomWalk(discovery_community), 20)) + # AllChannel2 Community + if self.session.config.get_channel_search_enabled(): + triblerchain_peer = Peer(self.session.trustchain_keypair) + from Tribler.community.allchannel2.community import AllChannel2Community + + channel_directory = os.path.join(self.session.config.get_state_dir(), u"channels") + self.allchannel2_community = AllChannel2Community(triblerchain_peer, self.ipv8.endpoint, + self.ipv8.network, + tribler_session=self.session, + working_directory=channel_directory) + self.allchannel2_community.load_channels() + self.ipv8.overlays.append(self.allchannel2_community) + self.ipv8.strategies.append((RandomWalk(self.allchannel2_community), 20)) + # TriblerChain Community if self.session.config.get_trustchain_enabled(): triblerchain_peer = Peer(self.session.trustchain_keypair) From e54ce2c907823ca9871503a0b72c43b6e0e80e59 Mon Sep 17 00:00:00 2001 From: qstokkink Date: Thu, 10 May 2018 10:17:50 +0200 Subject: [PATCH 3/3] Don't ask trackers for their channels --- Tribler/community/allchannel2/community.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Tribler/community/allchannel2/community.py b/Tribler/community/allchannel2/community.py index b328e9c0698..5cb5f9f1187 100644 --- a/Tribler/community/allchannel2/community.py +++ b/Tribler/community/allchannel2/community.py @@ -212,7 +212,7 @@ def on_channel(self, source_address, data): channel = Peer(auth.public_key_bin).mid.encode('hex') # If we don't know about this channel, respond with our own if channel not in self.channels: - if self.my_channel_info_hash: + if self.my_channel_info_hash and source_address not in self.network.blacklist: packet = self.create_channel_message() self.endpoint.send(source_address, packet) # And start downloading it, if we are hooked up to a Tribler session @@ -233,6 +233,6 @@ def on_introduction_response(self, source_address, data): """ super(AllChannel2Community, self).on_introduction_response(source_address, data) - if self.my_channel_info_hash: + if self.my_channel_info_hash and source_address not in self.network.blacklist: packet = self.create_channel_message() self.endpoint.send(source_address, packet)
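
A minimal usage sketch of the Channel structure introduced in Tribler/community/allchannel2/structures.py, following the add/commit/make_torrent/load lifecycle that the AllChannel2Community docstring describes. This is illustrative only, not part of the patch: the scratch directory name and the placeholder 20-byte magnet link value are assumptions, and it presumes the patch is applied and libtorrent's Python bindings are importable.

    import os

    from Tribler.community.allchannel2.structures import Channel

    # Assumption: any writable scratch directory; the name is illustrative.
    working_directory = os.path.abspath("./channels_demo")

    # allow_edit marks this as the owner's locally editable channel,
    # mirroring how AllChannel2Community constructs its own channel.
    channel = Channel("my_channel", working_directory, allow_edit=True)

    # Magnet links are stored as raw 20-byte strings, as in the unit tests.
    channel.add_magnetlink('a' * 20)
    channel.commit()  # writes the numbered Chunk files into <working_directory>/my_channel

    # make_torrent() builds a torrent over the committed files; the community
    # code wraps this call in a try/except because it fails on an empty channel.
    torrent_file, info_hash = channel.make_torrent()
    print(info_hash.encode('hex'))

    # A fresh Channel instance rebuilds its table from the committed files.
    restored = Channel("my_channel", working_directory)
    restored.load()
    print(restored.get_magnetlinks())  # ['aaaaaaaaaaaaaaaaaaaa']

The community automates the same lifecycle: load_channels() restores committed channels at startup, add_magnetlink()/remove_magnetlink() mark the cached torrent dirty, and the my_channel_info_hash property re-commits and rebuilds the torrent on demand.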