From de8d9cf72225ff6badd498a1b26fa88f88c7a6ba Mon Sep 17 00:00:00 2001 From: Mark Qvist Date: Wed, 18 Apr 2018 23:31:17 +0200 Subject: [PATCH] Implemented resources --- Notes/Header format | 2 +- RNS/Destination.py | 2 +- RNS/Identity.py | 15 +- RNS/Link.py | 113 +++- RNS/Packet.py | 348 ++++++------ RNS/Resource.py | 437 ++++++++++++++++ RNS/Reticulum.py | 7 +- RNS/Transport.py | 15 +- RNS/__init__.py | 1 + RNS/vendor/umsgpack.py | 1134 ++++++++++++++++++++++++++++++++++++++++ 10 files changed, 1891 insertions(+), 183 deletions(-) create mode 100644 RNS/Resource.py create mode 100644 RNS/vendor/umsgpack.py diff --git a/Notes/Header format b/Notes/Header format index f32409c..0cd4462 100644 --- a/Notes/Header format +++ b/Notes/Header format @@ -2,7 +2,7 @@ header types ----------------- type 1 00 Two byte header, one 10 byte address field type 2 01 Two byte header, two 10 byte address fields -type 3 10 Two byte header, one 10 byte address field, used for link request proofs +type 3 10 Reserved type 4 11 Reserved for extended header format diff --git a/RNS/Destination.py b/RNS/Destination.py index 8d0f063..4e6e711 100755 --- a/RNS/Destination.py +++ b/RNS/Destination.py @@ -201,7 +201,7 @@ class Destination: # Application specific data can be added to the announce. def announce(self,app_data=None): destination_hash = self.hash - random_hash = self.identity.getRandomHash() + random_hash = RNS.Identity.getRandomHash() signed_data = self.hash+self.identity.getPublicKey()+random_hash if app_data != None: diff --git a/RNS/Identity.py b/RNS/Identity.py index e802b2b..3064319 100644 --- a/RNS/Identity.py +++ b/RNS/Identity.py @@ -4,7 +4,7 @@ import os import RNS import time import atexit -import cPickle +import vendor.umsgpack as umsgpack from cryptography.hazmat.primitives import hashes from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization @@ -61,7 +61,7 @@ class Identity: def saveKnownDestinations(): RNS.log("Saving known destinations to storage...", RNS.LOG_VERBOSE) file = open(RNS.Reticulum.storagepath+"/known_destinations","w") - cPickle.dump(Identity.known_destinations, file) + umsgpack.dump(Identity.known_destinations, file) file.close() RNS.log("Done saving known destinations to storage", RNS.LOG_VERBOSE) @@ -69,7 +69,7 @@ class Identity: def loadKnownDestinations(): if os.path.isfile(RNS.Reticulum.storagepath+"/known_destinations"): file = open(RNS.Reticulum.storagepath+"/known_destinations","r") - Identity.known_destinations = cPickle.load(file) + Identity.known_destinations = umsgpack.load(file) file.close() RNS.log("Loaded "+str(len(Identity.known_destinations))+" known destinations from storage", RNS.LOG_VERBOSE) else: @@ -89,6 +89,10 @@ class Identity: return digest.finalize()[:10] + @staticmethod + def getRandomHash(): + return Identity.truncatedHash(os.urandom(10)) + @staticmethod def validateAnnounce(packet): if packet.packet_type == RNS.Packet.ANNOUNCE: @@ -276,8 +280,3 @@ class Identity: proof = RNS.Packet(destination, proof_data, RNS.Packet.PROOF) proof.send() - - - def getRandomHash(self): - return self.truncatedHash(os.urandom(10)) - diff --git a/RNS/Link.py b/RNS/Link.py index 0893aad..11df9f1 100644 --- a/RNS/Link.py +++ b/RNS/Link.py @@ -14,15 +14,21 @@ class LinkCallbacks: self.link_established = None self.packet = None self.resource_started = None - self.resource_completed = None + self.resource_concluded = None class Link: CURVE = ec.SECP256R1() ECPUBSIZE = 91 + BLOCKSIZE = 16 PENDING = 0x00 ACTIVE = 0x01 + 
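+	# Strategies for accepting incoming resources on this link. As the
+	# receive() handler below shows: ACCEPT_NONE ignores all resource
+	# advertisements, ACCEPT_APP hands the advertisement packet to the
+	# application's resource callback so it can decide, and ACCEPT_ALL
+	# automatically accepts every advertised resource.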
ACCEPT_NONE = 0x00 + ACCEPT_APP = 0x01 + ACCEPT_ALL = 0x02 + resource_strategies = [ACCEPT_NONE, ACCEPT_APP, ACCEPT_ALL] + @staticmethod def validateRequest(owner, data, packet): if len(data) == (Link.ECPUBSIZE): @@ -40,6 +46,7 @@ class Link: except Exception as e: RNS.log("Validating link request failed", RNS.LOG_VERBOSE) + traceback.print_exc() return None else: @@ -50,7 +57,11 @@ class Link: def __init__(self, destination=None, owner=None, peer_pub_bytes = None): if destination != None and destination.type != RNS.Destination.SINGLE: raise TypeError("Links can only be established to the \"single\" destination type") + self.rtt = None self.callbacks = LinkCallbacks() + self.resource_strategy = Link.ACCEPT_NONE + self.outgoing_resources = [] + self.incoming_resources = [] self.status = Link.PENDING self.type = RNS.Destination.LINK self.owner = owner @@ -109,7 +120,7 @@ class Link: signature = self.owner.identity.sign(signed_data) proof_data = self.pub_bytes+signature - proof = RNS.Packet(self, proof_data, packet_type=RNS.Packet.PROOF, header_type=RNS.Packet.HEADER_3) + proof = RNS.Packet(self, proof_data, packet_type=RNS.Packet.PROOF, context=RNS.Packet.LRPROOF) proof.send() def validateProof(self, packet): @@ -122,9 +133,9 @@ class Link: self.handshake() self.attached_interface = packet.receiving_interface RNS.Transport.activateLink(self) + RNS.log("Link "+str(self)+" established with "+str(self.destination), RNS.LOG_VERBOSE) if self.callbacks.link_established != None: self.callbacks.link_established(self) - RNS.log("Link "+str(self)+" established with "+str(self.destination), RNS.LOG_VERBOSE) else: RNS.log("Invalid link proof signature received by "+str(self), RNS.LOG_VERBOSE) @@ -139,9 +150,60 @@ class Link: if packet.receiving_interface != self.attached_interface: RNS.log("Link-associated packet received on unexpected interface! 
Someone might be trying to manipulate your communication!", RNS.LOG_ERROR) else: - plaintext = self.decrypt(packet.data) - if (self.callbacks.packet != None): - self.callbacks.packet(plaintext, packet) + if packet.packet_type == RNS.Packet.DATA: + if packet.context == RNS.Packet.NONE: + plaintext = self.decrypt(packet.data) + if (self.callbacks.packet != None): + self.callbacks.packet(plaintext, packet) + + elif packet.context == RNS.Packet.RESOURCE_ADV: + packet.plaintext = self.decrypt(packet.data) + if self.resource_strategy == Link.ACCEPT_NONE: + pass + elif self.resource_strategy == Link.ACCEPT_APP: + if self.callbacks.resource != None: + self.callbacks.resource(packet) + elif self.resource_strategy == Link.ACCEPT_ALL: + RNS.Resource.accept(packet, self.callbacks.resource_concluded) + + elif packet.context == RNS.Packet.RESOURCE_REQ: + plaintext = self.decrypt(packet.data) + if ord(plaintext[:1]) == RNS.Resource.HASHMAP_IS_EXHAUSTED: + resource_hash = plaintext[1+RNS.Resource.MAPHASH_LEN:RNS.Identity.HASHLENGTH/8+1+RNS.Resource.MAPHASH_LEN] + else: + resource_hash = plaintext[1:RNS.Identity.HASHLENGTH/8+1] + for resource in self.outgoing_resources: + if resource.hash == resource_hash: + resource.request(plaintext) + + elif packet.context == RNS.Packet.RESOURCE_HMU: + plaintext = self.decrypt(packet.data) + resource_hash = plaintext[:RNS.Identity.HASHLENGTH/8] + for resource in self.incoming_resources: + if resource_hash == resource.hash: + resource.hashmap_update_packet(plaintext) + + elif packet.context == RNS.Packet.RESOURCE_ICL: + plaintext = self.decrypt(packet.data) + resource_hash = plaintext[:RNS.Identity.HASHLENGTH/8] + for resource in self.incoming_resources: + if resource_hash == resource.hash: + resource.cancel() + + # TODO: find the most efficient way to allow multiple + # transfers at the same time, sending resource hash on + # each packet is a huge overhead + elif packet.context == RNS.Packet.RESOURCE: + for resource in self.incoming_resources: + resource.receive_part(packet) + + elif packet.packet_type == RNS.Packet.PROOF: + if packet.context == RNS.Packet.RESOURCE_PRF: + resource_hash = packet.data[0:RNS.Identity.HASHLENGTH/8] + for resource in self.outgoing_resources: + if resource_hash == resource.hash: + resource.validateProof(packet.data) + def encrypt(self, plaintext): if self.__encryption_disabled: @@ -170,11 +232,43 @@ class Link: def packet_callback(self, callback): self.callbacks.packet = callback + # Called when an incoming resource transfer is started def resource_started_callback(self, callback): self.callbacks.resource_started = callback - def resource_completed_callback(self, callback): - self.callbacks.resource_completed = callback + # Called when a resource transfer is concluded + def resource_concluded_callback(self, callback): + self.callbacks.resource_concluded = callback + + def setResourceStrategy(self, resource_strategy): + if not resource_strategy in Link.resource_strategies: + raise TypeError("Unsupported resource strategy") + else: + self.resource_strategy = resource_strategy + + def register_outgoing_resource(self, resource): + self.outgoing_resources.append(resource) + + def register_incoming_resource(self, resource): + self.incoming_resources.append(resource) + + def cancel_outgoing_resource(self, resource): + if resource in self.outgoing_resources: + self.outgoing_resources.remove(resource) + else: + RNS.log("Attempt to cancel a non-existing incoming resource", RNS.LOG_ERROR) + + def cancel_incoming_resource(self, resource): + if resource 
in self.incoming_resources: + self.incoming_resources.remove(resource) + else: + RNS.log("Attempt to cancel a non-existing incoming resource", RNS.LOG_ERROR) + + def ready_for_new_resource(self): + if len(self.outgoing_resources) > 0: + return False + else: + return True def disableEncryption(self): if (RNS.Reticulum.should_allow_unencrypted()): @@ -185,5 +279,8 @@ class Link: RNS.log("Shutting down Reticulum now!", RNS.LOG_CRITICAL) RNS.panic() + def encryption_disabled(self): + return self.__encryption_disabled + def __str__(self): return RNS.prettyhexrep(self.link_id) \ No newline at end of file diff --git a/RNS/Packet.py b/RNS/Packet.py index 38e7fae..2e2d8ad 100755 --- a/RNS/Packet.py +++ b/RNS/Packet.py @@ -2,6 +2,198 @@ import struct import time import RNS +class Packet: + # Constants + DATA = 0x00 + ANNOUNCE = 0x01 + LINKREQUEST = 0x02 + PROOF = 0x03 + types = [DATA, ANNOUNCE, LINKREQUEST, PROOF] + + HEADER_1 = 0x00 # Normal header format + HEADER_2 = 0x01 # Header format used for link packets in transport + HEADER_3 = 0x02 # Reserved + HEADER_4 = 0x03 # Reserved + header_types = [HEADER_1, HEADER_2, HEADER_3, HEADER_4] + + # Context types + NONE = 0x00 + RESOURCE = 0x01 + RESOURCE_ADV = 0x02 + RESOURCE_REQ = 0x03 + RESOURCE_HMU = 0x04 + RESOURCE_PRF = 0x05 + RESOURCE_ICL = 0x06 + RESOURCE_RCL = 0x07 + REQUEST = 0x08 + RESPONSE = 0x09 + COMMAND = 0x0A + COMMAND_STAT = 0x0B + LRPROOF = 0xFF + + HEADER_MAXSIZE = 23 + + # Defaults + TIMEOUT = 60 + + def __init__(self, destination, data, packet_type = DATA, context = NONE, transport_type = RNS.Transport.BROADCAST, header_type = HEADER_1, transport_id = None): + if destination != None: + if transport_type == None: + transport_type = RNS.Transport.BROADCAST + + self.header_type = header_type + self.packet_type = packet_type + self.transport_type = transport_type + self.context = context + + self.hops = 0; + self.destination = destination + self.transport_id = transport_id + self.data = data + self.flags = self.getPackedFlags() + self.MTU = RNS.Reticulum.MTU + + self.raw = None + self.packed = False + self.sent = False + self.receipt = None + self.fromPacked = False + else: + self.raw = data + self.packed = True + self.fromPacked = True + + self.sent_at = None + self.packet_hash = None + + def getPackedFlags(self): + if self.context == Packet.LRPROOF: + packed_flags = (self.header_type << 6) | (self.transport_type << 4) | RNS.Destination.LINK | self.packet_type + else: + packed_flags = (self.header_type << 6) | (self.transport_type << 4) | (self.destination.type << 2) | self.packet_type + return packed_flags + + def pack(self): + self.header = "" + self.header += struct.pack("!B", self.flags) + self.header += struct.pack("!B", self.hops) + + + if self.context == Packet.LRPROOF: + self.header += self.destination.link_id + self.ciphertext = self.data + else: + if self.header_type == Packet.HEADER_1: + self.header += self.destination.hash + + if self.packet_type == Packet.ANNOUNCE: + # Announce packets are not encrypted + self.ciphertext = self.data + elif self.packet_type == Packet.PROOF and self.context == Packet.RESOURCE_PRF: + # Resource proofs are not encrypted + self.ciphertext = self.data + elif self.context == Packet.RESOURCE: + # A resource takes care of symmetric + # encryption by itself + self.ciphertext = self.data + else: + # In all other cases, we encrypt the packet + # with the destination's public key + self.ciphertext = self.destination.encrypt(self.data) + + if self.header_type == Packet.HEADER_2: + if t_destination != 
None: + self.header += self.t_destination + else: + raise IOError("Packet with header type 2 must have a transport ID") + + + + + self.header += chr(self.context) + + self.raw = self.header + self.ciphertext + + if len(self.raw) > self.MTU: + raise IOError("Packet size of "+str(len(self.raw))+" exceeds MTU of "+str(self.MTU)+" bytes") + + self.packed = True + + def unpack(self): + self.flags = ord(self.raw[0]) + self.hops = ord(self.raw[1]) + + self.header_type = (self.flags & 0b11000000) >> 6 + self.transport_type = (self.flags & 0b00110000) >> 4 + self.destination_type = (self.flags & 0b00001100) >> 2 + self.packet_type = (self.flags & 0b00000011) + + if self.header_type == Packet.HEADER_2: + self.transport_id = self.raw[2:12] + self.destination_hash = self.raw[12:22] + self.context = ord(self.raw[22:23]) + self.data = self.raw[23:] + else: + self.transport_id = None + self.destination_hash = self.raw[2:12] + self.context = ord(self.raw[12:13]) + self.data = self.raw[13:] + + self.packed = False + + def send(self): + if not self.sent: + if not self.packed: + self.pack() + + if RNS.Transport.outbound(self): + return self.receipt + else: + # TODO: Don't raise error here, handle gracefully + raise IOError("Packet could not be sent! Do you have any outbound interfaces configured?") + else: + raise IOError("Packet was already sent") + + def resend(self): + if self.sent: + Transport.outbound(self.raw) + else: + raise IOError("Packet was not sent yet") + + def prove(self, destination=None): + if self.fromPacked and self.destination: + if self.destination.identity and self.destination.identity.prv: + self.destination.identity.prove(self, destination) + + # Generates a special destination that allows Reticulum + # to direct the proof back to the proved packet's sender + def generateProofDestination(self): + return ProofDestination(self) + + def validateProofPacket(self, proof_packet): + return self.receipt.validateProofPacket(proof_packet) + + def validateProof(self, proof): + return self.receipt.validateProof(proof) + + def updateHash(self): + self.packet_hash = self.getHash() + + def getHash(self): + return RNS.Identity.fullHash(self.getHashablePart()) + + def getHashablePart(self): + return self.raw[0:1]+self.raw[2:] + +class ProofDestination: + def __init__(self, packet): + self.hash = packet.getHash()[:10]; + self.type = RNS.Destination.SINGLE + + def encrypt(self, plaintext): + return plaintext + + class PacketReceipt: # Receipt status constants FAILED = 0x00 @@ -90,158 +282,4 @@ class PacketReceipt: class PacketReceiptCallbacks: def __init__(self): self.delivery = None - self.timeout = None - -class Packet: - # Constants - DATA = 0x00; - ANNOUNCE = 0x01; - LINKREQUEST = 0x02; - PROOF = 0x03; - types = [DATA, ANNOUNCE, LINKREQUEST, PROOF] - - HEADER_1 = 0x00; # Normal header format - HEADER_2 = 0x01; # Header format used for link packets in transport - HEADER_3 = 0x02; # Normal header format, but used to indicate a link request proof - HEADER_4 = 0x03; # Reserved - header_types = [HEADER_1, HEADER_2, HEADER_3, HEADER_4] - - # Defaults - TIMEOUT = 3600.0 - - def __init__(self, destination, data, packet_type = DATA, transport_type = RNS.Transport.BROADCAST, header_type = HEADER_1, transport_id = None): - if destination != None: - if transport_type == None: - transport_type = RNS.Transport.BROADCAST - - self.header_type = header_type - self.packet_type = packet_type - self.transport_type = transport_type - - self.hops = 0; - self.destination = destination - self.transport_id = transport_id - 
self.data = data - self.flags = self.getPackedFlags() - self.MTU = RNS.Reticulum.MTU - - self.raw = None - self.packed = False - self.sent = False - self.receipt = None - self.fromPacked = False - else: - self.raw = data - self.packed = True - self.fromPacked = True - - self.sent_at = None - self.packet_hash = None - - def getPackedFlags(self): - if self.header_type == Packet.HEADER_3: - packed_flags = (self.header_type << 6) | (self.transport_type << 4) | RNS.Destination.LINK | self.packet_type - else: - packed_flags = (self.header_type << 6) | (self.transport_type << 4) | (self.destination.type << 2) | self.packet_type - return packed_flags - - def pack(self): - self.header = "" - self.header += struct.pack("!B", self.flags) - self.header += struct.pack("!B", self.hops) - if self.header_type == Packet.HEADER_2: - if t_destination != None: - self.header += self.t_destination - else: - raise IOError("Packet with header type 2 must have a transport ID") - - if self.header_type == Packet.HEADER_1: - self.header += self.destination.hash - if self.packet_type != Packet.ANNOUNCE: - self.ciphertext = self.destination.encrypt(self.data) - else: - self.ciphertext = self.data - - if self.header_type == Packet.HEADER_3: - self.header += self.destination.link_id - self.ciphertext = self.data - - self.raw = self.header + self.ciphertext - - if len(self.raw) > self.MTU: - raise IOError("Packet size of "+str(len(self.raw))+" exceeds MTU of "+str(self.MTU)+" bytes") - - self.packed = True - - def unpack(self): - self.flags = ord(self.raw[0]) - self.hops = ord(self.raw[1]) - - self.header_type = (self.flags & 0b11000000) >> 6 - self.transport_type = (self.flags & 0b00110000) >> 4 - self.destination_type = (self.flags & 0b00001100) >> 2 - self.packet_type = (self.flags & 0b00000011) - - if self.header_type == Packet.HEADER_2: - self.transport_id = self.raw[2:12] - self.destination_hash = self.raw[12:22] - self.data = self.raw[22:] - else: - self.transport_id = None - self.destination_hash = self.raw[2:12] - self.data = self.raw[12:] - - self.packed = False - - def send(self): - if not self.sent: - if not self.packed: - self.pack() - - if RNS.Transport.outbound(self): - return self.receipt - else: - # TODO: Don't raise error here, handle gracefully - raise IOError("Packet could not be sent! 
Do you have any outbound interfaces configured?") - else: - raise IOError("Packet was already sent") - - def resend(self): - if self.sent: - Transport.outbound(self.raw) - else: - raise IOError("Packet was not sent yet") - - def prove(self, destination=None): - if self.fromPacked and self.destination: - if self.destination.identity and self.destination.identity.prv: - self.destination.identity.prove(self, destination) - - # Generates a special destination that allows Reticulum - # to direct the proof back to the proved packet's sender - def generateProofDestination(self): - return ProofDestination(self) - - def validateProofPacket(self, proof_packet): - return self.receipt.validateProofPacket(proof_packet) - - def validateProof(self, proof): - return self.receipt.validateProof(proof) - - def updateHash(self): - self.packet_hash = self.getHash() - - def getHash(self): - return RNS.Identity.fullHash(self.getHashablePart()) - - def getHashablePart(self): - return self.raw[0:1]+self.raw[2:] - -class ProofDestination: - def __init__(self, packet): - self.hash = packet.getHash()[:10]; - self.type = RNS.Destination.SINGLE - - def encrypt(self, plaintext): - return plaintext - + self.timeout = None \ No newline at end of file diff --git a/RNS/Resource.py b/RNS/Resource.py new file mode 100644 index 0000000..be08bba --- /dev/null +++ b/RNS/Resource.py @@ -0,0 +1,437 @@ +import RNS +import bz2 +import math +import time +import threading +import vendor.umsgpack as umsgpack + + +class Resource: + WINDOW_MIN = 1 + WINDOW_MAX = 10 + WINDOW = 5 + MAPHASH_LEN = 4 + SDU = RNS.Reticulum.MTU - RNS.Packet.HEADER_MAXSIZE + RANDOM_HASH_SIZE = 4 + + DEFAULT_TIMEOUT = RNS.Packet.TIMEOUT + MAX_RETRIES = 3 + ROUNDTRIP_FACTOR = 1.5 + + HASHMAP_IS_NOT_EXHAUSTED = 0x00 + HASHMAP_IS_EXHAUSTED = 0xFF + + # Status constants + NONE = 0x00 + QUEUED = 0x01 + ADVERTISED = 0x02 + TRANSFERRING = 0x03 + COMPLETE = 0x04 + FAILED = 0x05 + CORRUPT = 0x06 + + @staticmethod + def accept(advertisement_packet, callback=None, progress_callback = None): + try: + adv = ResourceAdvertisement.unpack(advertisement_packet.plaintext) + + resource = Resource(None, advertisement_packet.link) + resource.status = Resource.TRANSFERRING + + resource.flags = adv.f + resource.size = adv.t + resource.uncompressed_size = adv.d + resource.hash = adv.h + resource.random_hash = adv.r + resource.hashmap_raw = adv.m + resource.encrypted = True if resource.flags & 0x01 else False + resource.compressed = True if resource.flags >> 1 & 0x01 else False + resource.initiator = False + resource.callback = callback + resource.__progress_callback = progress_callback + resource.total_parts = int(math.ceil(resource.size/float(Resource.SDU))) + resource.received_count = 0 + resource.outstanding_parts = 0 + resource.parts = [None] * resource.total_parts + resource.window = Resource.WINDOW + resource.last_activity = time.time() + + resource.hashmap = [None] * resource.total_parts + resource.hashmap_height = 0 + resource.waiting_for_hmu = False + + resource.link.register_incoming_resource(resource) + + RNS.log("Accepting resource advertisement for "+RNS.prettyhexrep(resource.hash), RNS.LOG_DEBUG) + resource.link.callbacks.resource_started(resource) + + resource.hashmap_update(0, resource.hashmap_raw) + + return resource + except Exception as e: + RNS.log("Could not decode resource advertisement, dropping resource", RNS.LOG_VERBOSE) + traceback.print_exc() + return None + + def __init__(self, data, link, advertise=True, auto_compress=True, callback=None, 
progress_callback=None): + self.status = Resource.NONE + self.link = link + self.rtt = None + + if data != None: + hashmap_ok = False + while not hashmap_ok: + self.initiator = True + self.callback = callback + self.progress_callback = progress_callback + self.random_hash = RNS.Identity.getRandomHash()[:Resource.RANDOM_HASH_SIZE] + self.uncompressed_data = data + self.compressed_data = bz2.compress(self.uncompressed_data) + self.uncompressed_size = len(self.uncompressed_data) + self.compressed_size = len(self.compressed_data) + + self.hash = RNS.Identity.fullHash(data+self.random_hash) + self.expected_proof = RNS.Identity.fullHash(data+self.hash) + + if (self.compressed_size < self.uncompressed_size and auto_compress): + self.data = self.compressed_data + self.compressed = True + self.uncompressed_data = None + else: + self.data = self.uncompressed_data + self.compressed = False + self.compressed_data = None + + if not self.link.encryption_disabled(): + self.data = self.link.encrypt(self.data) + self.encrypted = True + else: + self.encrypted = False + + self.size = len(self.data) + + self.hashmap = "" + self.parts = [] + for i in range(0,int(math.ceil(self.size/float(Resource.SDU)))): + data = self.data[i*Resource.SDU:(i+1)*Resource.SDU] + part = RNS.Packet(link, data, context=RNS.Packet.RESOURCE) + part.pack() + part.map_hash = self.getMapHash(data) + self.hashmap += part.map_hash + self.parts.append(part) + + hashmap_ok = self.checkHashMap() + if not hashmap_ok: + RNS.log("Found hash collision in resource map, remapping...", RNS.LOG_VERBOSE) + + if advertise: + self.advertise() + else: + pass + + + def checkHashMap(self): + checked_hashes = [] + for part in self.parts: + if part.map_hash in checked_hashes: + return False + checked_hashes.append(part.map_hash) + + return True + + def hashmap_update_packet(self, plaintext): + if not self.status == Resource.FAILED: + update = umsgpack.unpackb(plaintext[RNS.Identity.HASHLENGTH/8:]) + self.hashmap_update(update[0], update[1]) + + + def hashmap_update(self, segment, hashmap): + if not self.status == Resource.FAILED: + seg_len = ResourceAdvertisement.HASHMAP_MAX_LEN + hashes = len(hashmap)/Resource.MAPHASH_LEN + for i in range(0,hashes): + self.hashmap[i+segment*seg_len] = hashmap[i*Resource.MAPHASH_LEN:(i+1)*Resource.MAPHASH_LEN] + self.hashmap_height += 1 + + self.waiting_for_hmu = False + self.request_next() + + def getMapHash(self, data): + return RNS.Identity.fullHash(data+self.random_hash)[:Resource.MAPHASH_LEN] + + def advertise(self): + thread = threading.Thread(target=self.__advertise_job) + thread.setDaemon(True) + thread.start() + + def __advertise_job(self): + data = ResourceAdvertisement(self).pack() + packet = RNS.Packet(self.link, data, context=RNS.Packet.RESOURCE_ADV) + while not self.link.ready_for_new_resource(): + self.status = Resource.QUEUED + sleep(0.25) + + packet.send() + self.last_activity = time.time() + self.adv_sent = self.last_activity + self.rtt = None + self.status = Resource.ADVERTISED + self.link.register_outgoing_resource(self) + + def assemble(self): + if not self.status == Resource.FAILED: + try: + RNS.log("Assembling parts...") + stream = "" + for part in self.parts: + stream += part + + if self.encrypted: + data = self.link.decrypt(stream) + else: + data = stream + + if self.compressed: + self.data = bz2.decompress(data) + else: + self.data = data + + calculated_hash = RNS.Identity.fullHash(self.data+self.random_hash) + + if calculated_hash == self.hash: + self.status = Resource.COMPLETE + self.prove() + 
else: + self.status = Resource.CORRUPT + + except Exception as e: + RNS.log("Error while assembling received resource.", RNS.LOG_ERROR) + RNS.log("The contained exception was: "+str(e), RNS.LOG_ERROR) + self.status = Resource.CORRUPT + + if self.callback != None: + self.callback(self) + + + def prove(self): + if not self.status == Resource.FAILED: + proof = RNS.Identity.fullHash(self.data+self.hash) + proof_data = self.hash+proof + proof_packet = RNS.Packet(self.link, proof_data, packet_type=RNS.Packet.PROOF, context=RNS.Packet.RESOURCE_PRF) + proof_packet.send() + + def validateProof(self, proof_data): + if not self.status == Resource.FAILED: + if len(proof_data) == RNS.Identity.HASHLENGTH/8*2: + if proof_data[RNS.Identity.HASHLENGTH/8:] == self.expected_proof: + self.status = Resource.COMPLETE + if self.callback != None: + self.callback(self) + else: + pass + else: + pass + + + def receive_part(self, packet): + self.last_activity = time.time() + if self.req_resp == None: + self.req_resp = self.last_activity + rtt = self.req_resp-self.req_sent + if self.rtt == None: + self.rtt = rtt + elif self.rtt < rtt: + self.rtt = rtt + + if not self.status == Resource.FAILED: + self.status = Resource.TRANSFERRING + part_data = packet.data + part_hash = self.getMapHash(part_data) + + i = 0 + for map_hash in self.hashmap: + if map_hash == part_hash: + if self.parts[i] == None: + self.parts[i] = part_data + self.received_count += 1 + self.outstanding_parts -= 1 + i += 1 + + if self.__progress_callback != None: + self.__progress_callback(self) + + if self.outstanding_parts == 0 and self.received_count == self.total_parts: + self.assemble() + elif self.outstanding_parts == 0: + if self.window < Resource.WINDOW_MAX: + self.window += 1 + self.request_next() + + # Called on incoming resource to send a request for more data + def request_next(self): + if not self.status == Resource.FAILED: + if not self.waiting_for_hmu: + self.outstanding_parts = 0 + hashmap_exhausted = Resource.HASHMAP_IS_NOT_EXHAUSTED + requested_hashes = "" + + i = 0; pn = 0 + for part in self.parts: + + if part == None: + part_hash = self.hashmap[pn] + if part_hash != None: + requested_hashes += part_hash + self.outstanding_parts += 1 + i += 1 + else: + hashmap_exhausted = Resource.HASHMAP_IS_EXHAUSTED + + pn += 1 + if i >= self.window or hashmap_exhausted == Resource.HASHMAP_IS_EXHAUSTED: + break + + hmu_part = chr(hashmap_exhausted) + if hashmap_exhausted == Resource.HASHMAP_IS_EXHAUSTED: + last_map_hash = self.hashmap[self.hashmap_height-1] + hmu_part += last_map_hash + self.waiting_for_hmu = True + + request_data = hmu_part + self.hash + requested_hashes + request_packet = RNS.Packet(self.link, request_data, context = RNS.Packet.RESOURCE_REQ) + + request_packet.send() + self.last_activity = time.time() + self.req_sent = self.last_activity + self.req_resp = None + + # Called on outgoing resource to make it send more data + def request(self, request_data): + if not self.status == Resource.FAILED: + rtt = time.time() - self.adv_sent + if self.rtt == None: + self.rtt = rtt + + self.status == Resource.TRANSFERRING + wants_more_hashmap = True if ord(request_data[0]) == Resource.HASHMAP_IS_EXHAUSTED else False + pad = 1+Resource.MAPHASH_LEN if wants_more_hashmap else 1 + + requested_hashes = request_data[pad+RNS.Identity.HASHLENGTH/8:] + + for i in range(0,len(requested_hashes)/Resource.MAPHASH_LEN): + requested_hash = requested_hashes[i*Resource.MAPHASH_LEN:(i+1)*Resource.MAPHASH_LEN] + + i = 0 + for part in self.parts: + if part.map_hash 
== requested_hash: + if not part.sent: + part.send() + else: + part.resend() + self.last_activity = time.time() + break + i += 1 + + if wants_more_hashmap: + last_map_hash = request_data[1:Resource.MAPHASH_LEN+1] + + part_index = 0 + for part in self.parts: + part_index += 1 + if part.map_hash == last_map_hash: + break + + if part_index % ResourceAdvertisement.HASHMAP_MAX_LEN != 0: + RNS.log("Resource sequencing error, cancelling transfer!", RNS.LOG_ERROR) + self.cancel() + else: + segment = part_index / ResourceAdvertisement.HASHMAP_MAX_LEN + + + hashmap_start = segment*ResourceAdvertisement.HASHMAP_MAX_LEN + hashmap_end = min((segment+1)*ResourceAdvertisement.HASHMAP_MAX_LEN, len(self.parts)) + + hashmap = "" + for i in range(hashmap_start,hashmap_end): + hashmap += self.hashmap[i*Resource.MAPHASH_LEN:(i+1)*Resource.MAPHASH_LEN] + + hmu = self.hash+umsgpack.packb([segment, hashmap]) + hmu_packet = RNS.Packet(self.link, hmu, context = RNS.Packet.RESOURCE_HMU) + hmu_packet.send() + self.last_activity = time.time() + + def cancel(self): + self.status = Resource.FAILED + if self.initiator: + cancel_packet = RNS.Packet(self.link, self.hash, context=RNS.Packet.RESOURCE_ICL) + cancel_packet.send() + self.link.cancel_outgoing_resource(self) + else: + self.link.cancel_incoming_resource(self) + + if self.callback != None: + self.callback(self) + + def progress_callback(self, callback): + self.__progress_callback = callback + + def progress(self): + progress = self.received_count / float(self.total_parts) + return progress + + def __str__(self): + return RNS.prettyHexRep(self.hash) + + +class ResourceAdvertisement: + # TODO: Can this be allocated dynamically? Keep in mind hashmap_update inference + HASHMAP_MAX_LEN = 84 + + def __init__(self, resource=None): + if resource != None: + self.t = resource.size # Transfer size + self.d = resource.uncompressed_size # Data size + self.n = len(resource.parts) # Number of parts + self.h = resource.hash # Resource hash + self.r = resource.random_hash # Resource random hash + self.m = resource.hashmap # Resource hashmap + self.c = resource.compressed # Compression flag + self.e = resource.encrypted # Encryption flag + self.f = 0x00 | self.c << 1 | self.e # Flags + + def pack(self, segment=0): + hashmap_start = segment*ResourceAdvertisement.HASHMAP_MAX_LEN + hashmap_end = min((segment+1)*ResourceAdvertisement.HASHMAP_MAX_LEN, self.n) + + hashmap = "" + for i in range(hashmap_start,hashmap_end): + hashmap += self.m[i*Resource.MAPHASH_LEN:(i+1)*Resource.MAPHASH_LEN] + + dictionary = { + u"t": self.t, + u"d": self.d, + u"n": self.n, + u"h": self.h, + u"r": self.r, + u"f": self.f, + u"m": hashmap + } + + return umsgpack.packb(dictionary) + + @staticmethod + def unpack(data): + dictionary = umsgpack.unpackb(data) + + adv = ResourceAdvertisement() + adv.t = dictionary["t"] + adv.d = dictionary["d"] + adv.n = dictionary["n"] + adv.h = dictionary["h"] + adv.r = dictionary["r"] + adv.m = dictionary["m"] + adv.f = dictionary["f"] + adv.e = True if (adv.f & 0x01) == 0x01 else False + adv.c = True if ((adv.f >> 1) & 0x01) == 0x01 else False + + return adv diff --git a/RNS/Reticulum.py b/RNS/Reticulum.py index e078053..177d9ba 100755 --- a/RNS/Reticulum.py +++ b/RNS/Reticulum.py @@ -29,7 +29,7 @@ class Reticulum: Reticulum.cachepath = Reticulum.configdir+"/storage/cache" Reticulum.__allow_unencrypted = False - Reticulum.__use_implicit_proof = False + Reticulum.__use_implicit_proof = True if not os.path.isdir(Reticulum.storagepath): os.makedirs(Reticulum.storagepath) @@ -41,7 
+41,7 @@ class Reticulum: self.config = ConfigObj(self.configpath) RNS.log("Configuration loaded from "+self.configpath) else: - RNS.log("Could not load config file, creating default configuration...") + RNS.log("Could not load config file, creating default configuration file...") self.createDefaultConfig() RNS.log("Default config file created. Make any necessary changes in "+Reticulum.configdir+"/config and start Reticulum again.") RNS.log("Exiting now!") @@ -244,11 +244,8 @@ class Reticulum: except Exception as e: RNS.log("The interface \""+name+"\" could not be created. Check your configuration file for errors!", RNS.LOG_ERROR) RNS.log("The contained exception was: "+str(e), RNS.LOG_ERROR) - #traceback.print_exc() - - def createDefaultConfig(self): self.config = ConfigObj() self.config.filename = Reticulum.configpath diff --git a/RNS/Transport.py b/RNS/Transport.py index ba824ba..cf93038 100755 --- a/RNS/Transport.py +++ b/RNS/Transport.py @@ -115,18 +115,19 @@ class Transport: if packet.packet_type == RNS.Packet.ANNOUNCE: if RNS.Identity.validateAnnounce(packet): Transport.cache(packet) - - if packet.packet_type == RNS.Packet.LINKREQUEST: + + elif packet.packet_type == RNS.Packet.LINKREQUEST: for destination in Transport.destinations: if destination.hash == packet.destination_hash and destination.type == packet.destination_type: packet.destination = destination destination.receive(packet) Transport.cache(packet) - if packet.packet_type == RNS.Packet.DATA: + elif packet.packet_type == RNS.Packet.DATA: if packet.destination_type == RNS.Destination.LINK: for link in Transport.active_links: if link.link_id == packet.destination_hash: + packet.link = link link.receive(packet) Transport.cache(packet) else: @@ -143,13 +144,17 @@ class Transport: if destination.callbacks.proof_requested: destination.callbacks.proof_requested(packet) - if packet.packet_type == RNS.Packet.PROOF: - if packet.header_type == RNS.Packet.HEADER_3: + elif packet.packet_type == RNS.Packet.PROOF: + if packet.context == RNS.Packet.LRPROOF: # This is a link request proof, forward # to a waiting link request for link in Transport.pending_links: if link.link_id == packet.destination_hash: link.validateProof(packet) + elif packet.context == RNS.Packet.RESOURCE_PRF: + for link in Transport.active_links: + if link.link_id == packet.destination_hash: + link.receive(packet) else: # TODO: Make sure everything uses new proof handling if len(packet.data) == RNS.PacketReceipt.EXPL_LENGTH: diff --git a/RNS/__init__.py b/RNS/__init__.py index e632b60..0483e7a 100755 --- a/RNS/__init__.py +++ b/RNS/__init__.py @@ -10,6 +10,7 @@ from .Transport import Transport from .Destination import Destination from .Packet import Packet from .Packet import PacketReceipt +from .Resource import Resource modules = glob.glob(os.path.dirname(__file__)+"/*.py") __all__ = [ os.path.basename(f)[:-3] for f in modules if not f.endswith('__init__.py')] diff --git a/RNS/vendor/umsgpack.py b/RNS/vendor/umsgpack.py new file mode 100644 index 0000000..139ab98 --- /dev/null +++ b/RNS/vendor/umsgpack.py @@ -0,0 +1,1134 @@ +# u-msgpack-python v2.5.0 - v at sergeev.io +# https://github.com/vsergeev/u-msgpack-python +# +# u-msgpack-python is a lightweight MessagePack serializer and deserializer +# module, compatible with both Python 2 and 3, as well CPython and PyPy +# implementations of Python. u-msgpack-python is fully compliant with the +# latest MessagePack specification.com/msgpack/msgpack/blob/master/spec.md). 
In +# particular, it supports the new binary, UTF-8 string, and application ext +# types. +# +# MIT License +# +# Copyright (c) 2013-2016 vsergeev / Ivan (Vanya) A. Sergeev +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# +""" +u-msgpack-python v2.5.0 - v at sergeev.io +https://github.com/vsergeev/u-msgpack-python + +u-msgpack-python is a lightweight MessagePack serializer and deserializer +module, compatible with both Python 2 and 3, as well CPython and PyPy +implementations of Python. u-msgpack-python is fully compliant with the +latest MessagePack specification.com/msgpack/msgpack/blob/master/spec.md). In +particular, it supports the new binary, UTF-8 string, and application ext +types. + +License: MIT +""" +import struct +import collections +import datetime +import sys +import io + +__version__ = "2.5.0" +"Module version string" + +version = (2, 5, 0) +"Module version tuple" + + +############################################################################## +# Ext Class +############################################################################## + +# Extension type for application-defined types and data +class Ext: + """ + The Ext class facilitates creating a serializable extension object to store + an application-defined type and data byte array. + """ + + def __init__(self, type, data): + """ + Construct a new Ext object. + + Args: + type: application-defined type integer + data: application-defined data byte array + + Example: + >>> foo = umsgpack.Ext(0x05, b"\x01\x02\x03") + >>> umsgpack.packb({u"special stuff": foo, u"awesome": True}) + '\x82\xa7awesome\xc3\xadspecial stuff\xc7\x03\x05\x01\x02\x03' + >>> bar = umsgpack.unpackb(_) + >>> print(bar["special stuff"]) + Ext Object (Type: 0x05, Data: 01 02 03) + >>> + """ + # Check type is type int + if not isinstance(type, int): + raise TypeError("ext type is not type integer") + # Check data is type bytes + elif sys.version_info[0] == 3 and not isinstance(data, bytes): + raise TypeError("ext data is not type \'bytes\'") + elif sys.version_info[0] == 2 and not isinstance(data, str): + raise TypeError("ext data is not type \'str\'") + self.type = type + self.data = data + + def __eq__(self, other): + """ + Compare this Ext object with another for equality. + """ + return (isinstance(other, self.__class__) and + self.type == other.type and + self.data == other.data) + + def __ne__(self, other): + """ + Compare this Ext object with another for inequality. 
+ """ + return not self.__eq__(other) + + def __str__(self): + """ + String representation of this Ext object. + """ + s = "Ext Object (Type: 0x%02x, Data: " % self.type + s += " ".join(["0x%02x" % ord(self.data[i:i + 1]) + for i in xrange(min(len(self.data), 8))]) + if len(self.data) > 8: + s += " ..." + s += ")" + return s + + def __hash__(self): + """ + Provide a hash of this Ext object. + """ + return hash((self.type, self.data)) + + +class InvalidString(bytes): + """Subclass of bytes to hold invalid UTF-8 strings.""" + pass + +############################################################################## +# Exceptions +############################################################################## + + +# Base Exception classes +class PackException(Exception): + "Base class for exceptions encountered during packing." + pass + + +class UnpackException(Exception): + "Base class for exceptions encountered during unpacking." + pass + + +# Packing error +class UnsupportedTypeException(PackException): + "Object type not supported for packing." + pass + + +# Unpacking error +class InsufficientDataException(UnpackException): + "Insufficient data to unpack the serialized object." + pass + + +class InvalidStringException(UnpackException): + "Invalid UTF-8 string encountered during unpacking." + pass + + +class UnsupportedTimestampException(UnpackException): + "Unsupported timestamp format encountered during unpacking." + pass + + +class ReservedCodeException(UnpackException): + "Reserved code encountered during unpacking." + pass + + +class UnhashableKeyException(UnpackException): + """ + Unhashable key encountered during map unpacking. + The serialized map cannot be deserialized into a Python dictionary. + """ + pass + + +class DuplicateKeyException(UnpackException): + "Duplicate key encountered during map unpacking." + pass + + +# Backwards compatibility +KeyNotPrimitiveException = UnhashableKeyException +KeyDuplicateException = DuplicateKeyException + +############################################################################# +# Exported Functions and Glob +############################################################################# + +# Exported functions and variables, set up in __init() +pack = None +packb = None +unpack = None +unpackb = None +dump = None +dumps = None +load = None +loads = None + +compatibility = False +""" +Compatibility mode boolean. + +When compatibility mode is enabled, u-msgpack-python will serialize both +unicode strings and bytes into the old "raw" msgpack type, and deserialize the +"raw" msgpack type into bytes. This provides backwards compatibility with the +old MessagePack specification. + +Example: +>>> umsgpack.compatibility = True +>>> +>>> umsgpack.packb([u"some string", b"some bytes"]) +b'\x92\xabsome string\xaasome bytes' +>>> umsgpack.unpackb(_) +[b'some string', b'some bytes'] +>>> +""" + +############################################################################## +# Packing +############################################################################## + +# You may notice struct.pack("B", obj) instead of the simpler chr(obj) in the +# code below. This is to allow for seamless Python 2 and 3 compatibility, as +# chr(obj) has a str return type instead of bytes in Python 3, and +# struct.pack(...) has the right return type in both versions. 
+ + +def _pack_integer(obj, fp, options): + if obj < 0: + if obj >= -32: + fp.write(struct.pack("b", obj)) + elif obj >= -2**(8 - 1): + fp.write(b"\xd0" + struct.pack("b", obj)) + elif obj >= -2**(16 - 1): + fp.write(b"\xd1" + struct.pack(">h", obj)) + elif obj >= -2**(32 - 1): + fp.write(b"\xd2" + struct.pack(">i", obj)) + elif obj >= -2**(64 - 1): + fp.write(b"\xd3" + struct.pack(">q", obj)) + else: + raise UnsupportedTypeException("huge signed int") + else: + if obj <= 127: + fp.write(struct.pack("B", obj)) + elif obj <= 2**8 - 1: + fp.write(b"\xcc" + struct.pack("B", obj)) + elif obj <= 2**16 - 1: + fp.write(b"\xcd" + struct.pack(">H", obj)) + elif obj <= 2**32 - 1: + fp.write(b"\xce" + struct.pack(">I", obj)) + elif obj <= 2**64 - 1: + fp.write(b"\xcf" + struct.pack(">Q", obj)) + else: + raise UnsupportedTypeException("huge unsigned int") + + +def _pack_nil(obj, fp, options): + fp.write(b"\xc0") + + +def _pack_boolean(obj, fp, options): + fp.write(b"\xc3" if obj else b"\xc2") + + +def _pack_float(obj, fp, options): + float_precision = options.get('force_float_precision', _float_precision) + + if float_precision == "double": + fp.write(b"\xcb" + struct.pack(">d", obj)) + elif float_precision == "single": + fp.write(b"\xca" + struct.pack(">f", obj)) + else: + raise ValueError("invalid float precision") + + +def _pack_string(obj, fp, options): + obj = obj.encode('utf-8') + if len(obj) <= 31: + fp.write(struct.pack("B", 0xa0 | len(obj)) + obj) + elif len(obj) <= 2**8 - 1: + fp.write(b"\xd9" + struct.pack("B", len(obj)) + obj) + elif len(obj) <= 2**16 - 1: + fp.write(b"\xda" + struct.pack(">H", len(obj)) + obj) + elif len(obj) <= 2**32 - 1: + fp.write(b"\xdb" + struct.pack(">I", len(obj)) + obj) + else: + raise UnsupportedTypeException("huge string") + + +def _pack_binary(obj, fp, options): + if len(obj) <= 2**8 - 1: + fp.write(b"\xc4" + struct.pack("B", len(obj)) + obj) + elif len(obj) <= 2**16 - 1: + fp.write(b"\xc5" + struct.pack(">H", len(obj)) + obj) + elif len(obj) <= 2**32 - 1: + fp.write(b"\xc6" + struct.pack(">I", len(obj)) + obj) + else: + raise UnsupportedTypeException("huge binary string") + + +def _pack_oldspec_raw(obj, fp, options): + if len(obj) <= 31: + fp.write(struct.pack("B", 0xa0 | len(obj)) + obj) + elif len(obj) <= 2**16 - 1: + fp.write(b"\xda" + struct.pack(">H", len(obj)) + obj) + elif len(obj) <= 2**32 - 1: + fp.write(b"\xdb" + struct.pack(">I", len(obj)) + obj) + else: + raise UnsupportedTypeException("huge raw string") + + +def _pack_ext(obj, fp, options): + if len(obj.data) == 1: + fp.write(b"\xd4" + struct.pack("B", obj.type & 0xff) + obj.data) + elif len(obj.data) == 2: + fp.write(b"\xd5" + struct.pack("B", obj.type & 0xff) + obj.data) + elif len(obj.data) == 4: + fp.write(b"\xd6" + struct.pack("B", obj.type & 0xff) + obj.data) + elif len(obj.data) == 8: + fp.write(b"\xd7" + struct.pack("B", obj.type & 0xff) + obj.data) + elif len(obj.data) == 16: + fp.write(b"\xd8" + struct.pack("B", obj.type & 0xff) + obj.data) + elif len(obj.data) <= 2**8 - 1: + fp.write(b"\xc7" + + struct.pack("BB", len(obj.data), obj.type & 0xff) + obj.data) + elif len(obj.data) <= 2**16 - 1: + fp.write(b"\xc8" + + struct.pack(">HB", len(obj.data), obj.type & 0xff) + obj.data) + elif len(obj.data) <= 2**32 - 1: + fp.write(b"\xc9" + + struct.pack(">IB", len(obj.data), obj.type & 0xff) + obj.data) + else: + raise UnsupportedTypeException("huge ext data") + + +def _pack_ext_timestamp(obj, fp, options): + delta = obj - _epoch + seconds = delta.seconds + delta.days * 86400 + microseconds = 
delta.microseconds + + if microseconds == 0 and 0 <= seconds <= 2**32 - 1: + # 32-bit timestamp + fp.write(b"\xd6\xff" + + struct.pack(">I", seconds)) + elif 0 <= seconds <= 2**34 - 1: + # 64-bit timestamp + value = ((microseconds * 1000) << 34) | seconds + fp.write(b"\xd7\xff" + + struct.pack(">Q", value)) + elif -2**63 <= abs(seconds) <= 2**63 - 1: + # 96-bit timestamp + fp.write(b"\xc7\x0c\xff" + + struct.pack(">I", microseconds * 1000) + + struct.pack(">q", seconds)) + else: + raise UnsupportedTypeException("huge timestamp") + + +def _pack_array(obj, fp, options): + if len(obj) <= 15: + fp.write(struct.pack("B", 0x90 | len(obj))) + elif len(obj) <= 2**16 - 1: + fp.write(b"\xdc" + struct.pack(">H", len(obj))) + elif len(obj) <= 2**32 - 1: + fp.write(b"\xdd" + struct.pack(">I", len(obj))) + else: + raise UnsupportedTypeException("huge array") + + for e in obj: + pack(e, fp, **options) + + +def _pack_map(obj, fp, options): + if len(obj) <= 15: + fp.write(struct.pack("B", 0x80 | len(obj))) + elif len(obj) <= 2**16 - 1: + fp.write(b"\xde" + struct.pack(">H", len(obj))) + elif len(obj) <= 2**32 - 1: + fp.write(b"\xdf" + struct.pack(">I", len(obj))) + else: + raise UnsupportedTypeException("huge array") + + for k, v in obj.items(): + pack(k, fp, **options) + pack(v, fp, **options) + +######################################## + + +# Pack for Python 2, with 'unicode' type, 'str' type, and 'long' type +def _pack2(obj, fp, **options): + """ + Serialize a Python object into MessagePack bytes. + + Args: + obj: a Python object + fp: a .write()-supporting file-like object + + Kwargs: + ext_handlers (dict): dictionary of Ext handlers, mapping a custom type + to a callable that packs an instance of the type + into an Ext object + force_float_precision (str): "single" to force packing floats as + IEEE-754 single-precision floats, + "double" to force packing floats as + IEEE-754 double-precision floats. + + Returns: + None. + + Raises: + UnsupportedType(PackException): + Object type not supported for packing. 
+ + Example: + >>> f = open('test.bin', 'wb') + >>> umsgpack.pack({u"compact": True, u"schema": 0}, f) + >>> + """ + global compatibility + + ext_handlers = options.get("ext_handlers") + + if obj is None: + _pack_nil(obj, fp, options) + elif ext_handlers and obj.__class__ in ext_handlers: + _pack_ext(ext_handlers[obj.__class__](obj), fp, options) + elif isinstance(obj, bool): + _pack_boolean(obj, fp, options) + elif isinstance(obj, int) or isinstance(obj, long): + _pack_integer(obj, fp, options) + elif isinstance(obj, float): + _pack_float(obj, fp, options) + elif compatibility and isinstance(obj, unicode): + _pack_oldspec_raw(bytes(obj), fp, options) + elif compatibility and isinstance(obj, bytes): + _pack_oldspec_raw(obj, fp, options) + elif isinstance(obj, unicode): + _pack_string(obj, fp, options) + elif isinstance(obj, str): + _pack_binary(obj, fp, options) + elif isinstance(obj, list) or isinstance(obj, tuple): + _pack_array(obj, fp, options) + elif isinstance(obj, dict): + _pack_map(obj, fp, options) + elif isinstance(obj, datetime.datetime): + _pack_ext_timestamp(obj, fp, options) + elif isinstance(obj, Ext): + _pack_ext(obj, fp, options) + elif ext_handlers: + # Linear search for superclass + t = next((t for t in ext_handlers.keys() if isinstance(obj, t)), None) + if t: + _pack_ext(ext_handlers[t](obj), fp, options) + else: + raise UnsupportedTypeException( + "unsupported type: %s" % str(type(obj))) + else: + raise UnsupportedTypeException("unsupported type: %s" % str(type(obj))) + + +# Pack for Python 3, with unicode 'str' type, 'bytes' type, and no 'long' type +def _pack3(obj, fp, **options): + """ + Serialize a Python object into MessagePack bytes. + + Args: + obj: a Python object + fp: a .write()-supporting file-like object + + Kwargs: + ext_handlers (dict): dictionary of Ext handlers, mapping a custom type + to a callable that packs an instance of the type + into an Ext object + force_float_precision (str): "single" to force packing floats as + IEEE-754 single-precision floats, + "double" to force packing floats as + IEEE-754 double-precision floats. + + Returns: + None. + + Raises: + UnsupportedType(PackException): + Object type not supported for packing. 
+ + Example: + >>> f = open('test.bin', 'wb') + >>> umsgpack.pack({u"compact": True, u"schema": 0}, f) + >>> + """ + global compatibility + + ext_handlers = options.get("ext_handlers") + + if obj is None: + _pack_nil(obj, fp, options) + elif ext_handlers and obj.__class__ in ext_handlers: + _pack_ext(ext_handlers[obj.__class__](obj), fp, options) + elif isinstance(obj, bool): + _pack_boolean(obj, fp, options) + elif isinstance(obj, int): + _pack_integer(obj, fp, options) + elif isinstance(obj, float): + _pack_float(obj, fp, options) + elif compatibility and isinstance(obj, str): + _pack_oldspec_raw(obj.encode('utf-8'), fp, options) + elif compatibility and isinstance(obj, bytes): + _pack_oldspec_raw(obj, fp, options) + elif isinstance(obj, str): + _pack_string(obj, fp, options) + elif isinstance(obj, bytes): + _pack_binary(obj, fp, options) + elif isinstance(obj, list) or isinstance(obj, tuple): + _pack_array(obj, fp, options) + elif isinstance(obj, dict): + _pack_map(obj, fp, options) + elif isinstance(obj, datetime.datetime): + _pack_ext_timestamp(obj, fp, options) + elif isinstance(obj, Ext): + _pack_ext(obj, fp, options) + elif ext_handlers: + # Linear search for superclass + t = next((t for t in ext_handlers.keys() if isinstance(obj, t)), None) + if t: + _pack_ext(ext_handlers[t](obj), fp, options) + else: + raise UnsupportedTypeException( + "unsupported type: %s" % str(type(obj))) + else: + raise UnsupportedTypeException( + "unsupported type: %s" % str(type(obj))) + + +def _packb2(obj, **options): + """ + Serialize a Python object into MessagePack bytes. + + Args: + obj: a Python object + + Kwargs: + ext_handlers (dict): dictionary of Ext handlers, mapping a custom type + to a callable that packs an instance of the type + into an Ext object + force_float_precision (str): "single" to force packing floats as + IEEE-754 single-precision floats, + "double" to force packing floats as + IEEE-754 double-precision floats. + + Returns: + A 'str' containing serialized MessagePack bytes. + + Raises: + UnsupportedType(PackException): + Object type not supported for packing. + + Example: + >>> umsgpack.packb({u"compact": True, u"schema": 0}) + '\x82\xa7compact\xc3\xa6schema\x00' + >>> + """ + fp = io.BytesIO() + _pack2(obj, fp, **options) + return fp.getvalue() + + +def _packb3(obj, **options): + """ + Serialize a Python object into MessagePack bytes. + + Args: + obj: a Python object + + Kwargs: + ext_handlers (dict): dictionary of Ext handlers, mapping a custom type + to a callable that packs an instance of the type + into an Ext object + force_float_precision (str): "single" to force packing floats as + IEEE-754 single-precision floats, + "double" to force packing floats as + IEEE-754 double-precision floats. + + Returns: + A 'bytes' containing serialized MessagePack bytes. + + Raises: + UnsupportedType(PackException): + Object type not supported for packing. 
+ + Example: + >>> umsgpack.packb({u"compact": True, u"schema": 0}) + b'\x82\xa7compact\xc3\xa6schema\x00' + >>> + """ + fp = io.BytesIO() + _pack3(obj, fp, **options) + return fp.getvalue() + +############################################################################# +# Unpacking +############################################################################# + + +def _read_except(fp, n): + data = fp.read(n) + if len(data) < n: + raise InsufficientDataException() + return data + + +def _unpack_integer(code, fp, options): + if (ord(code) & 0xe0) == 0xe0: + return struct.unpack("b", code)[0] + elif code == b'\xd0': + return struct.unpack("b", _read_except(fp, 1))[0] + elif code == b'\xd1': + return struct.unpack(">h", _read_except(fp, 2))[0] + elif code == b'\xd2': + return struct.unpack(">i", _read_except(fp, 4))[0] + elif code == b'\xd3': + return struct.unpack(">q", _read_except(fp, 8))[0] + elif (ord(code) & 0x80) == 0x00: + return struct.unpack("B", code)[0] + elif code == b'\xcc': + return struct.unpack("B", _read_except(fp, 1))[0] + elif code == b'\xcd': + return struct.unpack(">H", _read_except(fp, 2))[0] + elif code == b'\xce': + return struct.unpack(">I", _read_except(fp, 4))[0] + elif code == b'\xcf': + return struct.unpack(">Q", _read_except(fp, 8))[0] + raise Exception("logic error, not int: 0x%02x" % ord(code)) + + +def _unpack_reserved(code, fp, options): + if code == b'\xc1': + raise ReservedCodeException( + "encountered reserved code: 0x%02x" % ord(code)) + raise Exception( + "logic error, not reserved code: 0x%02x" % ord(code)) + + +def _unpack_nil(code, fp, options): + if code == b'\xc0': + return None + raise Exception("logic error, not nil: 0x%02x" % ord(code)) + + +def _unpack_boolean(code, fp, options): + if code == b'\xc2': + return False + elif code == b'\xc3': + return True + raise Exception("logic error, not boolean: 0x%02x" % ord(code)) + + +def _unpack_float(code, fp, options): + if code == b'\xca': + return struct.unpack(">f", _read_except(fp, 4))[0] + elif code == b'\xcb': + return struct.unpack(">d", _read_except(fp, 8))[0] + raise Exception("logic error, not float: 0x%02x" % ord(code)) + + +def _unpack_string(code, fp, options): + if (ord(code) & 0xe0) == 0xa0: + length = ord(code) & ~0xe0 + elif code == b'\xd9': + length = struct.unpack("B", _read_except(fp, 1))[0] + elif code == b'\xda': + length = struct.unpack(">H", _read_except(fp, 2))[0] + elif code == b'\xdb': + length = struct.unpack(">I", _read_except(fp, 4))[0] + else: + raise Exception("logic error, not string: 0x%02x" % ord(code)) + + # Always return raw bytes in compatibility mode + global compatibility + if compatibility: + return _read_except(fp, length) + + data = _read_except(fp, length) + try: + return bytes.decode(data, 'utf-8') + except UnicodeDecodeError: + if options.get("allow_invalid_utf8"): + return InvalidString(data) + raise InvalidStringException("unpacked string is invalid utf-8") + + +def _unpack_binary(code, fp, options): + if code == b'\xc4': + length = struct.unpack("B", _read_except(fp, 1))[0] + elif code == b'\xc5': + length = struct.unpack(">H", _read_except(fp, 2))[0] + elif code == b'\xc6': + length = struct.unpack(">I", _read_except(fp, 4))[0] + else: + raise Exception("logic error, not binary: 0x%02x" % ord(code)) + + return _read_except(fp, length) + + +def _unpack_ext(code, fp, options): + if code == b'\xd4': + length = 1 + elif code == b'\xd5': + length = 2 + elif code == b'\xd6': + length = 4 + elif code == b'\xd7': + length = 8 + elif code == b'\xd8': + length = 
16 + elif code == b'\xc7': + length = struct.unpack("B", _read_except(fp, 1))[0] + elif code == b'\xc8': + length = struct.unpack(">H", _read_except(fp, 2))[0] + elif code == b'\xc9': + length = struct.unpack(">I", _read_except(fp, 4))[0] + else: + raise Exception("logic error, not ext: 0x%02x" % ord(code)) + + ext_type = struct.unpack("b", _read_except(fp, 1))[0] + ext_data = _read_except(fp, length) + + # Create extension object + ext = Ext(ext_type, ext_data) + + # Unpack with ext handler, if we have one + ext_handlers = options.get("ext_handlers") + if ext_handlers and ext.type in ext_handlers: + return ext_handlers[ext.type](ext) + + # Timestamp extension + if ext.type == -1: + return _unpack_ext_timestamp(ext, options) + + return ext + + +def _unpack_ext_timestamp(ext, options): + if len(ext.data) == 4: + # 32-bit timestamp + seconds = struct.unpack(">I", ext.data)[0] + microseconds = 0 + elif len(ext.data) == 8: + # 64-bit timestamp + value = struct.unpack(">Q", ext.data)[0] + seconds = value & 0x3ffffffff + microseconds = (value >> 34) // 1000 + elif len(ext.data) == 12: + # 96-bit timestamp + seconds = struct.unpack(">q", ext.data[4:12])[0] + microseconds = struct.unpack(">I", ext.data[0:4])[0] // 1000 + else: + raise UnsupportedTimestampException( + "unsupported timestamp with data length %d" % len(ext.data)) + + return _epoch + datetime.timedelta(seconds=seconds, + microseconds=microseconds) + + +def _unpack_array(code, fp, options): + if (ord(code) & 0xf0) == 0x90: + length = (ord(code) & ~0xf0) + elif code == b'\xdc': + length = struct.unpack(">H", _read_except(fp, 2))[0] + elif code == b'\xdd': + length = struct.unpack(">I", _read_except(fp, 4))[0] + else: + raise Exception("logic error, not array: 0x%02x" % ord(code)) + + return [_unpack(fp, options) for i in xrange(length)] + + +def _deep_list_to_tuple(obj): + if isinstance(obj, list): + return tuple([_deep_list_to_tuple(e) for e in obj]) + return obj + + +def _unpack_map(code, fp, options): + if (ord(code) & 0xf0) == 0x80: + length = (ord(code) & ~0xf0) + elif code == b'\xde': + length = struct.unpack(">H", _read_except(fp, 2))[0] + elif code == b'\xdf': + length = struct.unpack(">I", _read_except(fp, 4))[0] + else: + raise Exception("logic error, not map: 0x%02x" % ord(code)) + + d = {} if not options.get('use_ordered_dict') \ + else collections.OrderedDict() + for _ in xrange(length): + # Unpack key + k = _unpack(fp, options) + + if isinstance(k, list): + # Attempt to convert list into a hashable tuple + k = _deep_list_to_tuple(k) + elif not isinstance(k, collections.Hashable): + raise UnhashableKeyException( + "encountered unhashable key: %s, %s" % (str(k), str(type(k)))) + elif k in d: + raise DuplicateKeyException( + "encountered duplicate key: %s, %s" % (str(k), str(type(k)))) + + # Unpack value + v = _unpack(fp, options) + + try: + d[k] = v + except TypeError: + raise UnhashableKeyException( + "encountered unhashable key: %s" % str(k)) + return d + + +def _unpack(fp, options): + code = _read_except(fp, 1) + return _unpack_dispatch_table[code](code, fp, options) + +######################################## + + +def _unpack2(fp, **options): + """ + Deserialize MessagePack bytes into a Python object. 
+
+    Args:
+        fp: a .read()-supporting file-like object
+
+    Kwargs:
+        ext_handlers (dict): dictionary of Ext handlers, mapping integer Ext
+                             type to a callable that unpacks an instance of
+                             Ext into an object
+        use_ordered_dict (bool): unpack maps into OrderedDict, instead of
+                                 unordered dict (default False)
+        allow_invalid_utf8 (bool): unpack invalid strings into instances of
+                                   InvalidString, for access to the bytes
+                                   (default False)
+
+    Returns:
+        A Python object.
+
+    Raises:
+        InsufficientDataException(UnpackException):
+            Insufficient data to unpack the serialized object.
+        InvalidStringException(UnpackException):
+            Invalid UTF-8 string encountered during unpacking.
+        UnsupportedTimestampException(UnpackException):
+            Unsupported timestamp format encountered during unpacking.
+        ReservedCodeException(UnpackException):
+            Reserved code encountered during unpacking.
+        UnhashableKeyException(UnpackException):
+            Unhashable key encountered during map unpacking.
+            The serialized map cannot be deserialized into a Python dictionary.
+        DuplicateKeyException(UnpackException):
+            Duplicate key encountered during map unpacking.
+
+    Example:
+    >>> f = open('test.bin', 'rb')
+    >>> umsgpack.unpack(f)
+    {u'compact': True, u'schema': 0}
+    >>>
+    """
+    return _unpack(fp, options)
+
+
+def _unpack3(fp, **options):
+    """
+    Deserialize MessagePack bytes into a Python object.
+
+    Args:
+        fp: a .read()-supporting file-like object
+
+    Kwargs:
+        ext_handlers (dict): dictionary of Ext handlers, mapping integer Ext
+                             type to a callable that unpacks an instance of
+                             Ext into an object
+        use_ordered_dict (bool): unpack maps into OrderedDict, instead of
+                                 unordered dict (default False)
+        allow_invalid_utf8 (bool): unpack invalid strings into instances of
+                                   InvalidString, for access to the bytes
+                                   (default False)
+
+    Returns:
+        A Python object.
+
+    Raises:
+        InsufficientDataException(UnpackException):
+            Insufficient data to unpack the serialized object.
+        InvalidStringException(UnpackException):
+            Invalid UTF-8 string encountered during unpacking.
+        UnsupportedTimestampException(UnpackException):
+            Unsupported timestamp format encountered during unpacking.
+        ReservedCodeException(UnpackException):
+            Reserved code encountered during unpacking.
+        UnhashableKeyException(UnpackException):
+            Unhashable key encountered during map unpacking.
+            The serialized map cannot be deserialized into a Python dictionary.
+        DuplicateKeyException(UnpackException):
+            Duplicate key encountered during map unpacking.
+
+    Example:
+    >>> f = open('test.bin', 'rb')
+    >>> umsgpack.unpack(f)
+    {'compact': True, 'schema': 0}
+    >>>
+    """
+    return _unpack(fp, options)
+
+
+# For Python 2, expects a str object
+def _unpackb2(s, **options):
+    """
+    Deserialize MessagePack bytes into a Python object.
+
+    Args:
+        s: a 'str' or 'bytearray' containing serialized MessagePack bytes
+
+    Kwargs:
+        ext_handlers (dict): dictionary of Ext handlers, mapping integer Ext
+                             type to a callable that unpacks an instance of
+                             Ext into an object
+        use_ordered_dict (bool): unpack maps into OrderedDict, instead of
+                                 unordered dict (default False)
+        allow_invalid_utf8 (bool): unpack invalid strings into instances of
+                                   InvalidString, for access to the bytes
+                                   (default False)
+
+    Returns:
+        A Python object.
+
+    Raises:
+        TypeError:
+            Packed data type is neither 'str' nor 'bytearray'.
+        InsufficientDataException(UnpackException):
+            Insufficient data to unpack the serialized object.
+        InvalidStringException(UnpackException):
+            Invalid UTF-8 string encountered during unpacking.
+ UnsupportedTimestampException(UnpackException): + Unsupported timestamp format encountered during unpacking. + ReservedCodeException(UnpackException): + Reserved code encountered during unpacking. + UnhashableKeyException(UnpackException): + Unhashable key encountered during map unpacking. + The serialized map cannot be deserialized into a Python dictionary. + DuplicateKeyException(UnpackException): + Duplicate key encountered during map unpacking. + + Example: + >>> umsgpack.unpackb(b'\x82\xa7compact\xc3\xa6schema\x00') + {u'compact': True, u'schema': 0} + >>> + """ + if not isinstance(s, (str, bytearray)): + raise TypeError("packed data must be type 'str' or 'bytearray'") + return _unpack(io.BytesIO(s), options) + + +# For Python 3, expects a bytes object +def _unpackb3(s, **options): + """ + Deserialize MessagePack bytes into a Python object. + + Args: + s: a 'bytes' or 'bytearray' containing serialized MessagePack bytes + + Kwargs: + ext_handlers (dict): dictionary of Ext handlers, mapping integer Ext + type to a callable that unpacks an instance of + Ext into an object + use_ordered_dict (bool): unpack maps into OrderedDict, instead of + unordered dict (default False) + allow_invalid_utf8 (bool): unpack invalid strings into instances of + InvalidString, for access to the bytes + (default False) + + Returns: + A Python object. + + Raises: + TypeError: + Packed data type is neither 'bytes' nor 'bytearray'. + InsufficientDataException(UnpackException): + Insufficient data to unpack the serialized object. + InvalidStringException(UnpackException): + Invalid UTF-8 string encountered during unpacking. + UnsupportedTimestampException(UnpackException): + Unsupported timestamp format encountered during unpacking. + ReservedCodeException(UnpackException): + Reserved code encountered during unpacking. + UnhashableKeyException(UnpackException): + Unhashable key encountered during map unpacking. + The serialized map cannot be deserialized into a Python dictionary. + DuplicateKeyException(UnpackException): + Duplicate key encountered during map unpacking. 
+ + Example: + >>> umsgpack.unpackb(b'\x82\xa7compact\xc3\xa6schema\x00') + {'compact': True, 'schema': 0} + >>> + """ + if not isinstance(s, (bytes, bytearray)): + raise TypeError("packed data must be type 'bytes' or 'bytearray'") + return _unpack(io.BytesIO(s), options) + +############################################################################# +# Module Initialization +############################################################################# + + +def __init(): + global pack + global packb + global unpack + global unpackb + global dump + global dumps + global load + global loads + global compatibility + global _epoch + global _utc_tzinfo + global _float_precision + global _unpack_dispatch_table + global xrange + + # Compatibility mode for handling strings/bytes with the old specification + compatibility = False + + if sys.version_info[0] == 3: + _utc_tzinfo = datetime.timezone.utc + else: + _utc_tzinfo = None + + # Calculate epoch datetime + _epoch = datetime.datetime(1970, 1, 1, tzinfo=_utc_tzinfo) + + # Auto-detect system float precision + if sys.float_info.mant_dig == 53: + _float_precision = "double" + else: + _float_precision = "single" + + # Map packb and unpackb to the appropriate version + if sys.version_info[0] == 3: + pack = _pack3 + packb = _packb3 + dump = _pack3 + dumps = _packb3 + unpack = _unpack3 + unpackb = _unpackb3 + load = _unpack3 + loads = _unpackb3 + xrange = range + else: + pack = _pack2 + packb = _packb2 + dump = _pack2 + dumps = _packb2 + unpack = _unpack2 + unpackb = _unpackb2 + load = _unpack2 + loads = _unpackb2 + + # Build a dispatch table for fast lookup of unpacking function + + _unpack_dispatch_table = {} + # Fix uint + for code in range(0, 0x7f + 1): + _unpack_dispatch_table[struct.pack("B", code)] = _unpack_integer + # Fix map + for code in range(0x80, 0x8f + 1): + _unpack_dispatch_table[struct.pack("B", code)] = _unpack_map + # Fix array + for code in range(0x90, 0x9f + 1): + _unpack_dispatch_table[struct.pack("B", code)] = _unpack_array + # Fix str + for code in range(0xa0, 0xbf + 1): + _unpack_dispatch_table[struct.pack("B", code)] = _unpack_string + # Nil + _unpack_dispatch_table[b'\xc0'] = _unpack_nil + # Reserved + _unpack_dispatch_table[b'\xc1'] = _unpack_reserved + # Boolean + _unpack_dispatch_table[b'\xc2'] = _unpack_boolean + _unpack_dispatch_table[b'\xc3'] = _unpack_boolean + # Bin + for code in range(0xc4, 0xc6 + 1): + _unpack_dispatch_table[struct.pack("B", code)] = _unpack_binary + # Ext + for code in range(0xc7, 0xc9 + 1): + _unpack_dispatch_table[struct.pack("B", code)] = _unpack_ext + # Float + _unpack_dispatch_table[b'\xca'] = _unpack_float + _unpack_dispatch_table[b'\xcb'] = _unpack_float + # Uint + for code in range(0xcc, 0xcf + 1): + _unpack_dispatch_table[struct.pack("B", code)] = _unpack_integer + # Int + for code in range(0xd0, 0xd3 + 1): + _unpack_dispatch_table[struct.pack("B", code)] = _unpack_integer + # Fixext + for code in range(0xd4, 0xd8 + 1): + _unpack_dispatch_table[struct.pack("B", code)] = _unpack_ext + # String + for code in range(0xd9, 0xdb + 1): + _unpack_dispatch_table[struct.pack("B", code)] = _unpack_string + # Array + _unpack_dispatch_table[b'\xdc'] = _unpack_array + _unpack_dispatch_table[b'\xdd'] = _unpack_array + # Map + _unpack_dispatch_table[b'\xde'] = _unpack_map + _unpack_dispatch_table[b'\xdf'] = _unpack_map + # Negative fixint + for code in range(0xe0, 0xff + 1): + _unpack_dispatch_table[struct.pack("B", code)] = _unpack_integer + + +__init()
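A minimal usage sketch of the vendored serializer follows. It assumes the file lands at RNS/vendor/umsgpack.py as created by this patch and is importable as RNS.vendor.umsgpack; the packb/unpackb and dump/load names, and the ext_handlers option, are the ones bound by __init() at the end of the module. The dictionary contents and the extension type code 42 are purely illustrative.

    import io
    import RNS.vendor.umsgpack as umsgpack  # assumed import path for the vendored module

    # In-memory round trip, mirroring the packb()/unpackb() docstring examples.
    packed = umsgpack.packb({u"compact": True, u"schema": 0})
    assert umsgpack.unpackb(packed) == {u"compact": True, u"schema": 0}

    # File-object round trip with dump()/load(), the calls Identity.py switches
    # to for its known-destinations store.
    buf = io.BytesIO()
    umsgpack.dump({u"hops": 3}, buf)
    buf.seek(0)
    assert umsgpack.load(buf) == {u"hops": 3}

    # Application-defined extension type: 42 is an illustrative type code, and
    # the handler turns the raw Ext payload back into a bytearray on unpack.
    packed_ext = umsgpack.packb(umsgpack.Ext(42, b"\x01\x02\x03"))
    unpacked = umsgpack.unpackb(packed_ext, ext_handlers={42: lambda ext: bytearray(ext.data)})
    assert unpacked == bytearray(b"\x01\x02\x03")

With compatibility left at its default of False, string data is decoded as UTF-8 on unpack, which is why the text keys above round-trip as strings rather than raw bytes.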