Preliminary Python3 support

Mark Qvist 2020-04-22 12:07:13 +02:00
parent 9f8da39614
commit a339ae3d28
17 changed files with 356 additions and 259 deletions

View File

@ -61,7 +61,7 @@ def announceLoop(destination):
# destination on the network, which will let clients
# know how to create messages directed towards it.
while True:
entered = raw_input()
entered = input()
destination.announce()
RNS.log("Sent announce from "+RNS.prettyhexrep(destination.hash))
@ -86,7 +86,7 @@ def client(destination_hexhash, configpath, timeout=None):
try:
if len(destination_hexhash) != 20:
raise ValueError("Destination length is invalid, must be 20 hexadecimal characters (10 bytes)")
destination_hash = destination_hexhash.decode("hex")
destination_hash = bytes.fromhex(destination_hexhash)
except:
RNS.log("Invalid destination entered. Check your input!\n")
exit()
@ -106,7 +106,7 @@ def client(destination_hexhash, configpath, timeout=None):
# echo request to the destination specified on the
# command line.
while True:
raw_input()
input()
# Let's first check if RNS knows a path to the destination.
# If it does, we'll load the server identity and create a packet

View File

@ -62,7 +62,7 @@ def announceLoop(destination):
# destination on the network, which will let clients
# know how to create messages directed towards it.
while True:
entered = raw_input()
entered = input()
destination.announce()
RNS.log("Sent announce from "+RNS.prettyhexrep(destination.hash))
@ -116,18 +116,19 @@ def client_disconnected(link):
def client_request(message, packet):
global serve_path
if message in list_files():
filename = message.decode("utf-8")
if filename in list_files():
try:
# If we have the requested file, we'll
# read it and pack it as a resource
RNS.log("Client requested \""+message+"\"")
file = open(os.path.join(serve_path, message), "r")
RNS.log("Client requested \""+filename+"\"")
file = open(os.path.join(serve_path, filename), "rb")
file_resource = RNS.Resource(file.read(), packet.link, callback=resource_sending_concluded)
file_resource.filename = message
file_resource.filename = filename
except:
# If something went wrong, we close
# the link
RNS.log("Error while reading file \""+message+"\"", RNS.LOG_ERROR)
RNS.log("Error while reading file \""+filename+"\"", RNS.LOG_ERROR)
packet.link.teardown()
else:
# If we don't have it, we close the link
@ -172,7 +173,7 @@ def client(destination_hexhash, configpath):
try:
if len(destination_hexhash) != 20:
raise ValueError("Destination length is invalid, must be 20 hexadecimal characters (10 bytes)")
destination_hash = destination_hexhash.decode("hex")
destination_hash = bytes.fromhex(destination_hexhash)
except:
RNS.log("Invalid destination entered. Check your input!\n")
exit()
@ -230,11 +231,11 @@ def download(filename):
# We just create a packet containing the
# requested filename, and send it down the
# link.
request_packet = RNS.Packet(server_link, filename)
request_packet = RNS.Packet(server_link, filename.encode("utf-8"))
request_packet.send()
print("")
print("Requested \""+filename+"\" from server, waiting for download to begin...")
print(("Requested \""+filename+"\" from server, waiting for download to begin..."))
menu_mode = "download_started"
# This function runs a simple menu for the user
@ -258,7 +259,7 @@ def menu():
# Wait
time.sleep(0.25)
user_input = raw_input()
user_input = input()
if user_input == "q" or user_input == "quit" or user_input == "exit":
should_quit = True
print("")
@ -288,7 +289,7 @@ def print_menu():
print_filelist()
print("")
print("Select a file to download by entering name or number, or q to quit")
print("> "),
print(("> "), end=' ')
elif menu_mode == "download_started":
download_began = time.time()
while menu_mode == "download_started":
@ -305,12 +306,12 @@ def print_menu():
while menu_mode == "downloading":
global current_download
percent = round(current_download.progress() * 100.0, 1)
print("\rProgress: "+str(percent)+" % "),
print(("\rProgress: "+str(percent)+" % "), end=' ')
sys.stdout.flush()
time.sleep(0.1)
if menu_mode == "save_error":
print("\rProgress: 100.0 %"),
print(("\rProgress: 100.0 %"), end=' ')
sys.stdout.flush()
print("")
print("Could not write downloaded file to disk")
@ -319,16 +320,16 @@ def print_menu():
if menu_mode == "download_concluded":
if current_download.status == RNS.Resource.COMPLETE:
print("\rProgress: 100.0 %"),
print(("\rProgress: 100.0 %"), end=' ')
sys.stdout.flush()
print("")
print("The download completed! Pres enter to return to the menu.")
raw_input()
print("The download completed! Press enter to return to the menu.")
input()
else:
print("")
print("The download failed! Pres enter to return to the menu.")
raw_input()
print("The download failed! Press enter to return to the menu.")
input()
current_download = None
menu_mode = "main"
@ -431,7 +432,7 @@ def download_concluded(resource):
saved_filename = current_filename+"."+str(counter)
try:
file = open(saved_filename, "w")
file = open(saved_filename, "wb")
file.write(resource.data)
file.close()
menu_mode = "download_concluded"

Examples/Minimal.py (new file, 81 additions)
View File

@ -0,0 +1,81 @@
##########################################################
# This RNS example demonstrates a minimal setup, that #
# will start up the Reticulum Network Stack, generate a #
# new destination, and let the user send an announce. #
##########################################################
import argparse
import RNS
# Let's define an app name. We'll use this for all
# destinations we create. Since this basic example
# is part of a range of example utilities, we'll put
# them all within the app namespace "example_utilities"
APP_NAME = "example_utilitites"
# This initialisation is executed when the program is started
def program_setup(configpath):
# We must first initialise Reticulum
reticulum = RNS.Reticulum(configpath)
# Randomly create a new identity for our example
identity = RNS.Identity()
# Using the identity we just created, we create a destination.
# Destinations are endpoints in Reticulum that can be addressed
# and communicated with. Destinations can also announce their
# existence, which will let the network know they are reachable
# and automatically create paths to them, from anywhere else
# in the network.
destination = RNS.Destination(identity, RNS.Destination.IN, RNS.Destination.SINGLE, APP_NAME, "minimalsample")
# We configure the destination to automatically prove all
# packets addressed to it. By doing this, RNS will automatically
# generate a proof for each incoming packet and transmit it
# back to the sender of that packet. This will let anyone that
# tries to communicate with the destination know whether their
# communication was received correctly.
destination.set_proof_strategy(RNS.Destination.PROVE_ALL)
# Everything's ready!
# Let's hand over control to the announce loop
announceLoop(destination)
def announceLoop(destination):
# Let the user know that everything is ready
RNS.log("Minimal example "+RNS.prettyhexrep(destination.hash)+" running, hit enter to manually send an announce (Ctrl-C to quit)")
# We enter a loop that runs until the user exits.
# If the user hits enter, we will announce our server
# destination on the network, which will let clients
# know how to create messages directed towards it.
while True:
entered = input()
destination.announce()
RNS.log("Sent announce from "+RNS.prettyhexrep(destination.hash))
##########################################################
#### Program Startup #####################################
##########################################################
# This part of the program gets run at startup,
# and parses input from the user, and then starts
# the desired program mode.
if __name__ == "__main__":
try:
parser = argparse.ArgumentParser(description="Bare minimum example to start Reticulum and create a destination")
parser.add_argument("--config", action="store", default=None, help="path to alternative Reticulum config directory", type=str)
args = parser.parse_args()
if args.config:
configarg = args.config
else:
configarg = None
program_setup(configarg)
except KeyboardInterrupt:
print("")
exit()

View File

@ -54,7 +54,7 @@ class Destination:
# Create a digest for the destination
digest = hashes.Hash(hashes.SHA256(), backend=default_backend())
digest.update(name)
digest.update(name.encode("UTF-8"))
return digest.finalize()[:10]
@ -83,7 +83,7 @@ class Destination:
self.name = Destination.getDestinationName(app_name, *aspects)
self.hash = Destination.getDestinationHash(app_name, *aspects)
self.hexhash = self.hash.encode("hex_codec")
self.hexhash = self.hash.hex()
self.callback = None
self.proofcallback = None
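
The hexhash change mirrors the removal of the "hex_codec" str codec in Python 3: bytes objects now expose .hex() directly, and destination names must be encoded before hashing. A minimal sketch, with hashlib standing in for the cryptography-library Hash primitive the module actually uses, and an illustrative destination name:

import hashlib

name = "example_utilities.minimalsample"        # illustrative destination name
digest = hashlib.sha256(name.encode("utf-8")).digest()
dest_hash = digest[:10]                         # truncated hash, as in getDestinationHash
print(dest_hash.hex())                          # Python 2 used dest_hash.encode("hex_codec")
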

View File

@ -4,7 +4,7 @@ import os
import RNS
import time
import atexit
import vendor.umsgpack as umsgpack
from .vendor import umsgpack as umsgpack
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
@ -50,7 +50,7 @@ class Identity:
@staticmethod
def saveKnownDestinations():
RNS.log("Saving known destinations to storage...", RNS.LOG_VERBOSE)
file = open(RNS.Reticulum.storagepath+"/known_destinations","w")
file = open(RNS.Reticulum.storagepath+"/known_destinations","wb")
umsgpack.dump(Identity.known_destinations, file)
file.close()
RNS.log("Done saving known destinations to storage", RNS.LOG_VERBOSE)
@ -59,7 +59,7 @@ class Identity:
def loadKnownDestinations():
if os.path.isfile(RNS.Reticulum.storagepath+"/known_destinations"):
try:
file = open(RNS.Reticulum.storagepath+"/known_destinations","r")
file = open(RNS.Reticulum.storagepath+"/known_destinations","rb")
Identity.known_destinations = umsgpack.load(file)
file.close()
RNS.log("Loaded "+str(len(Identity.known_destinations))+" known destinations from storage", RNS.LOG_VERBOSE)
@ -80,7 +80,7 @@ class Identity:
digest = hashes.Hash(hashes.SHA256(), backend=default_backend())
digest.update(data)
return digest.finalize()[:(Identity.TRUNCATED_HASHLENGTH/8)]
return digest.finalize()[:(Identity.TRUNCATED_HASHLENGTH//8)]
@staticmethod
def getRandomHash():
@ -91,12 +91,12 @@ class Identity:
if packet.packet_type == RNS.Packet.ANNOUNCE:
RNS.log("Validating announce from "+RNS.prettyhexrep(packet.destination_hash), RNS.LOG_DEBUG)
destination_hash = packet.destination_hash
public_key = packet.data[10:Identity.DERKEYSIZE/8+10]
random_hash = packet.data[Identity.DERKEYSIZE/8+10:Identity.DERKEYSIZE/8+20]
signature = packet.data[Identity.DERKEYSIZE/8+20:Identity.DERKEYSIZE/8+20+Identity.KEYSIZE/8]
app_data = ""
if len(packet.data) > Identity.DERKEYSIZE/8+20+Identity.KEYSIZE/8:
app_data = packet.data[Identity.DERKEYSIZE/8+20+Identity.KEYSIZE/8:]
public_key = packet.data[10:Identity.DERKEYSIZE//8+10]
random_hash = packet.data[Identity.DERKEYSIZE//8+10:Identity.DERKEYSIZE//8+20]
signature = packet.data[Identity.DERKEYSIZE//8+20:Identity.DERKEYSIZE//8+20+Identity.KEYSIZE//8]
app_data = b""
if len(packet.data) > Identity.DERKEYSIZE//8+20+Identity.KEYSIZE//8:
app_data = packet.data[Identity.DERKEYSIZE//8+20+Identity.KEYSIZE//8:]
signed_data = destination_hash+public_key+random_hash+app_data
@ -198,11 +198,11 @@ class Identity:
def updateHashes(self):
self.hash = Identity.truncatedHash(self.pub_bytes)
self.hexhash = self.hash.encode("hex_codec")
self.hexhash = self.hash.hex()
def save(self, path):
try:
with open(path, "w") as key_file:
with open(path, "wb") as key_file:
key_file.write(self.prv_bytes)
return True
return False
@ -212,7 +212,7 @@ class Identity:
def load(self, path):
try:
with open(path, "r") as key_file:
with open(path, "rb") as key_file:
prv_bytes = key_file.read()
return self.loadPrivateKey(prv_bytes)
return False
@ -222,10 +222,10 @@ class Identity:
def encrypt(self, plaintext):
if self.pub != None:
chunksize = (Identity.KEYSIZE-Identity.PADDINGSIZE)/8
chunksize = (Identity.KEYSIZE-Identity.PADDINGSIZE)//8
chunks = int(math.ceil(len(plaintext)/(float(chunksize))))
ciphertext = "";
ciphertext = b"";
for chunk in range(chunks):
start = chunk*chunksize
end = (chunk+1)*chunksize
@ -249,10 +249,10 @@ class Identity:
if self.prv != None:
plaintext = None
try:
chunksize = (Identity.KEYSIZE)/8
chunksize = (Identity.KEYSIZE)//8
chunks = int(math.ceil(len(ciphertext)/(float(chunksize))))
plaintext = "";
plaintext = b"";
for chunk in range(chunks):
start = chunk*chunksize
end = (chunk+1)*chunksize
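
Most of the Identity changes swap / for //, because Python 3's true division returns a float, which can no longer be used as a slice index or chunk size. A minimal sketch of the difference (the key and padding sizes are illustrative values in bits):

KEYSIZE = 1024
PADDINGSIZE = 336

print(KEYSIZE / 8)          # 128.0  (float in Python 3, breaks slicing)
print(KEYSIZE // 8)         # 128    (int, usable as an index)

chunksize = (KEYSIZE - PADDINGSIZE) // 8
data = bytes(200)
first_chunk = data[0:chunksize]    # slice indices must be integers
assert len(first_chunk) == chunksize
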

View File

@ -1,5 +1,5 @@
from __future__ import print_function
from Interface import Interface
from .Interface import Interface
from time import sleep
import sys
import serial
@ -288,7 +288,7 @@ class AX25KISSInterface(Interface):
in_frame = False
command = KISS.CMD_UNKNOWN
escape = False
sleep(0.08)
sleep(0.08)
except Exception as e:
self.online = False

View File

@ -1,5 +1,5 @@
from __future__ import print_function
from Interface import Interface
from .Interface import Interface
from time import sleep
import sys
import serial
@ -236,11 +236,11 @@ class KISSInterface(Interface):
else:
time_since_last = int(time.time()*1000) - last_read_ms
if len(data_buffer) > 0 and time_since_last > self.timeout:
data_buffer = ""
in_frame = False
command = KISS.CMD_UNKNOWN
escape = False
sleep(0.08)
data_buffer = ""
in_frame = False
command = KISS.CMD_UNKNOWN
escape = False
sleep(0.08)
except Exception as e:
self.online = False

View File

@ -1,5 +1,5 @@
from __future__ import print_function
from Interface import Interface
from .Interface import Interface
from time import sleep
import sys
import serial
@ -416,11 +416,11 @@ class RNodeInterface(Interface):
time_since_last = int(time.time()*1000) - last_read_ms
if len(data_buffer) > 0 and time_since_last > self.timeout:
RNS.log(str(self)+" serial read timeout", RNS.LOG_DEBUG)
data_buffer = ""
in_frame = False
command = KISS.CMD_UNKNOWN
escape = False
sleep(0.08)
data_buffer = ""
in_frame = False
command = KISS.CMD_UNKNOWN
escape = False
sleep(0.08)
except Exception as e:
self.online = False

View File

@ -1,5 +1,5 @@
from __future__ import print_function
from Interface import Interface
from .Interface import Interface
from time import sleep
import sys
import serial

View File

@ -1,5 +1,5 @@
from Interface import Interface
import SocketServer
from .Interface import Interface
import socketserver
import threading
import socket
import time
@ -27,7 +27,7 @@ class UdpInterface(Interface):
self.owner = owner
address = (self.bind_ip, self.bind_port)
self.server = SocketServer.UDPServer(address, handlerFactory(self.processIncoming))
self.server = socketserver.UDPServer(address, handlerFactory(self.processIncoming))
thread = threading.Thread(target=self.server.serve_forever)
thread.setDaemon(True)
@ -52,10 +52,10 @@ class UdpInterface(Interface):
def __str__(self):
return "UdpInterface["+self.name+"/"+self.bind_ip+":"+str(self.bind_port)+"]"
class UdpInterfaceHandler(SocketServer.BaseRequestHandler):
class UdpInterfaceHandler(socketserver.BaseRequestHandler):
def __init__(self, callback, *args, **keys):
self.callback = callback
SocketServer.BaseRequestHandler.__init__(self, *args, **keys)
socketserver.BaseRequestHandler.__init__(self, *args, **keys)
def handle(self):
data = self.request[0]
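
The UDP interface only needs the Python 3 module renames: SocketServer becomes socketserver, and the import of Interface becomes an explicit relative import. A minimal sketch of the renamed module in use, bound to an ephemeral localhost port:

import socketserver

class EchoHandler(socketserver.BaseRequestHandler):
    def handle(self):
        data = self.request[0]          # for UDP servers, request is (data, socket)
        print("received", len(data), "bytes")

server = socketserver.UDPServer(("127.0.0.1", 0), EchoHandler)
print("listening on", server.server_address)
server.server_close()
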

View File

@ -5,7 +5,7 @@ from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives.kdf.hkdf import HKDF
from cryptography.fernet import Fernet
from time import sleep
import vendor.umsgpack as umsgpack
from .vendor import umsgpack as umsgpack
import threading
import base64
import time
@ -182,7 +182,7 @@ class Link:
if self.initiator:
peer_pub_bytes = packet.data[:Link.ECPUBSIZE]
signed_data = self.link_id+peer_pub_bytes
signature = packet.data[Link.ECPUBSIZE:RNS.Identity.KEYSIZE/8+Link.ECPUBSIZE]
signature = packet.data[Link.ECPUBSIZE:RNS.Identity.KEYSIZE//8+Link.ECPUBSIZE]
if self.destination.identity.validate(signature, signed_data):
self.loadPeer(peer_pub_bytes)
@ -378,23 +378,23 @@ class Link:
elif packet.context == RNS.Packet.RESOURCE_REQ:
plaintext = self.decrypt(packet.data)
if ord(plaintext[:1]) == RNS.Resource.HASHMAP_IS_EXHAUSTED:
resource_hash = plaintext[1+RNS.Resource.MAPHASH_LEN:RNS.Identity.HASHLENGTH/8+1+RNS.Resource.MAPHASH_LEN]
resource_hash = plaintext[1+RNS.Resource.MAPHASH_LEN:RNS.Identity.HASHLENGTH//8+1+RNS.Resource.MAPHASH_LEN]
else:
resource_hash = plaintext[1:RNS.Identity.HASHLENGTH/8+1]
resource_hash = plaintext[1:RNS.Identity.HASHLENGTH//8+1]
for resource in self.outgoing_resources:
if resource.hash == resource_hash:
resource.request(plaintext)
elif packet.context == RNS.Packet.RESOURCE_HMU:
plaintext = self.decrypt(packet.data)
resource_hash = plaintext[:RNS.Identity.HASHLENGTH/8]
resource_hash = plaintext[:RNS.Identity.HASHLENGTH//8]
for resource in self.incoming_resources:
if resource_hash == resource.hash:
resource.hashmap_update_packet(plaintext)
elif packet.context == RNS.Packet.RESOURCE_ICL:
plaintext = self.decrypt(packet.data)
resource_hash = plaintext[:RNS.Identity.HASHLENGTH/8]
resource_hash = plaintext[:RNS.Identity.HASHLENGTH//8]
for resource in self.incoming_resources:
if resource_hash == resource.hash:
resource.cancel()
@ -415,7 +415,7 @@ class Link:
elif packet.packet_type == RNS.Packet.PROOF:
if packet.context == RNS.Packet.RESOURCE_PRF:
resource_hash = packet.data[0:RNS.Identity.HASHLENGTH/8]
resource_hash = packet.data[0:RNS.Identity.HASHLENGTH//8]
for resource in self.outgoing_resources:
if resource_hash == resource.hash:
resource.validateProof(packet.data)

View File

@ -89,11 +89,10 @@ class Packet:
def pack(self):
self.destination_hash = self.destination.hash
self.header = ""
self.header = b""
self.header += struct.pack("!B", self.flags)
self.header += struct.pack("!B", self.hops)
if self.context == Packet.LRPROOF:
self.header += self.destination.link_id
self.ciphertext = self.data
@ -135,8 +134,7 @@ class Packet:
raise IOError("Packet with header type 2 must have a transport ID")
self.header += chr(self.context)
self.header += bytes([self.context])
self.raw = self.header + self.ciphertext
if len(self.raw) > self.MTU:
@ -146,8 +144,8 @@ class Packet:
self.updateHash()
def unpack(self):
self.flags = ord(self.raw[0])
self.hops = ord(self.raw[1])
self.flags = self.raw[0]
self.hops = self.raw[1]
self.header_type = (self.flags & 0b11000000) >> 6
self.transport_type = (self.flags & 0b00110000) >> 4
@ -229,7 +227,7 @@ class Packet:
return RNS.Identity.truncatedHash(self.getHashablePart())
def getHashablePart(self):
hashable_part = struct.pack("!B", struct.unpack("!B", self.raw[0])[0] & 0b00001111)
hashable_part = bytes([self.raw[0] & 0b00001111])
if self.header_type == Packet.HEADER_2:
hashable_part += self.raw[12:]
else:
@ -253,8 +251,8 @@ class PacketReceipt:
DELIVERED = 0x02
EXPL_LENGTH = RNS.Identity.HASHLENGTH/8+RNS.Identity.SIGLENGTH/8
IMPL_LENGTH = RNS.Identity.SIGLENGTH/8
EXPL_LENGTH = RNS.Identity.HASHLENGTH//8+RNS.Identity.SIGLENGTH//8
IMPL_LENGTH = RNS.Identity.SIGLENGTH//8
# Creates a new packet receipt from a sent packet
def __init__(self, packet):
@ -280,8 +278,8 @@ class PacketReceipt:
# TODO: Hardcoded as explicit proofs for now
if True or len(proof) == PacketReceipt.EXPL_LENGTH:
# This is an explicit proof
proof_hash = proof[:RNS.Identity.HASHLENGTH/8]
signature = proof[RNS.Identity.HASHLENGTH/8:RNS.Identity.HASHLENGTH/8+RNS.Identity.SIGLENGTH/8]
proof_hash = proof[:RNS.Identity.HASHLENGTH//8]
signature = proof[RNS.Identity.HASHLENGTH//8:RNS.Identity.HASHLENGTH//8+RNS.Identity.SIGLENGTH//8]
if proof_hash == self.hash:
proof_valid = link.validate(signature, self.hash)
if proof_valid:
@ -297,7 +295,8 @@ class PacketReceipt:
return False
elif len(proof) == PacketReceipt.IMPL_LENGTH:
pass
# signature = proof[:RNS.Identity.SIGLENGTH/8]
# TODO: Why is this disabled?
# signature = proof[:RNS.Identity.SIGLENGTH//8]
# proof_valid = self.link.validate(signature, self.hash)
# if proof_valid:
# self.status = PacketReceipt.DELIVERED
@ -317,8 +316,8 @@ class PacketReceipt:
def validateProof(self, proof):
if len(proof) == PacketReceipt.EXPL_LENGTH:
# This is an explicit proof
proof_hash = proof[:RNS.Identity.HASHLENGTH/8]
signature = proof[RNS.Identity.HASHLENGTH/8:RNS.Identity.HASHLENGTH/8+RNS.Identity.SIGLENGTH/8]
proof_hash = proof[:RNS.Identity.HASHLENGTH//8]
signature = proof[RNS.Identity.HASHLENGTH//8:RNS.Identity.HASHLENGTH//8+RNS.Identity.SIGLENGTH//8]
if proof_hash == self.hash:
proof_valid = self.destination.identity.validate(signature, self.hash)
if proof_valid:
@ -337,7 +336,7 @@ class PacketReceipt:
if self.destination.identity == None:
return False
signature = proof[:RNS.Identity.SIGLENGTH/8]
signature = proof[:RNS.Identity.SIGLENGTH//8]
proof_valid = self.destination.identity.validate(signature, self.hash)
if proof_valid:
self.status = PacketReceipt.DELIVERED
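
The Packet changes reflect how indexing behaves in Python 3: indexing a bytes object already yields an int, so ord() is unnecessary, and single bytes are built with bytes([value]) rather than chr(). A minimal sketch with illustrative header values:

raw = bytes([0b01010001, 3, 0xAA])     # illustrative prefix: flags, hops, payload

flags = raw[0]                         # already an int in Python 3, no ord() needed
hops = raw[1]
header_type = (flags & 0b11000000) >> 6

header = b""
header += bytes([flags])               # Python 2 used chr(flags)
header += bytes([hops])
assert header == raw[:2]
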

View File

@ -3,7 +3,7 @@ import bz2
import math
import time
import threading
import vendor.umsgpack as umsgpack
from .vendor import umsgpack as umsgpack
from time import sleep
class Resource:
@ -74,7 +74,7 @@ class Resource:
resource.watchdog_job()
return resource
except Exception as e:
except:
RNS.log("Could not decode resource advertisement, dropping resource", RNS.LOG_DEBUG)
return None
@ -123,7 +123,7 @@ class Resource:
self.size = len(self.data)
self.hashmap = ""
self.hashmap = b""
self.sent_parts = 0
self.parts = []
for i in range(0,int(math.ceil(self.size/float(Resource.SDU)))):
@ -158,7 +158,7 @@ class Resource:
self.last_activity = time.time()
self.retries_left = self.max_retries
update = umsgpack.unpackb(plaintext[RNS.Identity.HASHLENGTH/8:])
update = umsgpack.unpackb(plaintext[RNS.Identity.HASHLENGTH//8:])
self.hashmap_update(update[0], update[1])
@ -166,7 +166,7 @@ class Resource:
if not self.status == Resource.FAILED:
self.status = Resource.TRANSFERRING
seg_len = ResourceAdvertisement.HASHMAP_MAX_LEN
hashes = len(hashmap)/Resource.MAPHASH_LEN
hashes = len(hashmap)//Resource.MAPHASH_LEN
for i in range(0,hashes):
if self.hashmap[i+segment*seg_len] == None:
self.hashmap_height += 1
@ -283,7 +283,7 @@ class Resource:
if not self.status == Resource.FAILED:
try:
self.status = Resource.ASSEMBLING
stream = ""
stream = b""
for part in self.parts:
stream += part
@ -324,8 +324,8 @@ class Resource:
def validateProof(self, proof_data):
if not self.status == Resource.FAILED:
if len(proof_data) == RNS.Identity.HASHLENGTH/8*2:
if proof_data[RNS.Identity.HASHLENGTH/8:] == self.expected_proof:
if len(proof_data) == RNS.Identity.HASHLENGTH//8*2:
if proof_data[RNS.Identity.HASHLENGTH//8:] == self.expected_proof:
self.status = Resource.COMPLETE
if self.callback != None:
self.link.resource_concluded(self)
@ -379,7 +379,7 @@ class Resource:
if not self.waiting_for_hmu:
self.outstanding_parts = 0
hashmap_exhausted = Resource.HASHMAP_IS_NOT_EXHAUSTED
requested_hashes = ""
requested_hashes = b""
i = 0; pn = 0
for part in self.parts:
@ -397,12 +397,13 @@ class Resource:
if i >= self.window or hashmap_exhausted == Resource.HASHMAP_IS_EXHAUSTED:
break
hmu_part = chr(hashmap_exhausted)
hmu_part = bytes([hashmap_exhausted])
if hashmap_exhausted == Resource.HASHMAP_IS_EXHAUSTED:
last_map_hash = self.hashmap[self.hashmap_height-1]
hmu_part += last_map_hash
self.waiting_for_hmu = True
requested_data = b""
request_data = hmu_part + self.hash + requested_hashes
request_packet = RNS.Packet(self.link, request_data, context = RNS.Packet.RESOURCE_REQ)
@ -424,12 +425,12 @@ class Resource:
self.retries_left = self.max_retries
wants_more_hashmap = True if ord(request_data[0]) == Resource.HASHMAP_IS_EXHAUSTED else False
wants_more_hashmap = True if request_data[0] == Resource.HASHMAP_IS_EXHAUSTED else False
pad = 1+Resource.MAPHASH_LEN if wants_more_hashmap else 1
requested_hashes = request_data[pad+RNS.Identity.HASHLENGTH/8:]
requested_hashes = request_data[pad+RNS.Identity.HASHLENGTH//8:]
for i in range(0,len(requested_hashes)/Resource.MAPHASH_LEN):
for i in range(0,len(requested_hashes)//Resource.MAPHASH_LEN):
requested_hash = requested_hashes[i*Resource.MAPHASH_LEN:(i+1)*Resource.MAPHASH_LEN]
pi = 0
@ -458,13 +459,13 @@ class Resource:
RNS.log("Resource sequencing error, cancelling transfer!", RNS.LOG_ERROR)
self.cancel()
else:
segment = part_index / ResourceAdvertisement.HASHMAP_MAX_LEN
segment = part_index // ResourceAdvertisement.HASHMAP_MAX_LEN
hashmap_start = segment*ResourceAdvertisement.HASHMAP_MAX_LEN
hashmap_end = min((segment+1)*ResourceAdvertisement.HASHMAP_MAX_LEN, len(self.parts))
hashmap = ""
hashmap = b""
for i in range(hashmap_start,hashmap_end):
hashmap += self.hashmap[i*Resource.MAPHASH_LEN:(i+1)*Resource.MAPHASH_LEN]
@ -523,18 +524,18 @@ class ResourceAdvertisement:
hashmap_start = segment*ResourceAdvertisement.HASHMAP_MAX_LEN
hashmap_end = min((segment+1)*ResourceAdvertisement.HASHMAP_MAX_LEN, self.n)
hashmap = ""
hashmap = b""
for i in range(hashmap_start,hashmap_end):
hashmap += self.m[i*Resource.MAPHASH_LEN:(i+1)*Resource.MAPHASH_LEN]
dictionary = {
u"t": self.t,
u"d": self.d,
u"n": self.n,
u"h": self.h,
u"r": self.r,
u"f": self.f,
u"m": hashmap
"t": self.t,
"d": self.d,
"n": self.n,
"h": self.h,
"r": self.r,
"f": self.f,
"m": hashmap
}
return umsgpack.packb(dictionary)
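
Several Resource buffers that were accumulated as "" are now accumulated as b"", since hash map entries, part data and request payloads are all bytes in Python 3, and the msgpack dictionary keys no longer need the u prefix. A minimal sketch of building such a buffer (the map-hash length and part hashes are illustrative):

MAPHASH_LEN = 4

part_hashes = [b"\x01\x02\x03\x04", b"\xaa\xbb\xcc\xdd"]

hashmap = b""                                  # str and bytes no longer concatenate
for part_hash in part_hashes:
    hashmap += part_hash

second = hashmap[1 * MAPHASH_LEN:2 * MAPHASH_LEN]
assert second == part_hashes[1]
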

View File

@ -1,6 +1,6 @@
from Interfaces import *
import ConfigParser
from vendor.configobj import ConfigObj
from .Interfaces import *
import configparser
from .vendor.configobj import ConfigObj
import RNS
import atexit
import struct
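
Reticulum's configuration handling only needs the Python 3 module rename (ConfigParser becomes configparser) plus explicit relative imports for the bundled packages. A minimal sketch of the renamed module, with illustrative config content:

import configparser                    # "import ConfigParser" in Python 2

parser = configparser.ConfigParser()
parser.read_string("[reticulum]\nshare_instance = yes\n")
print(parser["reticulum"]["share_instance"])     # yes
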

View File

@ -6,7 +6,7 @@ import struct
import threading
import traceback
from time import sleep
import vendor.umsgpack as umsgpack
from .vendor import umsgpack as umsgpack
class Transport:
# Constants
@ -84,7 +84,7 @@ class Transport:
packet_hashlist_path = RNS.Reticulum.configdir+"/packet_hashlist"
if os.path.isfile(packet_hashlist_path):
try:
file = open(packet_hashlist_path, "r")
file = open(packet_hashlist_path, "rb")
Transport.packet_hashlist = umsgpack.unpackb(file.read())
file.close()
except Exception as e:
@ -144,7 +144,7 @@ class Transport:
announce_identity = RNS.Identity.recall(packet.destination_hash)
announce_destination = RNS.Destination(announce_identity, RNS.Destination.OUT, RNS.Destination.SINGLE, "unknown", "unknown");
announce_destination.hash = packet.destination_hash
announce_destination.hexhash = announce_destination.hash.encode("hex_codec")
announce_destination.hexhash = announce_destination.hash.hex()
new_packet = RNS.Packet(announce_destination, announce_data, RNS.Packet.ANNOUNCE, context = announce_context, header_type = RNS.Packet.HEADER_2, transport_type = Transport.TRANSPORT, transport_id = Transport.identity.hash)
new_packet.hops = announce_entry[4]
RNS.log("Rebroadcasting announce for "+RNS.prettyhexrep(announce_destination.hash)+" with hop count "+str(new_packet.hops), RNS.LOG_DEBUG)
@ -423,7 +423,7 @@ class Transport:
# First, check that the announce is not for a destination
# local to this system, and that hops are less than the max
if (not any(packet.destination_hash == d.hash for d in Transport.destinations) and packet.hops < Transport.PATHFINDER_M+1):
random_blob = packet.data[RNS.Identity.DERKEYSIZE/8+10:RNS.Identity.DERKEYSIZE/8+20]
random_blob = packet.data[RNS.Identity.DERKEYSIZE//8+10:RNS.Identity.DERKEYSIZE//8+20]
random_blobs = []
if packet.destination_hash in Transport.destination_table:
random_blobs = Transport.destination_table[packet.destination_hash][4]
@ -541,7 +541,7 @@ class Transport:
# plaintext = link.decrypt(packet.data)
if len(packet.data) == RNS.PacketReceipt.EXPL_LENGTH:
proof_hash = packet.data[:RNS.Identity.HASHLENGTH/8]
proof_hash = packet.data[:RNS.Identity.HASHLENGTH//8]
else:
proof_hash = None
@ -612,7 +612,7 @@ class Transport:
if RNS.Transport.shouldCache(packet):
try:
packet_hash = RNS.hexrep(packet.getHash(), delimit=False)
file = open(RNS.Reticulum.cachepath+"/"+packet_hash, "w")
file = open(RNS.Reticulum.cachepath+"/"+packet_hash, "wb")
file.write(packet.raw)
file.close()
RNS.log("Wrote packet "+packet_hash+" to cache", RNS.LOG_EXTREME)
@ -628,7 +628,7 @@ class Transport:
packet_hash = RNS.hexrep(packet.data, delimit=False)
path = RNS.Reticulum.cachepath+"/"+packet_hash
if os.path.isfile(path):
file = open(path, "r")
file = open(path, "rb")
raw = file.read()
file.close()
packet = RNS.Packet(None, raw)
@ -642,7 +642,7 @@ class Transport:
RNS.log("Cache request for "+RNS.prettyhexrep(packet_hash), RNS.LOG_EXTREME)
path = RNS.Reticulum.cachepath+"/"+RNS.hexrep(packet_hash, delimit=False)
if os.path.isfile(path):
file = open(path, "r")
file = open(path, "rb")
raw = file.read()
Transport.inbound(raw)
file.close()
@ -665,8 +665,8 @@ class Transport:
@staticmethod
def pathRequestHandler(data, packet):
if len(data) >= RNS.Identity.TRUNCATED_HASHLENGTH/8:
Transport.pathRequest(data[:RNS.Identity.TRUNCATED_HASHLENGTH/8])
if len(data) >= RNS.Identity.TRUNCATED_HASHLENGTH//8:
Transport.pathRequest(data[:RNS.Identity.TRUNCATED_HASHLENGTH//8])
@staticmethod
def pathRequest(destination_hash):
@ -704,7 +704,7 @@ class Transport:
def exitHandler():
try:
packet_hashlist_path = RNS.Reticulum.configdir+"/packet_hashlist"
file = open(packet_hashlist_path, "w")
file = open(packet_hashlist_path, "wb")
file.write(umsgpack.packb(Transport.packet_hashlist))
file.close()
except Exception as e:
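
The Transport changes are again binary file modes and floor division: msgpack output is bytes, so the packet hash list and cache files must be read and written with "rb"/"wb". A minimal sketch, assuming the u-msgpack-python module (vendored here as RNS.vendor.umsgpack) is importable as umsgpack, with an illustrative hash list:

import umsgpack                          # assumption: pip package u-msgpack-python

packet_hashlist = [b"\x00" * 10]
with open("packet_hashlist", "wb") as f: # text mode would raise TypeError on bytes
    f.write(umsgpack.packb(packet_hashlist))

with open("packet_hashlist", "rb") as f:
    restored = umsgpack.unpackb(f.read())
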

View File

@ -82,7 +82,7 @@ def hexrep(data, delimit=True):
def prettyhexrep(data):
delimiter = ""
hexrep = "<"+delimiter.join("{:02x}".format(ord(c)) for c in data)+">"
hexrep = "<"+delimiter.join("{:02x}".format(c) for c in data)+">"
return hexrep
def panic():
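
The prettyhexrep change follows from bytes iteration: in Python 2 iterating a string yields one-character strings (hence ord(c)), while in Python 3 iterating bytes yields integers that can be formatted directly. A minimal sketch with illustrative data:

data = bytes([0xde, 0xad, 0xbe, 0xef])
hexrep = "<" + "".join("{:02x}".format(c) for c in data) + ">"
print(hexrep)    # <deadbeef>
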

View File

@ -1,22 +1,17 @@
# configobj.py
# A config file reader/writer that supports nested sections in config files.
# Copyright (C) 2005-2010 Michael Foord, Nicola Larosa
# E-mail: fuzzyman AT voidspace DOT org DOT uk
# nico AT tekNico DOT net
# Copyright (C) 2005-2014:
# (name) : (email)
# Michael Foord: fuzzyman AT voidspace DOT org DOT uk
# Nicola Larosa: nico AT tekNico DOT net
# Rob Dennis: rdennis AT gmail DOT com
# Eli Courtwright: eli AT courtwright DOT org
# ConfigObj 4
# http://www.voidspace.org.uk/python/configobj.html
# This software is licensed under the terms of the BSD license.
# http://opensource.org/licenses/BSD-3-Clause
# Released subject to the BSD License
# Please see http://www.voidspace.org.uk/python/license.shtml
# Scripts maintained at http://www.voidspace.org.uk/python/index.shtml
# For information about bugfixes, updates and support, please join the
# ConfigObj mailing list:
# http://lists.sourceforge.net/lists/listinfo/configobj-develop
# Comments, suggestions and bug reports welcome.
from __future__ import generators
# ConfigObj 5 - main repository for documentation and issue tracking:
# https://github.com/DiffSK/configobj
import os
import re
@ -24,6 +19,8 @@ import sys
from codecs import BOM_UTF8, BOM_UTF16, BOM_UTF16_BE, BOM_UTF16_LE
import six
__version__ = '5.0.6'
# imported lazily to avoid startup performance hit if it isn't used
compiler = None
@ -38,7 +35,7 @@ BOMS = {
BOM_UTF16: ('utf_16', 'utf_16'),
}
# All legal variants of the BOM codecs.
# The list of aliases is not meant to be exhaustive, is there a
# TODO: the list of aliases is not meant to be exhaustive, is there a
# better way ?
BOM_LIST = {
'utf_16': 'utf_16',
@ -83,20 +80,7 @@ tdquot = "'''%s'''"
# Sentinel for use in getattr calls to replace hasattr
MISSING = object()
__version__ = '4.7.2'
try:
any
except NameError:
def any(iterable):
for entry in iterable:
if entry:
return True
return False
__all__ = (
'__version__',
'DEFAULT_INDENT_TYPE',
'DEFAULT_INTERPOLATION',
'ConfigObjError',
@ -137,6 +121,8 @@ OPTION_DEFAULTS = {
'write_empty_values': False,
}
# this could be replaced if six is used for compatibility, or there are no
# more assertions about items being a string
def getObj(s):
@ -155,13 +141,12 @@ class UnknownType(Exception):
class Builder(object):
def build(self, o):
m = getattr(self, 'build_' + o.__class__.__name__, None)
if m is None:
raise UnknownType(o.__class__.__name__)
return m(o)
def build_List(self, o):
return map(self.build, o.getChildren())
return list(map(self.build, o.getChildren()))
def build_Const(self, o):
return o.value
@ -170,7 +155,7 @@ class Builder(object):
d = {}
i = iter(map(self.build, o.getChildren()))
for el in i:
d[el] = i.next()
d[el] = next(i)
return d
def build_Tuple(self, o):
@ -188,7 +173,7 @@ class Builder(object):
raise UnknownType('Undefined Name')
def build_Add(self, o):
real, imag = map(self.build_Const, o.getChildren())
real, imag = list(map(self.build_Const, o.getChildren()))
try:
real = float(real)
except TypeError:
@ -214,8 +199,10 @@ _builder = Builder()
def unrepr(s):
if not s:
return s
return _builder.build(getObj(s))
# this is supposed to be safe
import ast
return ast.literal_eval(s)
class ConfigObjError(SyntaxError):
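
In configobj, the old compiler-module based unrepr() now delegates to ast.literal_eval, which safely evaluates Python literal syntax without the removed compiler package. A minimal sketch with an illustrative value string:

import ast

value = ast.literal_eval("{'port': 4242, 'names': ['a', 'b'], 'ratio': 0.5}")
print(value["names"])      # ['a', 'b']
# ast.literal_eval("__import__('os')") raises ValueError: only literals are allowed
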
@ -518,7 +505,7 @@ class Section(dict):
self._initialise()
# we do this explicitly so that __setitem__ is used properly
# (rather than just passing to ``dict.__init__``)
for entry, value in indict.iteritems():
for entry, value in indict.items():
self[entry] = value
@ -566,11 +553,11 @@ class Section(dict):
"""Fetch the item and do string interpolation."""
val = dict.__getitem__(self, key)
if self.main.interpolation:
if isinstance(val, basestring):
if isinstance(val, six.string_types):
return self._interpolate(key, val)
if isinstance(val, list):
def _check(entry):
if isinstance(entry, basestring):
if isinstance(entry, six.string_types):
return self._interpolate(key, entry)
return entry
new = [_check(entry) for entry in val]
@ -593,7 +580,7 @@ class Section(dict):
``unrepr`` must be set when setting a value to a dictionary, without
creating a new sub-section.
"""
if not isinstance(key, basestring):
if not isinstance(key, six.string_types):
raise ValueError('The key "%s" is not a string.' % key)
# add the comment
@ -627,11 +614,11 @@ class Section(dict):
if key not in self:
self.scalars.append(key)
if not self.main.stringify:
if isinstance(value, basestring):
if isinstance(value, six.string_types):
pass
elif isinstance(value, (list, tuple)):
for entry in value:
if not isinstance(entry, basestring):
if not isinstance(entry, six.string_types):
raise TypeError('Value is not a string "%s".' % entry)
else:
raise TypeError('Value is not a string "%s".' % value)
@ -721,7 +708,7 @@ class Section(dict):
def items(self):
"""D.items() -> list of D's (key, value) pairs, as 2-tuples"""
return zip((self.scalars + self.sections), self.values())
return list(zip((self.scalars + self.sections), list(self.values())))
def keys(self):
@ -736,7 +723,7 @@ class Section(dict):
def iteritems(self):
"""D.iteritems() -> an iterator over the (key, value) items of D"""
return iter(self.items())
return iter(list(self.items()))
def iterkeys(self):
@ -748,7 +735,7 @@ class Section(dict):
def itervalues(self):
"""D.itervalues() -> an iterator over the values of D"""
return iter(self.values())
return iter(list(self.values()))
def __repr__(self):
@ -814,7 +801,7 @@ class Section(dict):
>>> c2
ConfigObj({'section1': {'option1': 'False', 'subsection': {'more_options': 'False'}}})
"""
for key, val in indict.items():
for key, val in list(indict.items()):
if (key in self and isinstance(self[key], dict) and
isinstance(val, dict)):
self[key].merge(val)
@ -972,7 +959,7 @@ class Section(dict):
return False
else:
try:
if not isinstance(val, basestring):
if not isinstance(val, six.string_types):
# TODO: Why do we raise a KeyError here?
raise KeyError()
else:
@ -1013,15 +1000,15 @@ class Section(dict):
>>> a = ConfigObj()
>>> a['a'] = 'fish'
>>> a.as_float('a')
>>> a.as_float('a') #doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
ValueError: invalid literal for float(): fish
>>> a['b'] = '1'
>>> a.as_float('b')
1.0
>>> a['b'] = '3.2'
>>> a.as_float('b')
3.2000000000000002
>>> a.as_float('b') #doctest: +ELLIPSIS
3.2...
"""
return float(self[key])
@ -1224,7 +1211,7 @@ class ConfigObj(Section):
for entry in options:
if entry not in OPTION_DEFAULTS:
raise TypeError('Unrecognised option "%s".' % entry)
for entry, value in OPTION_DEFAULTS.items():
for entry, value in list(OPTION_DEFAULTS.items()):
if entry not in options:
options[entry] = value
keyword_value = _options[entry]
@ -1243,12 +1230,11 @@ class ConfigObj(Section):
def _load(self, infile, configspec):
if isinstance(infile, basestring):
if isinstance(infile, six.string_types):
self.filename = infile
if os.path.isfile(infile):
h = open(infile, 'rb')
infile = h.read() or []
h.close()
with open(infile, 'rb') as h:
content = h.readlines() or []
elif self.file_error:
# raise an error if the file doesn't exist
raise IOError('Config file not found: "%s".' % self.filename)
@ -1257,13 +1243,12 @@ class ConfigObj(Section):
if self.create_empty:
# this is a good test that the filename specified
# isn't impossible - like on a non-existent device
h = open(infile, 'w')
h.write('')
h.close()
infile = []
with open(infile, 'w') as h:
h.write('')
content = []
elif isinstance(infile, (list, tuple)):
infile = list(infile)
content = list(infile)
elif isinstance(infile, dict):
# initialise self
@ -1291,21 +1276,21 @@ class ConfigObj(Section):
elif getattr(infile, 'read', MISSING) is not MISSING:
# This supports file like objects
infile = infile.read() or []
content = infile.read() or []
# needs splitting into lines - but needs doing *after* decoding
# in case it's not an 8 bit encoding
else:
raise TypeError('infile must be a filename, file like object, or list of lines.')
if infile:
if content:
# don't do it for the empty ConfigObj
infile = self._handle_bom(infile)
content = self._handle_bom(content)
# infile is now *always* a list
#
# Set the newlines attribute (first line ending it finds)
# and strip trailing '\n' or '\r' from lines
for line in infile:
if (not line) or (line[-1] not in ('\r', '\n', '\r\n')):
for line in content:
if (not line) or (line[-1] not in ('\r', '\n')):
continue
for end in ('\r\n', '\n', '\r'):
if line.endswith(end):
@ -1313,9 +1298,10 @@ class ConfigObj(Section):
break
break
infile = [line.rstrip('\r\n') for line in infile]
assert all(isinstance(line, six.string_types) for line in content), repr(content)
content = [line.rstrip('\r\n') for line in content]
self._parse(infile)
self._parse(content)
# if we had any errors, now is the time to raise them
if self._errors:
info = "at line %s." % self._errors[0].line_number
@ -1404,6 +1390,7 @@ class ConfigObj(Section):
``infile`` must always be returned as a list of lines, but may be
passed in as a single string.
"""
if ((self.encoding is not None) and
(self.encoding.lower() not in BOM_LIST)):
# No need to check for a BOM
@ -1415,6 +1402,13 @@ class ConfigObj(Section):
line = infile[0]
else:
line = infile
if isinstance(line, six.text_type):
# it's already decoded and there's no need to do anything
# else, just use the _decode utility method to handle
# listifying appropriately
return self._decode(infile, self.encoding)
if self.encoding is not None:
# encoding explicitly supplied
# And it could have an associated BOM
@ -1423,7 +1417,7 @@ class ConfigObj(Section):
enc = BOM_LIST[self.encoding.lower()]
if enc == 'utf_16':
# For UTF16 we try big endian and little endian
for BOM, (encoding, final_encoding) in BOMS.items():
for BOM, (encoding, final_encoding) in list(BOMS.items()):
if not final_encoding:
# skip UTF8
continue
@ -1453,8 +1447,9 @@ class ConfigObj(Section):
return self._decode(infile, self.encoding)
# No encoding specified - so we need to check for UTF8/UTF16
for BOM, (encoding, final_encoding) in BOMS.items():
if not line.startswith(BOM):
for BOM, (encoding, final_encoding) in list(BOMS.items()):
if not isinstance(line, six.binary_type) or not line.startswith(BOM):
# didn't specify a BOM, or it's not a bytestring
continue
else:
# BOM discovered
@ -1468,25 +1463,32 @@ class ConfigObj(Section):
infile[0] = newline
else:
infile = newline
# UTF8 - don't decode
if isinstance(infile, basestring):
# UTF-8
if isinstance(infile, six.text_type):
return infile.splitlines(True)
elif isinstance(infile, six.binary_type):
return infile.decode('utf-8').splitlines(True)
else:
return infile
return self._decode(infile, 'utf-8')
# UTF16 - have to decode
return self._decode(infile, encoding)
# No BOM discovered and no encoding specified, just return
if isinstance(infile, basestring):
# infile read from a file will be a single string
return infile.splitlines(True)
return infile
if six.PY2 and isinstance(line, str):
# don't actually do any decoding, since we're on python 2 and
# returning a bytestring is fine
return self._decode(infile, None)
# No BOM discovered and no encoding specified, default to UTF-8
if isinstance(infile, six.binary_type):
return infile.decode('utf-8').splitlines(True)
else:
return self._decode(infile, 'utf-8')
def _a_to_u(self, aString):
"""Decode ASCII strings to unicode if a self.encoding is specified."""
if self.encoding:
return aString.decode('ascii')
if isinstance(aString, six.binary_type) and self.encoding:
return aString.decode(self.encoding)
else:
return aString
@ -1497,34 +1499,42 @@ class ConfigObj(Section):
if is a string, it also needs converting to a list.
"""
if isinstance(infile, basestring):
# can't be unicode
if isinstance(infile, six.string_types):
return infile.splitlines(True)
if isinstance(infile, six.binary_type):
# NOTE: Could raise a ``UnicodeDecodeError``
return infile.decode(encoding).splitlines(True)
for i, line in enumerate(infile):
if not isinstance(line, unicode):
# NOTE: The isinstance test here handles mixed lists of unicode/string
# NOTE: But the decode will break on any non-string values
# NOTE: Or could raise a ``UnicodeDecodeError``
infile[i] = line.decode(encoding)
if encoding:
return infile.decode(encoding).splitlines(True)
else:
return infile.splitlines(True)
if encoding:
for i, line in enumerate(infile):
if isinstance(line, six.binary_type):
# NOTE: The isinstance test here handles mixed lists of unicode/string
# NOTE: But the decode will break on any non-string values
# NOTE: Or could raise a ``UnicodeDecodeError``
infile[i] = line.decode(encoding)
return infile
def _decode_element(self, line):
"""Decode element to unicode if necessary."""
if not self.encoding:
return line
if isinstance(line, str) and self.default_encoding:
if isinstance(line, six.binary_type) and self.default_encoding:
return line.decode(self.default_encoding)
return line
else:
return line
# TODO: this may need to be modified
def _str(self, value):
"""
Used by ``stringify`` within validate, to turn non-string values
into strings.
"""
if not isinstance(value, basestring):
if not isinstance(value, six.string_types):
# intentionally 'str' because it's just whatever the "normal"
# string type is for the python version we're dealing with
return str(value)
else:
return value
@ -1571,7 +1581,7 @@ class ConfigObj(Section):
self.indent_type = indent
cur_depth = sect_open.count('[')
if cur_depth != sect_close.count(']'):
self._handle_error("Cannot compute the section depth at line %s.",
self._handle_error("Cannot compute the section depth",
NestingError, infile, cur_index)
continue
@ -1581,7 +1591,7 @@ class ConfigObj(Section):
parent = self._match_depth(this_section,
cur_depth).parent
except SyntaxError:
self._handle_error("Cannot compute nesting level at line %s.",
self._handle_error("Cannot compute nesting level",
NestingError, infile, cur_index)
continue
elif cur_depth == this_section.depth:
@ -1591,12 +1601,13 @@ class ConfigObj(Section):
# the new section is a child the current section
parent = this_section
else:
self._handle_error("Section too nested at line %s.",
self._handle_error("Section too nested",
NestingError, infile, cur_index)
continue
sect_name = self._unquote(sect_name)
if sect_name in parent:
self._handle_error('Duplicate section name at line %s.',
self._handle_error('Duplicate section name',
DuplicateError, infile, cur_index)
continue
@ -1615,10 +1626,8 @@ class ConfigObj(Section):
# so it should be a valid ``key = value`` line
mat = self._keyword.match(line)
if mat is None:
# it neither matched as a keyword
# or a section marker
self._handle_error(
'Invalid line at line "%s".',
'Invalid line ({0!r}) (matched as neither section nor keyword)'.format(line),
ParseError, infile, cur_index)
else:
# is a keyword value
@ -1633,7 +1642,7 @@ class ConfigObj(Section):
value, infile, cur_index, maxline)
except SyntaxError:
self._handle_error(
'Parse error in value at line %s.',
'Parse error in multiline value',
ParseError, infile, cur_index)
continue
else:
@ -1641,11 +1650,11 @@ class ConfigObj(Section):
comment = ''
try:
value = unrepr(value)
except Exception, e:
except Exception as e:
if type(e) == UnknownType:
msg = 'Unknown name or type in value at line %s.'
msg = 'Unknown name or type in value'
else:
msg = 'Parse error in value at line %s.'
msg = 'Parse error from unrepr-ing multiline value'
self._handle_error(msg, UnreprError, infile,
cur_index)
continue
@ -1654,11 +1663,11 @@ class ConfigObj(Section):
comment = ''
try:
value = unrepr(value)
except Exception, e:
except Exception as e:
if isinstance(e, UnknownType):
msg = 'Unknown name or type in value at line %s.'
msg = 'Unknown name or type in value'
else:
msg = 'Parse error in value at line %s.'
msg = 'Parse error from unrepr-ing value'
self._handle_error(msg, UnreprError, infile,
cur_index)
continue
@ -1668,14 +1677,14 @@ class ConfigObj(Section):
(value, comment) = self._handle_value(value)
except SyntaxError:
self._handle_error(
'Parse error in value at line %s.',
'Parse error in value',
ParseError, infile, cur_index)
continue
#
key = self._unquote(key)
if key in this_section:
self._handle_error(
'Duplicate keyword name at line %s.',
'Duplicate keyword name',
DuplicateError, infile, cur_index)
continue
# add the key.
@ -1726,7 +1735,7 @@ class ConfigObj(Section):
"""
line = infile[cur_index]
cur_index += 1
message = text % cur_index
message = '{0} at line {1}.'.format(text, cur_index)
error = ErrorClass(message, cur_index, line)
if self.raise_errors:
# raise the error - parsing stops here
@ -1777,8 +1786,10 @@ class ConfigObj(Section):
return self._quote(value[0], multiline=False) + ','
return ', '.join([self._quote(val, multiline=False)
for val in value])
if not isinstance(value, basestring):
if not isinstance(value, six.string_types):
if self.stringify:
# intentionally 'str' because it's just whatever the "normal"
# string type is for the python version we're dealing with
value = str(value)
else:
raise TypeError('Value "%s" is not a string.' % value)
@ -1929,11 +1940,11 @@ class ConfigObj(Section):
raise_errors=True,
file_error=True,
_inspec=True)
except ConfigObjError, e:
except ConfigObjError as e:
# FIXME: Should these errors have a reference
# to the already parsed ConfigObj ?
raise ConfigspecError('Parsing configspec failed: %s' % e)
except IOError, e:
except IOError as e:
raise IOError('Reading configspec failed: %s' % e)
self.configspec = configspec
@ -2049,7 +2060,7 @@ class ConfigObj(Section):
this_entry = section[entry]
comment = self._handle_comment(section.inline_comments[entry])
if isinstance(this_entry, dict):
if isinstance(this_entry, Section):
# a section
out.append(self._write_marker(
indent_string,
@ -2097,21 +2108,25 @@ class ConfigObj(Section):
# Windows specific hack to avoid writing '\r\r\n'
newline = '\n'
output = self._a_to_u(newline).join(out)
if self.encoding:
output = output.encode(self.encoding)
if self.BOM and ((self.encoding is None) or match_utf8(self.encoding)):
# Add the UTF8 BOM
output = BOM_UTF8 + output
if not output.endswith(newline):
output += newline
if outfile is not None:
outfile.write(output)
else:
h = open(self.filename, 'wb')
h.write(output)
h.close()
if isinstance(output, six.binary_type):
output_bytes = output
else:
output_bytes = output.encode(self.encoding or
self.default_encoding or
'ascii')
if self.BOM and ((self.encoding is None) or match_utf8(self.encoding)):
# Add the UTF8 BOM
output_bytes = BOM_UTF8 + output_bytes
if outfile is not None:
outfile.write(output_bytes)
else:
with open(self.filename, 'wb') as h:
h.write(output_bytes)
def validate(self, validator, preserve_errors=False, copy=False,
section=None):
@ -2189,7 +2204,7 @@ class ConfigObj(Section):
val,
missing=missing
)
except validator.baseErrorClass, e:
except validator.baseErrorClass as e:
if not preserve_errors or isinstance(e, self._vdtMissingValue):
out[entry] = False
else:
@ -2338,7 +2353,7 @@ class ConfigObj(Section):
This method raises a ``ReloadError`` if the ConfigObj doesn't have
a filename attribute pointing to a file.
"""
if not isinstance(self.filename, basestring):
if not isinstance(self.filename, six.string_types):
raise ReloadError()
filename = self.filename
@ -2416,13 +2431,13 @@ def flatten_errors(cfg, res, levels=None, results=None):
levels = []
results = []
if res == True:
return results
return sorted(results)
if res == False or isinstance(res, Exception):
results.append((levels[:], None, res))
if levels:
levels.pop()
return results
for (key, val) in res.items():
return sorted(results)
for (key, val) in list(res.items()):
if val == True:
continue
if isinstance(cfg.get(key), dict):
@ -2436,7 +2451,7 @@ def flatten_errors(cfg, res, levels=None, results=None):
if levels:
levels.pop()
#
return results
return sorted(results)
def get_extra_values(conf, _prepend=()):
@ -2465,4 +2480,4 @@ def get_extra_values(conf, _prepend=()):
return out
"""*A programming language is a medium of expression.* - Paul Graham"""
"""*A programming language is a medium of expression.* - Paul Graham"""