Preliminary Python3 support

commit a339ae3d28
parent 9f8da39614
@@ -61,7 +61,7 @@ def announceLoop(destination):
 # destination on the network, which will let clients
 # know how to create messages directed towards it.
 while True:
-entered = raw_input()
+entered = input()
 destination.announce()
 RNS.log("Sent announce from "+RNS.prettyhexrep(destination.hash))
 
@@ -86,7 +86,7 @@ def client(destination_hexhash, configpath, timeout=None):
 try:
 if len(destination_hexhash) != 20:
 raise ValueError("Destination length is invalid, must be 20 hexadecimal characters (10 bytes)")
-destination_hash = destination_hexhash.decode("hex")
+destination_hash = bytes.fromhex(destination_hexhash)
 except:
 RNS.log("Invalid destination entered. Check your input!\n")
 exit()
@@ -106,7 +106,7 @@ def client(destination_hexhash, configpath, timeout=None):
 # echo request to the destination specified on the
 # command line.
 while True:
-raw_input()
+input()
 
 # Let's first check if RNS knows a path to the destination.
 # If it does, we'll load the server identity and create a packet
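The hex-parsing change above is one of the recurring Python 3 patterns in this commit: str.decode("hex") no longer exists, and destination hashes are handled as bytes. A minimal sketch, not part of the commit (the hash value below is made up):

    # Python 2: destination_hash = destination_hexhash.decode("hex")
    # Python 3: parse a hex string into bytes, and back again with bytes.hex()
    destination_hexhash = "78b18e53d1bbf73dc894"      # example: 20 hex chars, 10 bytes
    destination_hash = bytes.fromhex(destination_hexhash)
    assert len(destination_hash) == 10
    assert destination_hash.hex() == destination_hexhash
    # bytes.fromhex raises ValueError on malformed input, so the surrounding
    # try/except in the example still catches bad user input.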
@@ -62,7 +62,7 @@ def announceLoop(destination):
 # destination on the network, which will let clients
 # know how to create messages directed towards it.
 while True:
-entered = raw_input()
+entered = input()
 destination.announce()
 RNS.log("Sent announce from "+RNS.prettyhexrep(destination.hash))
 
@@ -116,18 +116,19 @@ def client_disconnected(link):
 
 def client_request(message, packet):
 global serve_path
-if message in list_files():
+filename = message.decode("utf-8")
+if filename in list_files():
 try:
 # If we have the requested file, we'll
 # read it and pack it as a resource
-RNS.log("Client requested \""+message+"\"")
+RNS.log("Client requested \""+filename+"\"")
-file = open(os.path.join(serve_path, message), "r")
+file = open(os.path.join(serve_path, filename), "rb")
 file_resource = RNS.Resource(file.read(), packet.link, callback=resource_sending_concluded)
-file_resource.filename = message
+file_resource.filename = filename
 except:
 # If somethign went wrong, we close
 # the link
-RNS.log("Error while reading file \""+message+"\"", RNS.LOG_ERROR)
+RNS.log("Error while reading file \""+filename+"\"", RNS.LOG_ERROR)
 packet.link.teardown()
 else:
 # If we don't have it, we close the link
@@ -172,7 +173,7 @@ def client(destination_hexhash, configpath):
 try:
 if len(destination_hexhash) != 20:
 raise ValueError("Destination length is invalid, must be 20 hexadecimal characters (10 bytes)")
-destination_hash = destination_hexhash.decode("hex")
+destination_hash = bytes.fromhex(destination_hexhash)
 except:
 RNS.log("Invalid destination entered. Check your input!\n")
 exit()
@@ -230,11 +231,11 @@ def download(filename):
 # We just create a packet containing the
 # requested filename, and send it down the
 # link.
-request_packet = RNS.Packet(server_link, filename)
+request_packet = RNS.Packet(server_link, filename.encode("utf-8"))
 request_packet.send()
 
 print("")
-print("Requested \""+filename+"\" from server, waiting for download to begin...")
+print(("Requested \""+filename+"\" from server, waiting for download to begin..."))
 menu_mode = "download_started"
 
 # This function runs a simple menu for the user
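A pattern worth noting in the file-transfer changes above: under Python 3, data handed to a packet and data received in a request callback are bytes, while filenames and user input are str, so the example now encodes on send, decodes on receipt, and opens served files in binary mode. A small sketch of that boundary, not the commit's code (the function and variable names are made up):

    def send_request(filename: str) -> bytes:
        # Encode the textual filename before it goes on the wire
        return filename.encode("utf-8")

    def handle_request(message: bytes) -> str:
        # Decode incoming bytes before comparing against directory listings
        return message.decode("utf-8")

    payload = send_request("example.txt")
    assert isinstance(payload, bytes)
    assert handle_request(payload) == "example.txt"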
@@ -258,7 +259,7 @@ def menu():
 # Wait
 time.sleep(0.25)
 
-user_input = raw_input()
+user_input = input()
 if user_input == "q" or user_input == "quit" or user_input == "exit":
 should_quit = True
 print("")
@@ -288,7 +289,7 @@ def print_menu():
 print_filelist()
 print("")
 print("Select a file to download by entering name or number, or q to quit")
-print("> "),
+print(("> "), end=' ')
 elif menu_mode == "download_started":
 download_began = time.time()
 while menu_mode == "download_started":
@@ -305,12 +306,12 @@ def print_menu():
 while menu_mode == "downloading":
 global current_download
 percent = round(current_download.progress() * 100.0, 1)
-print("\rProgress: "+str(percent)+" % "),
+print(("\rProgress: "+str(percent)+" % "), end=' ')
 sys.stdout.flush()
 time.sleep(0.1)
 
 if menu_mode == "save_error":
-print("\rProgress: 100.0 %"),
+print(("\rProgress: 100.0 %"), end=' ')
 sys.stdout.flush()
 print("")
 print("Could not write downloaded file to disk")
@@ -319,16 +320,16 @@ def print_menu():
 
 if menu_mode == "download_concluded":
 if current_download.status == RNS.Resource.COMPLETE:
-print("\rProgress: 100.0 %"),
+print(("\rProgress: 100.0 %"), end=' ')
 sys.stdout.flush()
 print("")
-print("The download completed! Pres enter to return to the menu.")
+print("The download completed! Press enter to return to the menu.")
-raw_input()
+input()
 
 else:
 print("")
-print("The download failed! Pres enter to return to the menu.")
+print("The download failed! Press enter to return to the menu.")
-raw_input()
+input()
 
 current_download = None
 menu_mode = "main"
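The converted print calls above look like the output of a 2to3-style translation, which turns the Python 2 trailing-comma form into print((...), end=' '). The doubled parentheses are harmless; the part that matters is the end argument, which suppresses the newline so a \r-prefixed progress line can redraw in place. A stand-alone sketch, not the commit's code:

    import sys
    import time

    # Redraw a single progress line in place, as the file transfer menu does.
    for percent in range(0, 101, 20):
        print("\rProgress: " + str(percent) + " %  ", end='')
        sys.stdout.flush()
        time.sleep(0.1)
    print()  # finish with a newline once the loop is done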
@@ -431,7 +432,7 @@ def download_concluded(resource):
 saved_filename = current_filename+"."+str(counter)
 
 try:
-file = open(saved_filename, "w")
+file = open(saved_filename, "wb")
 file.write(resource.data)
 file.close()
 menu_mode = "download_concluded"
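Opening files in binary mode is the other recurring change: resource payloads are bytes in Python 3, and writing them through a text-mode handle raises TypeError. A minimal sketch of the round trip, illustrative only (it uses a temporary file rather than the example's download path):

    import tempfile, os

    data = b"\x00\x01binary resource payload"

    # Write received bytes with "wb", read them back with "rb"
    path = os.path.join(tempfile.gettempdir(), "rns_download_example.bin")
    with open(path, "wb") as f:
        f.write(data)
    with open(path, "rb") as f:
        assert f.read() == data
    os.remove(path)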
Examples/Minimal.py (new file, 81 lines)
@@ -0,0 +1,81 @@
+##########################################################
+# This RNS example demonstrates a minimal setup, that    #
+# will start up the Reticulum Network Stack, generate a  #
+# new destination, and let the user send an announce.    #
+##########################################################
+
+import argparse
+import RNS
+
+# Let's define an app name. We'll use this for all
+# destinations we create. Since this basic example
+# is part of a range of example utilities, we'll put
+# them all within the app namespace "example_utilities"
+APP_NAME = "example_utilitites"
+
+# This initialisation is executed when the program is started
+def program_setup(configpath):
+    # We must first initialise Reticulum
+    reticulum = RNS.Reticulum(configpath)
+
+    # Randomly create a new identity for our example
+    identity = RNS.Identity()
+
+    # Using the identity we just created, we create a destination.
+    # Destinations are endpoints in Reticulum, that can be addressed
+    # and communicated with. Destinations can also announce their
+    # existence, which will let the network know they are reachable
+    # and autoomatically create paths to them, from anywhere else
+    # in the network.
+    destination = RNS.Destination(identity, RNS.Destination.IN, RNS.Destination.SINGLE, APP_NAME, "minimalsample")
+
+    # We configure the destination to automatically prove all
+    # packets adressed to it. By doing this, RNS will automatically
+    # generate a proof for each incoming packet and transmit it
+    # back to the sender of that packet. This will let anyone that
+    # tries to communicate with the destination know whether their
+    # communication was received correctly.
+    destination.set_proof_strategy(RNS.Destination.PROVE_ALL)
+
+    # Everything's ready!
+    # Let's hand over control to the announce loop
+    announceLoop(destination)
+
+
+def announceLoop(destination):
+    # Let the user know that everything is ready
+    RNS.log("Minimal example "+RNS.prettyhexrep(destination.hash)+" running, hit enter to manually send an announce (Ctrl-C to quit)")
+
+    # We enter a loop that runs until the users exits.
+    # If the user hits enter, we will announce our server
+    # destination on the network, which will let clients
+    # know how to create messages directed towards it.
+    while True:
+        entered = input()
+        destination.announce()
+        RNS.log("Sent announce from "+RNS.prettyhexrep(destination.hash))
+
+
+##########################################################
+#### Program Startup #####################################
+##########################################################
+
+# This part of the program gets run at startup,
+# and parses input from the user, and then starts
+# the desired program mode.
+if __name__ == "__main__":
+    try:
+        parser = argparse.ArgumentParser(description="Bare minimum example to start Reticulum and create a destination")
+        parser.add_argument("--config", action="store", default=None, help="path to alternative Reticulum config directory", type=str)
+        args = parser.parse_args()
+
+        if args.config:
+            configarg = args.config
+        else:
+            configarg = None
+
+        program_setup(configarg)
+
+    except KeyboardInterrupt:
+        print("")
+        exit()
@@ -54,7 +54,7 @@ class Destination:
 
 # Create a digest for the destination
 digest = hashes.Hash(hashes.SHA256(), backend=default_backend())
-digest.update(name)
+digest.update(name.encode("UTF-8"))
 
 return digest.finalize()[:10]
 
@@ -83,7 +83,7 @@ class Destination:
 
 self.name = Destination.getDestinationName(app_name, *aspects)
 self.hash = Destination.getDestinationHash(app_name, *aspects)
-self.hexhash = self.hash.encode("hex_codec")
+self.hexhash = self.hash.hex()
 
 self.callback = None
 self.proofcallback = None
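Two small but typical fixes in Destination: hash objects only accept bytes in Python 3, so the destination name is encoded before hashing, and the hex representation of the digest comes from bytes.hex() instead of the removed "hex_codec" codec. A self-contained sketch of the same idea, shown with the standard-library hashlib for brevity (the diff itself uses the cryptography package's hashes.Hash; the name string is an example value):

    import hashlib

    name = "example_utilities.minimalsample"     # illustrative destination name

    # str must be encoded before hashing in Python 3
    digest = hashlib.sha256(name.encode("UTF-8")).digest()
    destination_hash = digest[:10]               # Reticulum truncates to 10 bytes

    hexhash = destination_hash.hex()             # replaces .encode("hex_codec")
    assert len(hexhash) == 20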
@@ -4,7 +4,7 @@ import os
 import RNS
 import time
 import atexit
-import vendor.umsgpack as umsgpack
+from .vendor import umsgpack as umsgpack
 from cryptography.hazmat.primitives import hashes
 from cryptography.hazmat.backends import default_backend
 from cryptography.hazmat.primitives import serialization
@@ -50,7 +50,7 @@ class Identity:
 @staticmethod
 def saveKnownDestinations():
 RNS.log("Saving known destinations to storage...", RNS.LOG_VERBOSE)
-file = open(RNS.Reticulum.storagepath+"/known_destinations","w")
+file = open(RNS.Reticulum.storagepath+"/known_destinations","wb")
 umsgpack.dump(Identity.known_destinations, file)
 file.close()
 RNS.log("Done saving known destinations to storage", RNS.LOG_VERBOSE)
@@ -59,7 +59,7 @@ class Identity:
 def loadKnownDestinations():
 if os.path.isfile(RNS.Reticulum.storagepath+"/known_destinations"):
 try:
-file = open(RNS.Reticulum.storagepath+"/known_destinations","r")
+file = open(RNS.Reticulum.storagepath+"/known_destinations","rb")
 Identity.known_destinations = umsgpack.load(file)
 file.close()
 RNS.log("Loaded "+str(len(Identity.known_destinations))+" known destinations from storage", RNS.LOG_VERBOSE)
@@ -80,7 +80,7 @@ class Identity:
 digest = hashes.Hash(hashes.SHA256(), backend=default_backend())
 digest.update(data)
 
-return digest.finalize()[:(Identity.TRUNCATED_HASHLENGTH/8)]
+return digest.finalize()[:(Identity.TRUNCATED_HASHLENGTH//8)]
 
 @staticmethod
 def getRandomHash():
@@ -91,12 +91,12 @@ class Identity:
 if packet.packet_type == RNS.Packet.ANNOUNCE:
 RNS.log("Validating announce from "+RNS.prettyhexrep(packet.destination_hash), RNS.LOG_DEBUG)
 destination_hash = packet.destination_hash
-public_key = packet.data[10:Identity.DERKEYSIZE/8+10]
+public_key = packet.data[10:Identity.DERKEYSIZE//8+10]
-random_hash = packet.data[Identity.DERKEYSIZE/8+10:Identity.DERKEYSIZE/8+20]
+random_hash = packet.data[Identity.DERKEYSIZE//8+10:Identity.DERKEYSIZE//8+20]
-signature = packet.data[Identity.DERKEYSIZE/8+20:Identity.DERKEYSIZE/8+20+Identity.KEYSIZE/8]
+signature = packet.data[Identity.DERKEYSIZE//8+20:Identity.DERKEYSIZE//8+20+Identity.KEYSIZE//8]
-app_data = ""
+app_data = b""
-if len(packet.data) > Identity.DERKEYSIZE/8+20+Identity.KEYSIZE/8:
+if len(packet.data) > Identity.DERKEYSIZE//8+20+Identity.KEYSIZE//8:
-app_data = packet.data[Identity.DERKEYSIZE/8+20+Identity.KEYSIZE/8:]
+app_data = packet.data[Identity.DERKEYSIZE//8+20+Identity.KEYSIZE//8:]
 
 signed_data = destination_hash+public_key+random_hash+app_data
 
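The /8 to //8 rewrites are the bulk of this commit. Sizes such as DERKEYSIZE and KEYSIZE are given in bits, and the code slices packet data at byte offsets derived from them; in Python 3, / always produces a float, which is not a valid slice index, so floor division // is required. A quick illustration with made-up sizes (not the library's real constants):

    DERKEYSIZE = 1040                            # bits, illustrative value
    packet_data = bytes(DERKEYSIZE // 8 + 32)    # dummy announce-sized buffer

    # Python 3: DERKEYSIZE / 8 is 130.0 (a float) and cannot be used as a slice index
    try:
        packet_data[10:DERKEYSIZE / 8 + 10]
    except TypeError as e:
        print("float slice rejected:", e)

    # Floor division keeps the offsets as integers
    public_key = packet_data[10:DERKEYSIZE // 8 + 10]
    assert len(public_key) == DERKEYSIZE // 8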
@@ -198,11 +198,11 @@ class Identity:
 
 def updateHashes(self):
 self.hash = Identity.truncatedHash(self.pub_bytes)
-self.hexhash = self.hash.encode("hex_codec")
+self.hexhash = self.hash.hex()
 
 def save(self, path):
 try:
-with open(path, "w") as key_file:
+with open(path, "wb") as key_file:
 key_file.write(self.prv_bytes)
 return True
 return False
@@ -212,7 +212,7 @@ class Identity:
 
 def load(self, path):
 try:
-with open(path, "r") as key_file:
+with open(path, "rb") as key_file:
 prv_bytes = key_file.read()
 return self.loadPrivateKey(prv_bytes)
 return False
@@ -222,10 +222,10 @@ class Identity:
 
 def encrypt(self, plaintext):
 if self.pub != None:
-chunksize = (Identity.KEYSIZE-Identity.PADDINGSIZE)/8
+chunksize = (Identity.KEYSIZE-Identity.PADDINGSIZE)//8
 chunks = int(math.ceil(len(plaintext)/(float(chunksize))))
 
-ciphertext = "";
+ciphertext = b"";
 for chunk in range(chunks):
 start = chunk*chunksize
 end = (chunk+1)*chunksize
@@ -249,10 +249,10 @@ class Identity:
 if self.prv != None:
 plaintext = None
 try:
-chunksize = (Identity.KEYSIZE)/8
+chunksize = (Identity.KEYSIZE)//8
 chunks = int(math.ceil(len(ciphertext)/(float(chunksize))))
 
-plaintext = "";
+plaintext = b"";
 for chunk in range(chunks):
 start = chunk*chunksize
 end = (chunk+1)*chunksize
@@ -1,5 +1,5 @@
-from __future__ import print_function
-from Interface import Interface
+
+from .Interface import Interface
 from time import sleep
 import sys
 import serial
@@ -288,7 +288,7 @@ class AX25KISSInterface(Interface):
 in_frame = False
 command = KISS.CMD_UNKNOWN
 escape = False
 sleep(0.08)
 
 except Exception as e:
 self.online = False

@@ -1,5 +1,5 @@
-from __future__ import print_function
-from Interface import Interface
+
+from .Interface import Interface
 from time import sleep
 import sys
 import serial
@@ -236,11 +236,11 @@ class KISSInterface(Interface):
 else:
 time_since_last = int(time.time()*1000) - last_read_ms
 if len(data_buffer) > 0 and time_since_last > self.timeout:
 data_buffer = ""
 in_frame = False
 command = KISS.CMD_UNKNOWN
 escape = False
 sleep(0.08)
 
 except Exception as e:
 self.online = False

@@ -1,5 +1,5 @@
-from __future__ import print_function
-from Interface import Interface
+
+from .Interface import Interface
 from time import sleep
 import sys
 import serial
@@ -416,11 +416,11 @@ class RNodeInterface(Interface):
 time_since_last = int(time.time()*1000) - last_read_ms
 if len(data_buffer) > 0 and time_since_last > self.timeout:
 RNS.log(str(self)+" serial read timeout", RNS.LOG_DEBUG)
 data_buffer = ""
 in_frame = False
 command = KISS.CMD_UNKNOWN
 escape = False
 sleep(0.08)
 
 except Exception as e:
 self.online = False

@@ -1,5 +1,5 @@
-from __future__ import print_function
-from Interface import Interface
+
+from .Interface import Interface
 from time import sleep
 import sys
 import serial
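All of the interface drivers switch from Python 2 implicit relative imports (from Interface import Interface) to the explicit package-relative form Python 3 requires. A sketch of the pattern under an assumed package layout similar to Reticulum's (the mypkg names below are placeholders, not the project's real module names):

    # mypkg/
    #     __init__.py
    #     vendor/
    #         __init__.py
    #         umsgpack.py
    #     Interfaces/
    #         __init__.py
    #         Interface.py
    #         UdpInterface.py
    #
    # Inside mypkg/Interfaces/UdpInterface.py:

    from .Interface import Interface           # sibling module in the same package
    from ..vendor import umsgpack as umsgpack  # package-relative import of vendored code
    # Note: relative imports only resolve when the module is loaded as part of
    # the package, not when the file is executed directly as a script.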
@@ -1,5 +1,5 @@
-from Interface import Interface
+from .Interface import Interface
-import SocketServer
+import socketserver
 import threading
 import socket
 import time
@@ -27,7 +27,7 @@ class UdpInterface(Interface):
 
 self.owner = owner
 address = (self.bind_ip, self.bind_port)
-self.server = SocketServer.UDPServer(address, handlerFactory(self.processIncoming))
+self.server = socketserver.UDPServer(address, handlerFactory(self.processIncoming))
 
 thread = threading.Thread(target=self.server.serve_forever)
 thread.setDaemon(True)
@@ -52,10 +52,10 @@ class UdpInterface(Interface):
 def __str__(self):
 return "UdpInterface["+self.name+"/"+self.bind_ip+":"+str(self.bind_port)+"]"
 
-class UdpInterfaceHandler(SocketServer.BaseRequestHandler):
+class UdpInterfaceHandler(socketserver.BaseRequestHandler):
 def __init__(self, callback, *args, **keys):
 self.callback = callback
-SocketServer.BaseRequestHandler.__init__(self, *args, **keys)
+socketserver.BaseRequestHandler.__init__(self, *args, **keys)
 
 def handle(self):
 data = self.request[0]
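The SocketServer module was renamed to socketserver in Python 3, which is essentially the only change the UDP interface needs besides the relative import. A minimal, runnable sketch of the same pattern — a UDPServer on a daemon thread whose handler forwards each datagram to a callback (the names, address and port are illustrative, not the interface's configuration):

    import socketserver
    import threading

    def handler_factory(callback):
        # Build a handler class bound to a specific callback, as the interface does
        class UdpHandler(socketserver.BaseRequestHandler):
            def handle(self):
                data = self.request[0]          # raw datagram bytes
                callback(data)
        return UdpHandler

    received = []
    server = socketserver.UDPServer(("127.0.0.1", 0), handler_factory(received.append))

    thread = threading.Thread(target=server.serve_forever, daemon=True)
    thread.start()

    # ... datagrams sent to server.server_address now land in `received` ...
    server.shutdown()
    server.server_close()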
RNS/Link.py (14 changed lines)
@@ -5,7 +5,7 @@ from cryptography.hazmat.primitives.asymmetric import ec
 from cryptography.hazmat.primitives.kdf.hkdf import HKDF
 from cryptography.fernet import Fernet
 from time import sleep
-import vendor.umsgpack as umsgpack
+from .vendor import umsgpack as umsgpack
 import threading
 import base64
 import time
@@ -182,7 +182,7 @@ class Link:
 if self.initiator:
 peer_pub_bytes = packet.data[:Link.ECPUBSIZE]
 signed_data = self.link_id+peer_pub_bytes
-signature = packet.data[Link.ECPUBSIZE:RNS.Identity.KEYSIZE/8+Link.ECPUBSIZE]
+signature = packet.data[Link.ECPUBSIZE:RNS.Identity.KEYSIZE//8+Link.ECPUBSIZE]
 
 if self.destination.identity.validate(signature, signed_data):
 self.loadPeer(peer_pub_bytes)
@@ -378,23 +378,23 @@ class Link:
 elif packet.context == RNS.Packet.RESOURCE_REQ:
 plaintext = self.decrypt(packet.data)
 if ord(plaintext[:1]) == RNS.Resource.HASHMAP_IS_EXHAUSTED:
-resource_hash = plaintext[1+RNS.Resource.MAPHASH_LEN:RNS.Identity.HASHLENGTH/8+1+RNS.Resource.MAPHASH_LEN]
+resource_hash = plaintext[1+RNS.Resource.MAPHASH_LEN:RNS.Identity.HASHLENGTH//8+1+RNS.Resource.MAPHASH_LEN]
 else:
-resource_hash = plaintext[1:RNS.Identity.HASHLENGTH/8+1]
+resource_hash = plaintext[1:RNS.Identity.HASHLENGTH//8+1]
 for resource in self.outgoing_resources:
 if resource.hash == resource_hash:
 resource.request(plaintext)
 
 elif packet.context == RNS.Packet.RESOURCE_HMU:
 plaintext = self.decrypt(packet.data)
-resource_hash = plaintext[:RNS.Identity.HASHLENGTH/8]
+resource_hash = plaintext[:RNS.Identity.HASHLENGTH//8]
 for resource in self.incoming_resources:
 if resource_hash == resource.hash:
 resource.hashmap_update_packet(plaintext)
 
 elif packet.context == RNS.Packet.RESOURCE_ICL:
 plaintext = self.decrypt(packet.data)
-resource_hash = plaintext[:RNS.Identity.HASHLENGTH/8]
+resource_hash = plaintext[:RNS.Identity.HASHLENGTH//8]
 for resource in self.incoming_resources:
 if resource_hash == resource.hash:
 resource.cancel()
@@ -415,7 +415,7 @@ class Link:
 
 elif packet.packet_type == RNS.Packet.PROOF:
 if packet.context == RNS.Packet.RESOURCE_PRF:
-resource_hash = packet.data[0:RNS.Identity.HASHLENGTH/8]
+resource_hash = packet.data[0:RNS.Identity.HASHLENGTH//8]
 for resource in self.outgoing_resources:
 if resource_hash == resource.hash:
 resource.validateProof(packet.data)
@@ -89,11 +89,10 @@ class Packet:
 
 def pack(self):
 self.destination_hash = self.destination.hash
-self.header = ""
+self.header = b""
 self.header += struct.pack("!B", self.flags)
 self.header += struct.pack("!B", self.hops)
 
-
 if self.context == Packet.LRPROOF:
 self.header += self.destination.link_id
 self.ciphertext = self.data
@@ -135,8 +134,7 @@ class Packet:
 raise IOError("Packet with header type 2 must have a transport ID")
 
-
-self.header += chr(self.context)
+self.header += bytes([self.context])
 
 self.raw = self.header + self.ciphertext
 
 if len(self.raw) > self.MTU:
@@ -146,8 +144,8 @@ class Packet:
 self.updateHash()
 
 def unpack(self):
-self.flags = ord(self.raw[0])
+self.flags = self.raw[0]
-self.hops = ord(self.raw[1])
+self.hops = self.raw[1]
 
 self.header_type = (self.flags & 0b11000000) >> 6
 self.transport_type = (self.flags & 0b00110000) >> 4
@@ -229,7 +227,7 @@ class Packet:
 return RNS.Identity.truncatedHash(self.getHashablePart())
 
 def getHashablePart(self):
-hashable_part = struct.pack("!B", struct.unpack("!B", self.raw[0])[0] & 0b00001111)
+hashable_part = bytes([self.raw[0] & 0b00001111])
 if self.header_type == Packet.HEADER_2:
 hashable_part += self.raw[12:]
 else:
@@ -253,8 +251,8 @@ class PacketReceipt:
 DELIVERED = 0x02
 
 
-EXPL_LENGTH = RNS.Identity.HASHLENGTH/8+RNS.Identity.SIGLENGTH/8
+EXPL_LENGTH = RNS.Identity.HASHLENGTH//8+RNS.Identity.SIGLENGTH//8
-IMPL_LENGTH = RNS.Identity.SIGLENGTH/8
+IMPL_LENGTH = RNS.Identity.SIGLENGTH//8
 
 # Creates a new packet receipt from a sent packet
 def __init__(self, packet):
@@ -280,8 +278,8 @@ class PacketReceipt:
 # TODO: Hardcoded as explicit proofs for now
 if True or len(proof) == PacketReceipt.EXPL_LENGTH:
 # This is an explicit proof
-proof_hash = proof[:RNS.Identity.HASHLENGTH/8]
+proof_hash = proof[:RNS.Identity.HASHLENGTH//8]
-signature = proof[RNS.Identity.HASHLENGTH/8:RNS.Identity.HASHLENGTH/8+RNS.Identity.SIGLENGTH/8]
+signature = proof[RNS.Identity.HASHLENGTH//8:RNS.Identity.HASHLENGTH//8+RNS.Identity.SIGLENGTH//8]
 if proof_hash == self.hash:
 proof_valid = link.validate(signature, self.hash)
 if proof_valid:
@@ -297,7 +295,8 @@ class PacketReceipt:
 return False
 elif len(proof) == PacketReceipt.IMPL_LENGTH:
 pass
-# signature = proof[:RNS.Identity.SIGLENGTH/8]
+# TODO: Why is this disabled?
+# signature = proof[:RNS.Identity.SIGLENGTH//8]
 # proof_valid = self.link.validate(signature, self.hash)
 # if proof_valid:
 # self.status = PacketReceipt.DELIVERED
@@ -317,8 +316,8 @@ class PacketReceipt:
 def validateProof(self, proof):
 if len(proof) == PacketReceipt.EXPL_LENGTH:
 # This is an explicit proof
-proof_hash = proof[:RNS.Identity.HASHLENGTH/8]
+proof_hash = proof[:RNS.Identity.HASHLENGTH//8]
-signature = proof[RNS.Identity.HASHLENGTH/8:RNS.Identity.HASHLENGTH/8+RNS.Identity.SIGLENGTH/8]
+signature = proof[RNS.Identity.HASHLENGTH//8:RNS.Identity.HASHLENGTH//8+RNS.Identity.SIGLENGTH//8]
 if proof_hash == self.hash:
 proof_valid = self.destination.identity.validate(signature, self.hash)
 if proof_valid:
@@ -337,7 +336,7 @@ class PacketReceipt:
 if self.destination.identity == None:
 return False
 
-signature = proof[:RNS.Identity.SIGLENGTH/8]
+signature = proof[:RNS.Identity.SIGLENGTH//8]
 proof_valid = self.destination.identity.validate(signature, self.hash)
 if proof_valid:
 self.status = PacketReceipt.DELIVERED
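Packet.pack() and unpack() show the byte-handling shift most clearly: headers are accumulated as bytes (starting from b"" and using bytes([value]) where Python 2 used chr()), and indexing a bytes object already yields an int, so ord() disappears on the unpack side. A stand-alone sketch of both directions — the bit masks mirror those visible in the diff, but the values and field meanings here are made up:

    import struct

    flags = 0b01100001   # illustrative flag byte
    hops = 3
    context = 0x07

    # Pack: build the header as bytes
    header = b""
    header += struct.pack("!B", flags)
    header += struct.pack("!B", hops)
    header += bytes([context])       # Python 2 would have used chr(context)

    # Unpack: indexing bytes gives ints directly, no ord() needed
    assert header[0] == flags
    assert header[1] == hops
    header_type = (header[0] & 0b11000000) >> 6
    hashable_part = bytes([header[0] & 0b00001111])
    print(header_type, hashable_part)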
@@ -3,7 +3,7 @@ import bz2
 import math
 import time
 import threading
-import vendor.umsgpack as umsgpack
+from .vendor import umsgpack as umsgpack
 from time import sleep
 
 class Resource:
@@ -74,7 +74,7 @@ class Resource:
 resource.watchdog_job()
 
 return resource
-except Exception as e:
+except:
 RNS.log("Could not decode resource advertisement, dropping resource", RNS.LOG_DEBUG)
 return None
 
@@ -123,7 +123,7 @@ class Resource:
 
 self.size = len(self.data)
 
-self.hashmap = ""
+self.hashmap = b""
 self.sent_parts = 0
 self.parts = []
 for i in range(0,int(math.ceil(self.size/float(Resource.SDU)))):
@@ -158,7 +158,7 @@ class Resource:
 self.last_activity = time.time()
 self.retries_left = self.max_retries
 
-update = umsgpack.unpackb(plaintext[RNS.Identity.HASHLENGTH/8:])
+update = umsgpack.unpackb(plaintext[RNS.Identity.HASHLENGTH//8:])
 self.hashmap_update(update[0], update[1])
 
 
@@ -166,7 +166,7 @@ class Resource:
 if not self.status == Resource.FAILED:
 self.status = Resource.TRANSFERRING
 seg_len = ResourceAdvertisement.HASHMAP_MAX_LEN
-hashes = len(hashmap)/Resource.MAPHASH_LEN
+hashes = len(hashmap)//Resource.MAPHASH_LEN
 for i in range(0,hashes):
 if self.hashmap[i+segment*seg_len] == None:
 self.hashmap_height += 1
@@ -283,7 +283,7 @@ class Resource:
 if not self.status == Resource.FAILED:
 try:
 self.status = Resource.ASSEMBLING
-stream = ""
+stream = b""
 for part in self.parts:
 stream += part
 
@@ -324,8 +324,8 @@ class Resource:
 
 def validateProof(self, proof_data):
 if not self.status == Resource.FAILED:
-if len(proof_data) == RNS.Identity.HASHLENGTH/8*2:
+if len(proof_data) == RNS.Identity.HASHLENGTH//8*2:
-if proof_data[RNS.Identity.HASHLENGTH/8:] == self.expected_proof:
+if proof_data[RNS.Identity.HASHLENGTH//8:] == self.expected_proof:
 self.status = Resource.COMPLETE
 if self.callback != None:
 self.link.resource_concluded(self)
@@ -379,7 +379,7 @@ class Resource:
 if not self.waiting_for_hmu:
 self.outstanding_parts = 0
 hashmap_exhausted = Resource.HASHMAP_IS_NOT_EXHAUSTED
-requested_hashes = ""
+requested_hashes = b""
 
 i = 0; pn = 0
 for part in self.parts:
@@ -397,12 +397,13 @@ class Resource:
 if i >= self.window or hashmap_exhausted == Resource.HASHMAP_IS_EXHAUSTED:
 break
 
-hmu_part = chr(hashmap_exhausted)
+hmu_part = bytes([hashmap_exhausted])
 if hashmap_exhausted == Resource.HASHMAP_IS_EXHAUSTED:
 last_map_hash = self.hashmap[self.hashmap_height-1]
 hmu_part += last_map_hash
 self.waiting_for_hmu = True
 
+requested_data = b""
 request_data = hmu_part + self.hash + requested_hashes
 request_packet = RNS.Packet(self.link, request_data, context = RNS.Packet.RESOURCE_REQ)
 
@@ -424,12 +425,12 @@ class Resource:
 
 self.retries_left = self.max_retries
 
-wants_more_hashmap = True if ord(request_data[0]) == Resource.HASHMAP_IS_EXHAUSTED else False
+wants_more_hashmap = True if request_data[0] == Resource.HASHMAP_IS_EXHAUSTED else False
 pad = 1+Resource.MAPHASH_LEN if wants_more_hashmap else 1
 
-requested_hashes = request_data[pad+RNS.Identity.HASHLENGTH/8:]
+requested_hashes = request_data[pad+RNS.Identity.HASHLENGTH//8:]
 
-for i in range(0,len(requested_hashes)/Resource.MAPHASH_LEN):
+for i in range(0,len(requested_hashes)//Resource.MAPHASH_LEN):
 requested_hash = requested_hashes[i*Resource.MAPHASH_LEN:(i+1)*Resource.MAPHASH_LEN]
 
 pi = 0
@@ -458,13 +459,13 @@ class Resource:
 RNS.log("Resource sequencing error, cancelling transfer!", RNS.LOG_ERROR)
 self.cancel()
 else:
-segment = part_index / ResourceAdvertisement.HASHMAP_MAX_LEN
+segment = part_index // ResourceAdvertisement.HASHMAP_MAX_LEN
 
 
 hashmap_start = segment*ResourceAdvertisement.HASHMAP_MAX_LEN
 hashmap_end = min((segment+1)*ResourceAdvertisement.HASHMAP_MAX_LEN, len(self.parts))
 
-hashmap = ""
+hashmap = b""
 for i in range(hashmap_start,hashmap_end):
 hashmap += self.hashmap[i*Resource.MAPHASH_LEN:(i+1)*Resource.MAPHASH_LEN]
 
@@ -523,18 +524,18 @@ class ResourceAdvertisement:
 hashmap_start = segment*ResourceAdvertisement.HASHMAP_MAX_LEN
 hashmap_end = min((segment+1)*ResourceAdvertisement.HASHMAP_MAX_LEN, self.n)
 
-hashmap = ""
+hashmap = b""
 for i in range(hashmap_start,hashmap_end):
 hashmap += self.m[i*Resource.MAPHASH_LEN:(i+1)*Resource.MAPHASH_LEN]
 
 dictionary = {
-u"t": self.t,
+"t": self.t,
-u"d": self.d,
+"d": self.d,
-u"n": self.n,
+"n": self.n,
-u"h": self.h,
+"h": self.h,
-u"r": self.r,
+"r": self.r,
-u"f": self.f,
+"f": self.f,
-u"m": hashmap
+"m": hashmap
 }
 
 return umsgpack.packb(dictionary)
|
|||||||
from Interfaces import *
|
from .Interfaces import *
|
||||||
import ConfigParser
|
import configparser
|
||||||
from vendor.configobj import ConfigObj
|
from .vendor.configobj import ConfigObj
|
||||||
import RNS
|
import RNS
|
||||||
import atexit
|
import atexit
|
||||||
import struct
|
import struct
|
||||||
|
@@ -6,7 +6,7 @@ import struct
 import threading
 import traceback
 from time import sleep
-import vendor.umsgpack as umsgpack
+from .vendor import umsgpack as umsgpack
 
 class Transport:
 # Constants
@@ -84,7 +84,7 @@ class Transport:
 packet_hashlist_path = RNS.Reticulum.configdir+"/packet_hashlist"
 if os.path.isfile(packet_hashlist_path):
 try:
-file = open(packet_hashlist_path, "r")
+file = open(packet_hashlist_path, "rb")
 Transport.packet_hashlist = umsgpack.unpackb(file.read())
 file.close()
 except Exception as e:
@@ -144,7 +144,7 @@ class Transport:
 announce_identity = RNS.Identity.recall(packet.destination_hash)
 announce_destination = RNS.Destination(announce_identity, RNS.Destination.OUT, RNS.Destination.SINGLE, "unknown", "unknown");
 announce_destination.hash = packet.destination_hash
-announce_destination.hexhash = announce_destination.hash.encode("hex_codec")
+announce_destination.hexhash = announce_destination.hash.hex()
 new_packet = RNS.Packet(announce_destination, announce_data, RNS.Packet.ANNOUNCE, context = announce_context, header_type = RNS.Packet.HEADER_2, transport_type = Transport.TRANSPORT, transport_id = Transport.identity.hash)
 new_packet.hops = announce_entry[4]
 RNS.log("Rebroadcasting announce for "+RNS.prettyhexrep(announce_destination.hash)+" with hop count "+str(new_packet.hops), RNS.LOG_DEBUG)
@@ -423,7 +423,7 @@ class Transport:
 # First, check that the announce is not for a destination
 # local to this system, and that hops are less than the max
 if (not any(packet.destination_hash == d.hash for d in Transport.destinations) and packet.hops < Transport.PATHFINDER_M+1):
-random_blob = packet.data[RNS.Identity.DERKEYSIZE/8+10:RNS.Identity.DERKEYSIZE/8+20]
+random_blob = packet.data[RNS.Identity.DERKEYSIZE//8+10:RNS.Identity.DERKEYSIZE//8+20]
 random_blobs = []
 if packet.destination_hash in Transport.destination_table:
 random_blobs = Transport.destination_table[packet.destination_hash][4]
@@ -541,7 +541,7 @@ class Transport:
 # plaintext = link.decrypt(packet.data)
 
 if len(packet.data) == RNS.PacketReceipt.EXPL_LENGTH:
-proof_hash = packet.data[:RNS.Identity.HASHLENGTH/8]
+proof_hash = packet.data[:RNS.Identity.HASHLENGTH//8]
 else:
 proof_hash = None
 
@@ -612,7 +612,7 @@ class Transport:
 if RNS.Transport.shouldCache(packet):
 try:
 packet_hash = RNS.hexrep(packet.getHash(), delimit=False)
-file = open(RNS.Reticulum.cachepath+"/"+packet_hash, "w")
+file = open(RNS.Reticulum.cachepath+"/"+packet_hash, "wb")
 file.write(packet.raw)
 file.close()
 RNS.log("Wrote packet "+packet_hash+" to cache", RNS.LOG_EXTREME)
@@ -628,7 +628,7 @@ class Transport:
 packet_hash = RNS.hexrep(packet.data, delimit=False)
 path = RNS.Reticulum.cachepath+"/"+packet_hash
 if os.path.isfile(path):
-file = open(path, "r")
+file = open(path, "rb")
 raw = file.read()
 file.close()
 packet = RNS.Packet(None, raw)
@@ -642,7 +642,7 @@ class Transport:
 RNS.log("Cache request for "+RNS.prettyhexrep(packet_hash), RNS.LOG_EXTREME)
 path = RNS.Reticulum.cachepath+"/"+RNS.hexrep(packet_hash, delimit=False)
 if os.path.isfile(path):
-file = open(path, "r")
+file = open(path, "rb")
 raw = file.read()
 Transport.inbound(raw)
 file.close()
@@ -665,8 +665,8 @@ class Transport:
 
 @staticmethod
 def pathRequestHandler(data, packet):
-if len(data) >= RNS.Identity.TRUNCATED_HASHLENGTH/8:
+if len(data) >= RNS.Identity.TRUNCATED_HASHLENGTH//8:
-Transport.pathRequest(data[:RNS.Identity.TRUNCATED_HASHLENGTH/8])
+Transport.pathRequest(data[:RNS.Identity.TRUNCATED_HASHLENGTH//8])
 
 @staticmethod
 def pathRequest(destination_hash):
@@ -704,7 +704,7 @@ class Transport:
 def exitHandler():
 try:
 packet_hashlist_path = RNS.Reticulum.configdir+"/packet_hashlist"
-file = open(packet_hashlist_path, "w")
+file = open(packet_hashlist_path, "wb")
 file.write(umsgpack.packb(Transport.packet_hashlist))
 file.close()
 except Exception as e:
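Transport persists its packet hash list and cache entries with the vendored umsgpack module, and msgpack output is binary, so each of these open() calls switches to "wb"/"rb". A sketch of the same save/restore cycle, assuming the u-msgpack-python package (the library Reticulum vendors) is installed as umsgpack; the path and data are placeholders:

    import tempfile, os
    import umsgpack   # u-msgpack-python, the library vendored as RNS/vendor/umsgpack.py

    packet_hashlist = [b"\x01" * 10, b"\x02" * 10]   # illustrative hashes

    path = os.path.join(tempfile.gettempdir(), "packet_hashlist_example")

    # msgpack output is bytes, so the file must be opened in binary mode
    with open(path, "wb") as f:
        f.write(umsgpack.packb(packet_hashlist))

    with open(path, "rb") as f:
        restored = umsgpack.unpackb(f.read())

    assert restored == packet_hashlist
    os.remove(path)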
@@ -82,7 +82,7 @@ def hexrep(data, delimit=True):
 
 def prettyhexrep(data):
 delimiter = ""
-hexrep = "<"+delimiter.join("{:02x}".format(ord(c)) for c in data)+">"
+hexrep = "<"+delimiter.join("{:02x}".format(c) for c in data)+">"
 return hexrep
 
 def panic():
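Same root cause as the Packet changes: iterating over a bytes value in Python 3 yields integers, so the ord() call in prettyhexrep becomes redundant. A compact illustration based directly on the line above (the input value is an example):

    def prettyhexrep(data: bytes) -> str:
        # Each `c` is already an int in Python 3, so it can be formatted directly
        return "<" + "".join("{:02x}".format(c) for c in data) + ">"

    print(prettyhexrep(b"\xde\xad\xbe\xef"))   # -> <deadbeef>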
RNS/vendor/configobj.py (vendored, 277 changed lines)
@@ -1,22 +1,17 @@
 # configobj.py
 # A config file reader/writer that supports nested sections in config files.
-# Copyright (C) 2005-2010 Michael Foord, Nicola Larosa
+# Copyright (C) 2005-2014:
-# E-mail: fuzzyman AT voidspace DOT org DOT uk
+# (name) : (email)
-# nico AT tekNico DOT net
+# Michael Foord: fuzzyman AT voidspace DOT org DOT uk
+# Nicola Larosa: nico AT tekNico DOT net
+# Rob Dennis: rdennis AT gmail DOT com
+# Eli Courtwright: eli AT courtwright DOT org
 
-# ConfigObj 4
+# This software is licensed under the terms of the BSD license.
-# http://www.voidspace.org.uk/python/configobj.html
+# http://opensource.org/licenses/BSD-3-Clause
 
-# Released subject to the BSD License
+# ConfigObj 5 - main repository for documentation and issue tracking:
-# Please see http://www.voidspace.org.uk/python/license.shtml
+# https://github.com/DiffSK/configobj
 
-# Scripts maintained at http://www.voidspace.org.uk/python/index.shtml
-# For information about bugfixes, updates and support, please join the
-# ConfigObj mailing list:
-# http://lists.sourceforge.net/lists/listinfo/configobj-develop
-# Comments, suggestions and bug reports welcome.
 
-from __future__ import generators
 
 import os
 import re
@@ -24,6 +19,8 @@ import sys
 
 from codecs import BOM_UTF8, BOM_UTF16, BOM_UTF16_BE, BOM_UTF16_LE
 
+import six
+__version__ = '5.0.6'
 
 # imported lazily to avoid startup performance hit if it isn't used
 compiler = None
@@ -38,7 +35,7 @@ BOMS = {
 BOM_UTF16: ('utf_16', 'utf_16'),
 }
 # All legal variants of the BOM codecs.
-# The list of aliases is not meant to be exhaustive, is there a
+# TODO: the list of aliases is not meant to be exhaustive, is there a
 # better way ?
 BOM_LIST = {
 'utf_16': 'utf_16',
@@ -83,20 +80,7 @@ tdquot = "'''%s'''"
 # Sentinel for use in getattr calls to replace hasattr
 MISSING = object()
 
-__version__ = '4.7.2'
 
-try:
-any
-except NameError:
-def any(iterable):
-for entry in iterable:
-if entry:
-return True
-return False
 
 
 __all__ = (
-'__version__',
 'DEFAULT_INDENT_TYPE',
 'DEFAULT_INTERPOLATION',
 'ConfigObjError',
@@ -137,6 +121,8 @@ OPTION_DEFAULTS = {
 'write_empty_values': False,
 }
 
+# this could be replaced if six is used for compatibility, or there are no
+# more assertions about items being a string
 
 
 def getObj(s):
@@ -155,13 +141,12 @@ class UnknownType(Exception):
 class Builder(object):
 
 def build(self, o):
-m = getattr(self, 'build_' + o.__class__.__name__, None)
 if m is None:
 raise UnknownType(o.__class__.__name__)
 return m(o)
 
 def build_List(self, o):
-return map(self.build, o.getChildren())
+return list(map(self.build, o.getChildren()))
 
 def build_Const(self, o):
 return o.value
@@ -170,7 +155,7 @@ class Builder(object):
 d = {}
 i = iter(map(self.build, o.getChildren()))
 for el in i:
-d[el] = i.next()
+d[el] = next(i)
 return d
 
 def build_Tuple(self, o):
@@ -188,7 +173,7 @@ class Builder(object):
 raise UnknownType('Undefined Name')
 
 def build_Add(self, o):
-real, imag = map(self.build_Const, o.getChildren())
+real, imag = list(map(self.build_Const, o.getChildren()))
 try:
 real = float(real)
 except TypeError:
@@ -214,8 +199,10 @@ _builder = Builder()
 def unrepr(s):
 if not s:
 return s
-return _builder.build(getObj(s))
 
+# this is supposed to be safe
+import ast
+return ast.literal_eval(s)
 
 
 class ConfigObjError(SyntaxError):
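The vendored ConfigObj is bumped from 4.7.2 to 5.0.6, whose unrepr() drops the old compiler-module AST walker in favour of ast.literal_eval, which only evaluates literal Python values and is therefore safe to run on untrusted config text. A quick illustration with made-up config values:

    import ast

    # Literal containers, strings and numbers are evaluated...
    assert ast.literal_eval("{'port': 4242, 'names': ['a', 'b']}") == {"port": 4242, "names": ["a", "b"]}

    # ...but arbitrary expressions are rejected instead of executed
    try:
        ast.literal_eval("__import__('os').system('echo unsafe')")
    except ValueError as e:
        print("rejected:", e)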
@ -518,7 +505,7 @@ class Section(dict):
|
|||||||
self._initialise()
|
self._initialise()
|
||||||
# we do this explicitly so that __setitem__ is used properly
|
# we do this explicitly so that __setitem__ is used properly
|
||||||
# (rather than just passing to ``dict.__init__``)
|
# (rather than just passing to ``dict.__init__``)
|
||||||
for entry, value in indict.iteritems():
|
for entry, value in indict.items():
|
||||||
self[entry] = value
|
self[entry] = value
|
||||||
|
|
||||||
|
|
||||||
@ -566,11 +553,11 @@ class Section(dict):
|
|||||||
"""Fetch the item and do string interpolation."""
|
"""Fetch the item and do string interpolation."""
|
||||||
val = dict.__getitem__(self, key)
|
val = dict.__getitem__(self, key)
|
||||||
if self.main.interpolation:
|
if self.main.interpolation:
|
||||||
if isinstance(val, basestring):
|
if isinstance(val, six.string_types):
|
||||||
return self._interpolate(key, val)
|
return self._interpolate(key, val)
|
||||||
if isinstance(val, list):
|
if isinstance(val, list):
|
||||||
def _check(entry):
|
def _check(entry):
|
||||||
if isinstance(entry, basestring):
|
if isinstance(entry, six.string_types):
|
||||||
return self._interpolate(key, entry)
|
return self._interpolate(key, entry)
|
||||||
return entry
|
return entry
|
||||||
new = [_check(entry) for entry in val]
|
new = [_check(entry) for entry in val]
|
||||||
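basestring is gone in Python 3, so the interpolation checks above test against six.string_types instead, which is (basestring,) on Python 2 and (str,) on Python 3. A small illustration (assuming six is available, as the rest of the port does):

import six

def is_textual(value):
    # one isinstance() test that covers str/unicode on both interpreter lines
    return isinstance(value, six.string_types)

print(is_textual('some value'))   # True
print(is_textual(['a', 'list']))  # False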
@ -593,7 +580,7 @@ class Section(dict):
|
|||||||
``unrepr`` must be set when setting a value to a dictionary, without
|
``unrepr`` must be set when setting a value to a dictionary, without
|
||||||
creating a new sub-section.
|
creating a new sub-section.
|
||||||
"""
|
"""
|
||||||
if not isinstance(key, basestring):
|
if not isinstance(key, six.string_types):
|
||||||
raise ValueError('The key "%s" is not a string.' % key)
|
raise ValueError('The key "%s" is not a string.' % key)
|
||||||
|
|
||||||
# add the comment
|
# add the comment
|
||||||
@ -627,11 +614,11 @@ class Section(dict):
|
|||||||
if key not in self:
|
if key not in self:
|
||||||
self.scalars.append(key)
|
self.scalars.append(key)
|
||||||
if not self.main.stringify:
|
if not self.main.stringify:
|
||||||
if isinstance(value, basestring):
|
if isinstance(value, six.string_types):
|
||||||
pass
|
pass
|
||||||
elif isinstance(value, (list, tuple)):
|
elif isinstance(value, (list, tuple)):
|
||||||
for entry in value:
|
for entry in value:
|
||||||
if not isinstance(entry, basestring):
|
if not isinstance(entry, six.string_types):
|
||||||
raise TypeError('Value is not a string "%s".' % entry)
|
raise TypeError('Value is not a string "%s".' % entry)
|
||||||
else:
|
else:
|
||||||
raise TypeError('Value is not a string "%s".' % value)
|
raise TypeError('Value is not a string "%s".' % value)
|
||||||
@ -721,7 +708,7 @@ class Section(dict):
|
|||||||
|
|
||||||
def items(self):
|
def items(self):
|
||||||
"""D.items() -> list of D's (key, value) pairs, as 2-tuples"""
|
"""D.items() -> list of D's (key, value) pairs, as 2-tuples"""
|
||||||
return zip((self.scalars + self.sections), self.values())
|
return list(zip((self.scalars + self.sections), list(self.values())))
|
||||||
|
|
||||||
|
|
||||||
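zip() and dict.values() return lazy views in Python 3, so items() now wraps them in list() to keep returning a real list of 2-tuples, as its docstring promises. For instance:

scalars = ['host', 'port']
values = ['localhost', '4242']

# materialise the pairing so callers can index and re-iterate the result
pairs = list(zip(scalars, values))
print(pairs)   # [('host', 'localhost'), ('port', '4242')]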
def keys(self):
|
def keys(self):
|
||||||
@ -736,7 +723,7 @@ class Section(dict):
|
|||||||
|
|
||||||
def iteritems(self):
|
def iteritems(self):
|
||||||
"""D.iteritems() -> an iterator over the (key, value) items of D"""
|
"""D.iteritems() -> an iterator over the (key, value) items of D"""
|
||||||
return iter(self.items())
|
return iter(list(self.items()))
|
||||||
|
|
||||||
|
|
||||||
def iterkeys(self):
|
def iterkeys(self):
|
||||||
@ -748,7 +735,7 @@ class Section(dict):
|
|||||||
|
|
||||||
def itervalues(self):
|
def itervalues(self):
|
||||||
"""D.itervalues() -> an iterator over the values of D"""
|
"""D.itervalues() -> an iterator over the values of D"""
|
||||||
return iter(self.values())
|
return iter(list(self.values()))
|
||||||
|
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
@ -814,7 +801,7 @@ class Section(dict):
|
|||||||
>>> c2
|
>>> c2
|
||||||
ConfigObj({'section1': {'option1': 'False', 'subsection': {'more_options': 'False'}}})
|
ConfigObj({'section1': {'option1': 'False', 'subsection': {'more_options': 'False'}}})
|
||||||
"""
|
"""
|
||||||
for key, val in indict.items():
|
for key, val in list(indict.items()):
|
||||||
if (key in self and isinstance(self[key], dict) and
|
if (key in self and isinstance(self[key], dict) and
|
||||||
isinstance(val, dict)):
|
isinstance(val, dict)):
|
||||||
self[key].merge(val)
|
self[key].merge(val)
|
||||||
@ -972,7 +959,7 @@ class Section(dict):
|
|||||||
return False
|
return False
|
||||||
else:
|
else:
|
||||||
try:
|
try:
|
||||||
if not isinstance(val, basestring):
|
if not isinstance(val, six.string_types):
|
||||||
# TODO: Why do we raise a KeyError here?
|
# TODO: Why do we raise a KeyError here?
|
||||||
raise KeyError()
|
raise KeyError()
|
||||||
else:
|
else:
|
||||||
@ -1013,15 +1000,15 @@ class Section(dict):
|
|||||||
|
|
||||||
>>> a = ConfigObj()
|
>>> a = ConfigObj()
|
||||||
>>> a['a'] = 'fish'
|
>>> a['a'] = 'fish'
|
||||||
>>> a.as_float('a')
|
>>> a.as_float('a') #doctest: +IGNORE_EXCEPTION_DETAIL
|
||||||
Traceback (most recent call last):
|
Traceback (most recent call last):
|
||||||
ValueError: invalid literal for float(): fish
|
ValueError: invalid literal for float(): fish
|
||||||
>>> a['b'] = '1'
|
>>> a['b'] = '1'
|
||||||
>>> a.as_float('b')
|
>>> a.as_float('b')
|
||||||
1.0
|
1.0
|
||||||
>>> a['b'] = '3.2'
|
>>> a['b'] = '3.2'
|
||||||
>>> a.as_float('b')
|
>>> a.as_float('b') #doctest: +ELLIPSIS
|
||||||
3.2000000000000002
|
3.2...
|
||||||
"""
|
"""
|
||||||
return float(self[key])
|
return float(self[key])
|
||||||
|
|
||||||
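The doctest tweaks above lean on two standard directives: +IGNORE_EXCEPTION_DETAIL tolerates the different ValueError wording across versions, and +ELLIPSIS lets 3.2... match whichever float repr the interpreter prints. A self-contained doctest (hypothetical function) using the same directives:

import doctest

def float_repr_example():
    """
    >>> float('fish')  #doctest: +IGNORE_EXCEPTION_DETAIL
    Traceback (most recent call last):
    ValueError: could not convert string to float: fish
    >>> float('3.2')   #doctest: +ELLIPSIS
    3.2...
    """

doctest.run_docstring_examples(float_repr_example, {}, verbose=False)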
@ -1224,7 +1211,7 @@ class ConfigObj(Section):
|
|||||||
for entry in options:
|
for entry in options:
|
||||||
if entry not in OPTION_DEFAULTS:
|
if entry not in OPTION_DEFAULTS:
|
||||||
raise TypeError('Unrecognised option "%s".' % entry)
|
raise TypeError('Unrecognised option "%s".' % entry)
|
||||||
for entry, value in OPTION_DEFAULTS.items():
|
for entry, value in list(OPTION_DEFAULTS.items()):
|
||||||
if entry not in options:
|
if entry not in options:
|
||||||
options[entry] = value
|
options[entry] = value
|
||||||
keyword_value = _options[entry]
|
keyword_value = _options[entry]
|
||||||
@ -1243,12 +1230,11 @@ class ConfigObj(Section):
|
|||||||
|
|
||||||
|
|
||||||
def _load(self, infile, configspec):
|
def _load(self, infile, configspec):
|
||||||
if isinstance(infile, basestring):
|
if isinstance(infile, six.string_types):
|
||||||
self.filename = infile
|
self.filename = infile
|
||||||
if os.path.isfile(infile):
|
if os.path.isfile(infile):
|
||||||
h = open(infile, 'rb')
|
with open(infile, 'rb') as h:
|
||||||
infile = h.read() or []
|
content = h.readlines() or []
|
||||||
h.close()
|
|
||||||
elif self.file_error:
|
elif self.file_error:
|
||||||
# raise an error if the file doesn't exist
|
# raise an error if the file doesn't exist
|
||||||
raise IOError('Config file not found: "%s".' % self.filename)
|
raise IOError('Config file not found: "%s".' % self.filename)
|
||||||
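The explicit open()/close() pairs in _load are replaced with with blocks, so the handle is closed even when reading raises. A simplified sketch of that pattern (hypothetical helper, not the method's full branching):

import os

def read_config_lines(path):
    # return the raw lines of an existing file; a missing file yields an
    # empty list, mirroring the "read() or []" fallback in the original
    if os.path.isfile(path):
        with open(path, 'rb') as handle:
            return handle.readlines() or []
    return []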
@ -1257,13 +1243,12 @@ class ConfigObj(Section):
|
|||||||
if self.create_empty:
|
if self.create_empty:
|
||||||
# this is a good test that the filename specified
|
# this is a good test that the filename specified
|
||||||
# isn't impossible - like on a non-existent device
|
# isn't impossible - like on a non-existent device
|
||||||
h = open(infile, 'w')
|
with open(infile, 'w') as h:
|
||||||
h.write('')
|
h.write('')
|
||||||
h.close()
|
content = []
|
||||||
infile = []
|
|
||||||
|
|
||||||
elif isinstance(infile, (list, tuple)):
|
elif isinstance(infile, (list, tuple)):
|
||||||
infile = list(infile)
|
content = list(infile)
|
||||||
|
|
||||||
elif isinstance(infile, dict):
|
elif isinstance(infile, dict):
|
||||||
# initialise self
|
# initialise self
|
||||||
@ -1291,21 +1276,21 @@ class ConfigObj(Section):
|
|||||||
|
|
||||||
elif getattr(infile, 'read', MISSING) is not MISSING:
|
elif getattr(infile, 'read', MISSING) is not MISSING:
|
||||||
# This supports file like objects
|
# This supports file like objects
|
||||||
infile = infile.read() or []
|
content = infile.read() or []
|
||||||
# needs splitting into lines - but needs doing *after* decoding
|
# needs splitting into lines - but needs doing *after* decoding
|
||||||
# in case it's not an 8 bit encoding
|
# in case it's not an 8 bit encoding
|
||||||
else:
|
else:
|
||||||
raise TypeError('infile must be a filename, file like object, or list of lines.')
|
raise TypeError('infile must be a filename, file like object, or list of lines.')
|
||||||
|
|
||||||
if infile:
|
if content:
|
||||||
# don't do it for the empty ConfigObj
|
# don't do it for the empty ConfigObj
|
||||||
infile = self._handle_bom(infile)
|
content = self._handle_bom(content)
|
||||||
# infile is now *always* a list
|
# infile is now *always* a list
|
||||||
#
|
#
|
||||||
# Set the newlines attribute (first line ending it finds)
|
# Set the newlines attribute (first line ending it finds)
|
||||||
# and strip trailing '\n' or '\r' from lines
|
# and strip trailing '\n' or '\r' from lines
|
||||||
for line in infile:
|
for line in content:
|
||||||
if (not line) or (line[-1] not in ('\r', '\n', '\r\n')):
|
if (not line) or (line[-1] not in ('\r', '\n')):
|
||||||
continue
|
continue
|
||||||
for end in ('\r\n', '\n', '\r'):
|
for end in ('\r\n', '\n', '\r'):
|
||||||
if line.endswith(end):
|
if line.endswith(end):
|
||||||
@ -1313,9 +1298,10 @@ class ConfigObj(Section):
|
|||||||
break
|
break
|
||||||
break
|
break
|
||||||
|
|
||||||
infile = [line.rstrip('\r\n') for line in infile]
|
assert all(isinstance(line, six.string_types) for line in content), repr(content)
|
||||||
|
content = [line.rstrip('\r\n') for line in content]
|
||||||
|
|
||||||
self._parse(infile)
|
self._parse(content)
|
||||||
# if we had any errors, now is the time to raise them
|
# if we had any errors, now is the time to raise them
|
||||||
if self._errors:
|
if self._errors:
|
||||||
info = "at line %s." % self._errors[0].line_number
|
info = "at line %s." % self._errors[0].line_number
|
||||||
@ -1404,6 +1390,7 @@ class ConfigObj(Section):
|
|||||||
``infile`` must always be returned as a list of lines, but may be
|
``infile`` must always be returned as a list of lines, but may be
|
||||||
passed in as a single string.
|
passed in as a single string.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
if ((self.encoding is not None) and
|
if ((self.encoding is not None) and
|
||||||
(self.encoding.lower() not in BOM_LIST)):
|
(self.encoding.lower() not in BOM_LIST)):
|
||||||
# No need to check for a BOM
|
# No need to check for a BOM
|
||||||
@ -1415,6 +1402,13 @@ class ConfigObj(Section):
|
|||||||
line = infile[0]
|
line = infile[0]
|
||||||
else:
|
else:
|
||||||
line = infile
|
line = infile
|
||||||
|
|
||||||
|
if isinstance(line, six.text_type):
|
||||||
|
# it's already decoded and there's no need to do anything
|
||||||
|
# else, just use the _decode utility method to handle
|
||||||
|
# listifying appropriately
|
||||||
|
return self._decode(infile, self.encoding)
|
||||||
|
|
||||||
if self.encoding is not None:
|
if self.encoding is not None:
|
||||||
# encoding explicitly supplied
|
# encoding explicitly supplied
|
||||||
# And it could have an associated BOM
|
# And it could have an associated BOM
|
||||||
@ -1423,7 +1417,7 @@ class ConfigObj(Section):
|
|||||||
enc = BOM_LIST[self.encoding.lower()]
|
enc = BOM_LIST[self.encoding.lower()]
|
||||||
if enc == 'utf_16':
|
if enc == 'utf_16':
|
||||||
# For UTF16 we try big endian and little endian
|
# For UTF16 we try big endian and little endian
|
||||||
for BOM, (encoding, final_encoding) in BOMS.items():
|
for BOM, (encoding, final_encoding) in list(BOMS.items()):
|
||||||
if not final_encoding:
|
if not final_encoding:
|
||||||
# skip UTF8
|
# skip UTF8
|
||||||
continue
|
continue
|
||||||
@ -1453,8 +1447,9 @@ class ConfigObj(Section):
|
|||||||
return self._decode(infile, self.encoding)
|
return self._decode(infile, self.encoding)
|
||||||
|
|
||||||
# No encoding specified - so we need to check for UTF8/UTF16
|
# No encoding specified - so we need to check for UTF8/UTF16
|
||||||
for BOM, (encoding, final_encoding) in BOMS.items():
|
for BOM, (encoding, final_encoding) in list(BOMS.items()):
|
||||||
if not line.startswith(BOM):
|
if not isinstance(line, six.binary_type) or not line.startswith(BOM):
|
||||||
|
# didn't specify a BOM, or it's not a bytestring
|
||||||
continue
|
continue
|
||||||
else:
|
else:
|
||||||
# BOM discovered
|
# BOM discovered
|
||||||
@ -1468,25 +1463,32 @@ class ConfigObj(Section):
|
|||||||
infile[0] = newline
|
infile[0] = newline
|
||||||
else:
|
else:
|
||||||
infile = newline
|
infile = newline
|
||||||
# UTF8 - don't decode
|
# UTF-8
|
||||||
if isinstance(infile, basestring):
|
if isinstance(infile, six.text_type):
|
||||||
return infile.splitlines(True)
|
return infile.splitlines(True)
|
||||||
|
elif isinstance(infile, six.binary_type):
|
||||||
|
return infile.decode('utf-8').splitlines(True)
|
||||||
else:
|
else:
|
||||||
return infile
|
return self._decode(infile, 'utf-8')
|
||||||
# UTF16 - have to decode
|
# UTF16 - have to decode
|
||||||
return self._decode(infile, encoding)
|
return self._decode(infile, encoding)
|
||||||
|
|
||||||
# No BOM discovered and no encoding specified, just return
|
|
||||||
if isinstance(infile, basestring):
|
if six.PY2 and isinstance(line, str):
|
||||||
# infile read from a file will be a single string
|
# don't actually do any decoding, since we're on python 2 and
|
||||||
return infile.splitlines(True)
|
# returning a bytestring is fine
|
||||||
return infile
|
return self._decode(infile, None)
|
||||||
|
# No BOM discovered and no encoding specified, default to UTF-8
|
||||||
|
if isinstance(infile, six.binary_type):
|
||||||
|
return infile.decode('utf-8').splitlines(True)
|
||||||
|
else:
|
||||||
|
return self._decode(infile, 'utf-8')
|
||||||
|
|
||||||
|
|
||||||
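Because file contents arrive as bytes under Python 3, the BOM checks above are now guarded with isinstance(line, six.binary_type), and undecoded input falls back to UTF-8. A simplified sketch of the idea (not the exact _handle_bom branching):

import codecs

def strip_utf8_bom(raw):
    # only bytes can carry a BOM; text that is already decoded passes through
    if isinstance(raw, bytes):
        if raw.startswith(codecs.BOM_UTF8):
            raw = raw[len(codecs.BOM_UTF8):]
        return raw.decode('utf-8')
    return raw

print(strip_utf8_bom(codecs.BOM_UTF8 + b'key = value'))   # key = value
print(strip_utf8_bom('already decoded'))                  # already decoded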
def _a_to_u(self, aString):
|
def _a_to_u(self, aString):
|
||||||
"""Decode ASCII strings to unicode if a self.encoding is specified."""
|
"""Decode ASCII strings to unicode if a self.encoding is specified."""
|
||||||
if self.encoding:
|
if isinstance(aString, six.binary_type) and self.encoding:
|
||||||
return aString.decode('ascii')
|
return aString.decode(self.encoding)
|
||||||
else:
|
else:
|
||||||
return aString
|
return aString
|
||||||
|
|
||||||
@ -1497,34 +1499,42 @@ class ConfigObj(Section):
|
|||||||
|
|
||||||
if it is a string, it also needs converting to a list.
|
if it is a string, it also needs converting to a list.
|
||||||
"""
|
"""
|
||||||
if isinstance(infile, basestring):
|
if isinstance(infile, six.string_types):
|
||||||
# can't be unicode
|
return infile.splitlines(True)
|
||||||
|
if isinstance(infile, six.binary_type):
|
||||||
# NOTE: Could raise a ``UnicodeDecodeError``
|
# NOTE: Could raise a ``UnicodeDecodeError``
|
||||||
return infile.decode(encoding).splitlines(True)
|
if encoding:
|
||||||
for i, line in enumerate(infile):
|
return infile.decode(encoding).splitlines(True)
|
||||||
if not isinstance(line, unicode):
|
else:
|
||||||
# NOTE: The isinstance test here handles mixed lists of unicode/string
|
return infile.splitlines(True)
|
||||||
# NOTE: But the decode will break on any non-string values
|
|
||||||
# NOTE: Or could raise a ``UnicodeDecodeError``
|
if encoding:
|
||||||
infile[i] = line.decode(encoding)
|
for i, line in enumerate(infile):
|
||||||
|
if isinstance(line, six.binary_type):
|
||||||
|
# NOTE: The isinstance test here handles mixed lists of unicode/string
|
||||||
|
# NOTE: But the decode will break on any non-string values
|
||||||
|
# NOTE: Or could raise a ``UnicodeDecodeError``
|
||||||
|
infile[i] = line.decode(encoding)
|
||||||
return infile
|
return infile
|
||||||
|
|
||||||
|
|
||||||
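_decode now distinguishes text from bytes per line, so lists that mix already-decoded strings with raw bytes no longer break. A short sketch of that per-line check (hypothetical helper):

def decode_lines(lines, encoding):
    # decode only the bytes entries; leave text entries untouched
    decoded = []
    for line in lines:
        if isinstance(line, bytes) and encoding:
            line = line.decode(encoding)
        decoded.append(line)
    return decoded

print(decode_lines([b'a = 1', 'b = 2'], 'utf-8'))   # ['a = 1', 'b = 2']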
def _decode_element(self, line):
|
def _decode_element(self, line):
|
||||||
"""Decode element to unicode if necessary."""
|
"""Decode element to unicode if necessary."""
|
||||||
if not self.encoding:
|
if isinstance(line, six.binary_type) and self.default_encoding:
|
||||||
return line
|
|
||||||
if isinstance(line, str) and self.default_encoding:
|
|
||||||
return line.decode(self.default_encoding)
|
return line.decode(self.default_encoding)
|
||||||
return line
|
else:
|
||||||
|
return line
|
||||||
|
|
||||||
|
|
||||||
|
# TODO: this may need to be modified
|
||||||
def _str(self, value):
|
def _str(self, value):
|
||||||
"""
|
"""
|
||||||
Used by ``stringify`` within validate, to turn non-string values
|
Used by ``stringify`` within validate, to turn non-string values
|
||||||
into strings.
|
into strings.
|
||||||
"""
|
"""
|
||||||
if not isinstance(value, basestring):
|
if not isinstance(value, six.string_types):
|
||||||
|
# intentionally 'str' because it's just whatever the "normal"
|
||||||
|
# string type is for the python version we're dealing with
|
||||||
return str(value)
|
return str(value)
|
||||||
else:
|
else:
|
||||||
return value
|
return value
|
||||||
@ -1571,7 +1581,7 @@ class ConfigObj(Section):
|
|||||||
self.indent_type = indent
|
self.indent_type = indent
|
||||||
cur_depth = sect_open.count('[')
|
cur_depth = sect_open.count('[')
|
||||||
if cur_depth != sect_close.count(']'):
|
if cur_depth != sect_close.count(']'):
|
||||||
self._handle_error("Cannot compute the section depth at line %s.",
|
self._handle_error("Cannot compute the section depth",
|
||||||
NestingError, infile, cur_index)
|
NestingError, infile, cur_index)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
@ -1581,7 +1591,7 @@ class ConfigObj(Section):
|
|||||||
parent = self._match_depth(this_section,
|
parent = self._match_depth(this_section,
|
||||||
cur_depth).parent
|
cur_depth).parent
|
||||||
except SyntaxError:
|
except SyntaxError:
|
||||||
self._handle_error("Cannot compute nesting level at line %s.",
|
self._handle_error("Cannot compute nesting level",
|
||||||
NestingError, infile, cur_index)
|
NestingError, infile, cur_index)
|
||||||
continue
|
continue
|
||||||
elif cur_depth == this_section.depth:
|
elif cur_depth == this_section.depth:
|
||||||
@ -1591,12 +1601,13 @@ class ConfigObj(Section):
|
|||||||
# the new section is a child of the current section
|
# the new section is a child of the current section
|
||||||
parent = this_section
|
parent = this_section
|
||||||
else:
|
else:
|
||||||
self._handle_error("Section too nested at line %s.",
|
self._handle_error("Section too nested",
|
||||||
NestingError, infile, cur_index)
|
NestingError, infile, cur_index)
|
||||||
|
continue
|
||||||
|
|
||||||
sect_name = self._unquote(sect_name)
|
sect_name = self._unquote(sect_name)
|
||||||
if sect_name in parent:
|
if sect_name in parent:
|
||||||
self._handle_error('Duplicate section name at line %s.',
|
self._handle_error('Duplicate section name',
|
||||||
DuplicateError, infile, cur_index)
|
DuplicateError, infile, cur_index)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
@ -1615,10 +1626,8 @@ class ConfigObj(Section):
|
|||||||
# so it should be a valid ``key = value`` line
|
# so it should be a valid ``key = value`` line
|
||||||
mat = self._keyword.match(line)
|
mat = self._keyword.match(line)
|
||||||
if mat is None:
|
if mat is None:
|
||||||
# it neither matched as a keyword
|
|
||||||
# or a section marker
|
|
||||||
self._handle_error(
|
self._handle_error(
|
||||||
'Invalid line at line "%s".',
|
'Invalid line ({0!r}) (matched as neither section nor keyword)'.format(line),
|
||||||
ParseError, infile, cur_index)
|
ParseError, infile, cur_index)
|
||||||
else:
|
else:
|
||||||
# is a keyword value
|
# is a keyword value
|
||||||
@ -1633,7 +1642,7 @@ class ConfigObj(Section):
|
|||||||
value, infile, cur_index, maxline)
|
value, infile, cur_index, maxline)
|
||||||
except SyntaxError:
|
except SyntaxError:
|
||||||
self._handle_error(
|
self._handle_error(
|
||||||
'Parse error in value at line %s.',
|
'Parse error in multiline value',
|
||||||
ParseError, infile, cur_index)
|
ParseError, infile, cur_index)
|
||||||
continue
|
continue
|
||||||
else:
|
else:
|
||||||
@ -1641,11 +1650,11 @@ class ConfigObj(Section):
|
|||||||
comment = ''
|
comment = ''
|
||||||
try:
|
try:
|
||||||
value = unrepr(value)
|
value = unrepr(value)
|
||||||
except Exception, e:
|
except Exception as e:
|
||||||
if type(e) == UnknownType:
|
if type(e) == UnknownType:
|
||||||
msg = 'Unknown name or type in value at line %s.'
|
msg = 'Unknown name or type in value'
|
||||||
else:
|
else:
|
||||||
msg = 'Parse error in value at line %s.'
|
msg = 'Parse error from unrepr-ing multiline value'
|
||||||
self._handle_error(msg, UnreprError, infile,
|
self._handle_error(msg, UnreprError, infile,
|
||||||
cur_index)
|
cur_index)
|
||||||
continue
|
continue
|
||||||
@ -1654,11 +1663,11 @@ class ConfigObj(Section):
|
|||||||
comment = ''
|
comment = ''
|
||||||
try:
|
try:
|
||||||
value = unrepr(value)
|
value = unrepr(value)
|
||||||
except Exception, e:
|
except Exception as e:
|
||||||
if isinstance(e, UnknownType):
|
if isinstance(e, UnknownType):
|
||||||
msg = 'Unknown name or type in value at line %s.'
|
msg = 'Unknown name or type in value'
|
||||||
else:
|
else:
|
||||||
msg = 'Parse error in value at line %s.'
|
msg = 'Parse error from unrepr-ing value'
|
||||||
self._handle_error(msg, UnreprError, infile,
|
self._handle_error(msg, UnreprError, infile,
|
||||||
cur_index)
|
cur_index)
|
||||||
continue
|
continue
|
||||||
@ -1668,14 +1677,14 @@ class ConfigObj(Section):
|
|||||||
(value, comment) = self._handle_value(value)
|
(value, comment) = self._handle_value(value)
|
||||||
except SyntaxError:
|
except SyntaxError:
|
||||||
self._handle_error(
|
self._handle_error(
|
||||||
'Parse error in value at line %s.',
|
'Parse error in value',
|
||||||
ParseError, infile, cur_index)
|
ParseError, infile, cur_index)
|
||||||
continue
|
continue
|
||||||
#
|
#
|
||||||
key = self._unquote(key)
|
key = self._unquote(key)
|
||||||
if key in this_section:
|
if key in this_section:
|
||||||
self._handle_error(
|
self._handle_error(
|
||||||
'Duplicate keyword name at line %s.',
|
'Duplicate keyword name',
|
||||||
DuplicateError, infile, cur_index)
|
DuplicateError, infile, cur_index)
|
||||||
continue
|
continue
|
||||||
# add the key.
|
# add the key.
|
||||||
@ -1726,7 +1735,7 @@ class ConfigObj(Section):
|
|||||||
"""
|
"""
|
||||||
line = infile[cur_index]
|
line = infile[cur_index]
|
||||||
cur_index += 1
|
cur_index += 1
|
||||||
message = text % cur_index
|
message = '{0} at line {1}.'.format(text, cur_index)
|
||||||
error = ErrorClass(message, cur_index, line)
|
error = ErrorClass(message, cur_index, line)
|
||||||
if self.raise_errors:
|
if self.raise_errors:
|
||||||
# raise the error - parsing stops here
|
# raise the error - parsing stops here
|
||||||
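The individual parser messages no longer carry their own "%s" placeholder; _handle_error appends the line number in one place with str.format. Roughly:

def format_parse_error(text, cur_index):
    # cur_index is zero-based inside the parser, so report it one-based
    return '{0} at line {1}.'.format(text, cur_index + 1)

print(format_parse_error('Duplicate keyword name', 11))
# Duplicate keyword name at line 12.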
@ -1777,8 +1786,10 @@ class ConfigObj(Section):
|
|||||||
return self._quote(value[0], multiline=False) + ','
|
return self._quote(value[0], multiline=False) + ','
|
||||||
return ', '.join([self._quote(val, multiline=False)
|
return ', '.join([self._quote(val, multiline=False)
|
||||||
for val in value])
|
for val in value])
|
||||||
if not isinstance(value, basestring):
|
if not isinstance(value, six.string_types):
|
||||||
if self.stringify:
|
if self.stringify:
|
||||||
|
# intentionally 'str' because it's just whatever the "normal"
|
||||||
|
# string type is for the python version we're dealing with
|
||||||
value = str(value)
|
value = str(value)
|
||||||
else:
|
else:
|
||||||
raise TypeError('Value "%s" is not a string.' % value)
|
raise TypeError('Value "%s" is not a string.' % value)
|
||||||
@ -1929,11 +1940,11 @@ class ConfigObj(Section):
|
|||||||
raise_errors=True,
|
raise_errors=True,
|
||||||
file_error=True,
|
file_error=True,
|
||||||
_inspec=True)
|
_inspec=True)
|
||||||
except ConfigObjError, e:
|
except ConfigObjError as e:
|
||||||
# FIXME: Should these errors have a reference
|
# FIXME: Should these errors have a reference
|
||||||
# to the already parsed ConfigObj ?
|
# to the already parsed ConfigObj ?
|
||||||
raise ConfigspecError('Parsing configspec failed: %s' % e)
|
raise ConfigspecError('Parsing configspec failed: %s' % e)
|
||||||
except IOError, e:
|
except IOError as e:
|
||||||
raise IOError('Reading configspec failed: %s' % e)
|
raise IOError('Reading configspec failed: %s' % e)
|
||||||
|
|
||||||
self.configspec = configspec
|
self.configspec = configspec
|
||||||
@ -2049,7 +2060,7 @@ class ConfigObj(Section):
|
|||||||
this_entry = section[entry]
|
this_entry = section[entry]
|
||||||
comment = self._handle_comment(section.inline_comments[entry])
|
comment = self._handle_comment(section.inline_comments[entry])
|
||||||
|
|
||||||
if isinstance(this_entry, dict):
|
if isinstance(this_entry, Section):
|
||||||
# a section
|
# a section
|
||||||
out.append(self._write_marker(
|
out.append(self._write_marker(
|
||||||
indent_string,
|
indent_string,
|
||||||
@ -2097,21 +2108,25 @@ class ConfigObj(Section):
|
|||||||
# Windows specific hack to avoid writing '\r\r\n'
|
# Windows specific hack to avoid writing '\r\r\n'
|
||||||
newline = '\n'
|
newline = '\n'
|
||||||
output = self._a_to_u(newline).join(out)
|
output = self._a_to_u(newline).join(out)
|
||||||
if self.encoding:
|
|
||||||
output = output.encode(self.encoding)
|
|
||||||
if self.BOM and ((self.encoding is None) or match_utf8(self.encoding)):
|
|
||||||
# Add the UTF8 BOM
|
|
||||||
output = BOM_UTF8 + output
|
|
||||||
|
|
||||||
if not output.endswith(newline):
|
if not output.endswith(newline):
|
||||||
output += newline
|
output += newline
|
||||||
if outfile is not None:
|
|
||||||
outfile.write(output)
|
|
||||||
else:
|
|
||||||
h = open(self.filename, 'wb')
|
|
||||||
h.write(output)
|
|
||||||
h.close()
|
|
||||||
|
|
||||||
|
if isinstance(output, six.binary_type):
|
||||||
|
output_bytes = output
|
||||||
|
else:
|
||||||
|
output_bytes = output.encode(self.encoding or
|
||||||
|
self.default_encoding or
|
||||||
|
'ascii')
|
||||||
|
|
||||||
|
if self.BOM and ((self.encoding is None) or match_utf8(self.encoding)):
|
||||||
|
# Add the UTF8 BOM
|
||||||
|
output_bytes = BOM_UTF8 + output_bytes
|
||||||
|
|
||||||
|
if outfile is not None:
|
||||||
|
outfile.write(output_bytes)
|
||||||
|
else:
|
||||||
|
with open(self.filename, 'wb') as h:
|
||||||
|
h.write(output_bytes)
|
||||||
|
|
||||||
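write() now renders the whole document to text first, encodes it once (falling back through encoding, default_encoding and ASCII), prepends the UTF-8 BOM only to the encoded bytes, and writes inside a with block. A simplified sketch of that flow (hypothetical helper, ignoring the Windows newline hack and the outfile branch):

from codecs import BOM_UTF8

def write_config(lines, path, encoding=None, add_bom=False):
    output = '\n'.join(lines)
    if not output.endswith('\n'):
        output += '\n'
    # encode exactly once, at the end, so the file is always written as bytes
    output_bytes = output.encode(encoding or 'ascii')
    if add_bom:
        output_bytes = BOM_UTF8 + output_bytes
    with open(path, 'wb') as handle:
        handle.write(output_bytes)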
def validate(self, validator, preserve_errors=False, copy=False,
|
def validate(self, validator, preserve_errors=False, copy=False,
|
||||||
section=None):
|
section=None):
|
||||||
@ -2189,7 +2204,7 @@ class ConfigObj(Section):
|
|||||||
val,
|
val,
|
||||||
missing=missing
|
missing=missing
|
||||||
)
|
)
|
||||||
except validator.baseErrorClass, e:
|
except validator.baseErrorClass as e:
|
||||||
if not preserve_errors or isinstance(e, self._vdtMissingValue):
|
if not preserve_errors or isinstance(e, self._vdtMissingValue):
|
||||||
out[entry] = False
|
out[entry] = False
|
||||||
else:
|
else:
|
||||||
@ -2338,7 +2353,7 @@ class ConfigObj(Section):
|
|||||||
This method raises a ``ReloadError`` if the ConfigObj doesn't have
|
This method raises a ``ReloadError`` if the ConfigObj doesn't have
|
||||||
a filename attribute pointing to a file.
|
a filename attribute pointing to a file.
|
||||||
"""
|
"""
|
||||||
if not isinstance(self.filename, basestring):
|
if not isinstance(self.filename, six.string_types):
|
||||||
raise ReloadError()
|
raise ReloadError()
|
||||||
|
|
||||||
filename = self.filename
|
filename = self.filename
|
||||||
@ -2416,13 +2431,13 @@ def flatten_errors(cfg, res, levels=None, results=None):
|
|||||||
levels = []
|
levels = []
|
||||||
results = []
|
results = []
|
||||||
if res == True:
|
if res == True:
|
||||||
return results
|
return sorted(results)
|
||||||
if res == False or isinstance(res, Exception):
|
if res == False or isinstance(res, Exception):
|
||||||
results.append((levels[:], None, res))
|
results.append((levels[:], None, res))
|
||||||
if levels:
|
if levels:
|
||||||
levels.pop()
|
levels.pop()
|
||||||
return results
|
return sorted(results)
|
||||||
for (key, val) in res.items():
|
for (key, val) in list(res.items()):
|
||||||
if val == True:
|
if val == True:
|
||||||
continue
|
continue
|
||||||
if isinstance(cfg.get(key), dict):
|
if isinstance(cfg.get(key), dict):
|
||||||
@ -2436,7 +2451,7 @@ def flatten_errors(cfg, res, levels=None, results=None):
|
|||||||
if levels:
|
if levels:
|
||||||
levels.pop()
|
levels.pop()
|
||||||
#
|
#
|
||||||
return results
|
return sorted(results)
|
||||||
|
|
||||||
|
|
||||||
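Sorting the flattened results makes the error report deterministic, which matters now that dictionary iteration order can differ between runs and interpreter versions. For example (hypothetical validation results):

results = [(['section2'], 'port', False), (['section1'], 'host', False)]

# sorted() orders by section path first, then by key
for sections, key, error in sorted(results):
    print('/'.join(sections), key, error)
# section1 host False
# section2 port False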
def get_extra_values(conf, _prepend=()):
|
def get_extra_values(conf, _prepend=()):
|
||||||
|