# Onionr/onionr/onionrutils.py
'''
    Onionr - P2P Microblogging Platform & Social network
    OnionrUtils offers various useful functions to Onionr. Relatively misc.
'''
'''
    This program is free software: you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program. If not, see <https://www.gnu.org/licenses/>.
'''
# Misc functions that do not fit in the main api, but are useful
# standard library
import base64, binascii, errno, getpass, glob, hashlib, json, math, os, re, shutil, socket, sqlite3, sys, time
# third party
import nacl.encoding, nacl.signing, requests
from defusedxml import minidom
# onionr modules
import config, logger, onionrevents, onionrexceptions, onionrusers, pgpwords, storagecounter
from onionrblockapi import Block
2018-01-26 06:28:11 +00:00
if sys.version_info < (3, 6):
try:
import sha3
except ModuleNotFoundError:
2018-01-26 07:22:48 +00:00
logger.fatal('On Python 3 versions prior to 3.6.x, you need the sha3 module')
2018-01-26 06:28:11 +00:00
sys.exit(1)
class OnionrUtils:
    '''
    Various useful functions for validating things, etc functions, connectivity
    '''
    def __init__(self, coreInstance):
        '''
        Hold a reference to the Onionr core instance and initialize
        per-instance utility state. (Dead commented-out fingerprintFile
        code removed.)
        '''
        self._core = coreInstance # onionr core instance
        self.timingToken = '' # for when we make local connections to our http api, to bypass timing attack defense mechanism
        self.avoidDupe = [] # list used to prevent duplicate requests per peer for certain actions
        self.peerProcessing = {} # dict of current peer actions: peer, actionList
        self.storageCounter = storagecounter.StorageCounter(self._core) # used to keep track of how much data onionr is using on disk
        config.reload() # onionr config
        return
def getTimeBypassToken(self):
'''
Load our timingToken from disk for faster local HTTP API
'''
2018-04-19 02:16:10 +00:00
try:
if os.path.exists('data/time-bypass.txt'):
with open('data/time-bypass.txt', 'r') as bypass:
self.timingToken = bypass.read()
except Exception as error:
2018-08-04 02:52:45 +00:00
logger.error('Failed to fetch time bypass token.', error = error)
return self.timingToken
2018-03-03 07:26:02 +00:00
def getRoundedEpoch(self, roundS=60):
2018-04-19 01:47:35 +00:00
'''
Returns the epoch, rounded down to given seconds (Default 60)
'''
epoch = self.getEpoch()
return epoch - (epoch % roundS)
2018-03-16 15:35:37 +00:00
def mergeKeys(self, newKeyList):
2018-04-19 01:47:35 +00:00
'''
Merge ed25519 key list to our database, comma seperated string
2018-04-19 01:47:35 +00:00
'''
2018-04-19 02:16:10 +00:00
try:
retVal = False
if newKeyList != False:
for key in newKeyList.split(','):
2018-05-07 06:55:03 +00:00
key = key.split('-')
# Test if key is valid
try:
if len(key[0]) > 60 or len(key[1]) > 1000:
logger.warn('%s or its pow value is too large.' % key[0])
continue
except IndexError:
logger.warn('No pow token')
2018-05-07 06:55:03 +00:00
continue
try:
value = base64.b64decode(key[1])
except binascii.Error:
continue
# Load the pow token
hashedKey = self._core._crypto.blake2bHash(key[0])
powHash = self._core._crypto.blake2bHash(value + hashedKey)
2018-05-15 05:16:00 +00:00
try:
powHash = powHash.encode()
except AttributeError:
pass
# if POW meets required difficulty, TODO make configurable/dynamic
2018-05-15 05:16:00 +00:00
if powHash.startswith(b'0000'):
# if we don't already have the key and its not our key, add it.
2018-05-07 06:55:03 +00:00
if not key[0] in self._core.listPeers(randomOrder=False) and type(key) != None and key[0] != self._core._crypto.pubKey:
if self._core.addPeer(key[0], key[1]):
# Check if the peer has a set username already
onionrusers.OnionrUser(self._core, key[0]).findAndSetID()
2018-05-07 06:55:03 +00:00
retVal = True
else:
logger.warn("Failed to add key")
2018-05-07 06:55:03 +00:00
else:
pass
#logger.debug('%s pow failed' % key[0])
2018-04-19 02:16:10 +00:00
return retVal
except Exception as error:
logger.error('Failed to merge keys.', error=error)
return False
2018-04-19 01:47:35 +00:00
2018-03-16 15:35:37 +00:00
def mergeAdders(self, newAdderList):
2018-04-19 01:47:35 +00:00
'''
Merge peer adders list to our database
'''
2018-04-19 02:16:10 +00:00
try:
retVal = False
if newAdderList != False:
for adder in newAdderList.split(','):
adder = adder.strip()
if not adder in self._core.listAdders(randomOrder = False) and adder != self.getMyAddress() and not self._core._blacklist.inBlacklist(adder):
2018-08-20 01:32:21 +00:00
if not config.get('tor.v3onions') and len(adder) == 62:
2018-08-19 04:07:09 +00:00
continue
if self._core.addAddress(adder):
2018-08-21 20:01:50 +00:00
# Check if we have the maxmium amount of allowed stored peers
if config.get('peers.maxStoredPeers') > len(self._core.listAdders()):
2018-08-21 20:01:50 +00:00
logger.info('Added %s to db.' % adder, timestamp = True)
retVal = True
else:
logger.warn('Reached the maximum amount of peers in the net database as allowed by your config.')
2018-04-19 02:16:10 +00:00
else:
pass
#logger.debug('%s is either our address or already in our DB' % adder)
2018-04-19 02:16:10 +00:00
return retVal
except Exception as error:
logger.error('Failed to merge adders.', error = error)
2018-04-19 02:16:10 +00:00
return False
2018-03-16 15:35:37 +00:00
2018-04-03 21:47:48 +00:00
def getMyAddress(self):
2018-04-19 02:16:10 +00:00
try:
2018-04-23 03:49:53 +00:00
with open('./data/hs/hostname', 'r') as hostname:
return hostname.read().strip()
2018-04-19 02:16:10 +00:00
except Exception as error:
logger.error('Failed to read my address.', error = error)
2018-04-23 03:49:53 +00:00
return None
2018-04-03 21:47:48 +00:00
2018-04-21 03:10:50 +00:00
def localCommand(self, command, silent = True):
'''
Send a command to the local http API server, securely. Intended for local clients, DO NOT USE for remote peers.
'''
2018-02-23 01:58:36 +00:00
config.reload()
self.getTimeBypassToken()
2018-02-23 01:58:36 +00:00
# TODO: URL encode parameters, just as an extra measure. May not be needed, but should be added regardless.
try:
with open('data/host.txt', 'r') as host:
hostname = host.read()
except FileNotFoundError:
return False
payload = 'http://%s:%s/client/?action=%s&token=%s&timingToken=%s' % (hostname, config.get('client.port'), command, config.get('client.hmac'), self.timingToken)
try:
retData = requests.get(payload).text
2018-04-19 02:16:10 +00:00
except Exception as error:
2018-04-21 03:10:50 +00:00
if not silent:
logger.error('Failed to make local request (command: %s):%s' % (command, error))
2018-04-19 01:47:35 +00:00
retData = False
return retData
2018-01-27 00:52:20 +00:00
def getPassword(self, message='Enter password: ', confirm = True):
'''
Get a password without showing the users typing and confirm the input
'''
2018-01-09 22:58:12 +00:00
# Get a password safely with confirmation and return it
while True:
print(message)
pass1 = getpass.getpass()
2018-01-27 00:52:20 +00:00
if confirm:
print('Confirm password: ')
pass2 = getpass.getpass()
if pass1 != pass2:
logger.error("Passwords do not match.")
2018-01-29 06:01:36 +00:00
logger.readline()
2018-01-27 00:52:20 +00:00
else:
break
2018-01-09 22:58:12 +00:00
else:
break
2018-01-20 07:23:09 +00:00
return pass1
def getHumanReadableID(self, pub=''):
'''gets a human readable ID from a public key'''
if pub == '':
pub = self._core._crypto.pubKey
pub = base64.b16encode(base64.b32decode(pub)).decode()
return '-'.join(pgpwords.wordify(pub))
def getBlockMetadataFromData(self, blockData):
'''
accepts block contents as string, returns a tuple of metadata, meta (meta being internal metadata, which will be returned as an encrypted base64 string if it is encrypted, dict if not).
2018-07-31 04:41:32 +00:00
'''
meta = {}
metadata = {}
data = blockData
try:
blockData = blockData.encode()
except AttributeError:
pass
2018-07-31 04:41:32 +00:00
try:
metadata = json.loads(blockData[:blockData.find(b'\n')].decode())
except json.decoder.JSONDecodeError:
pass
else:
data = blockData[blockData.find(b'\n'):].decode()
if not metadata['encryptType'] in ('asym', 'sym'):
try:
meta = json.loads(metadata['meta'])
except KeyError:
pass
2018-07-31 04:41:32 +00:00
meta = metadata['meta']
return (metadata, meta, data)
def checkPort(self, port, host=''):
'''
Checks if a port is available, returns bool
'''
2018-01-20 07:23:09 +00:00
# inspired by https://www.reddit.com/r/learnpython/comments/2i4qrj/how_to_write_a_python_script_that_checks_to_see/ckzarux/
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
retVal = False
try:
2018-01-27 00:52:20 +00:00
sock.bind((host, port))
2018-01-20 07:23:09 +00:00
except OSError as e:
if e.errno is 98:
retVal = True
finally:
sock.close()
return retVal
def checkIsIP(self, ip):
'''
Check if a string is a valid IPv4 address
'''
try:
socket.inet_aton(ip)
except:
return False
else:
return True
2018-07-31 04:41:32 +00:00
2018-07-11 07:35:22 +00:00
def processBlockMetadata(self, blockHash):
'''
Read metadata from a block and cache it to the block database
'''
myBlock = Block(blockHash, self._core)
if myBlock.isEncrypted:
myBlock.decrypt()
blockType = myBlock.getMetadata('type') # we would use myBlock.getType() here, but it is bugged with encrypted blocks
signer = self.bytesToStr(myBlock.signer)
valid = myBlock.verifySig()
try:
if len(blockType) <= 10:
self._core.updateBlockInfo(blockHash, 'dataType', blockType)
onionrevents.event('processBlocks', data = {'block': myBlock, 'type': blockType, 'signer': signer, 'validSig': valid}, onionr = None)
2018-09-09 05:12:41 +00:00
except TypeError:
pass
2018-07-11 07:35:22 +00:00
def escapeAnsi(self, line):
'''
Remove ANSI escape codes from a string with regex
2018-07-31 04:41:32 +00:00
taken or adapted from: https://stackoverflow.com/a/38662876
'''
ansi_escape = re.compile(r'(\x9B|\x1B\[)[0-?]*[ -/]*[@-~]')
return ansi_escape.sub('', line)
2018-01-26 06:28:11 +00:00
def getBlockDBHash(self):
'''
Return a sha3_256 hash of the blocks DB
'''
2018-04-19 02:16:10 +00:00
try:
with open(self._core.blockDB, 'rb') as data:
data = data.read()
hasher = hashlib.sha3_256()
hasher.update(data)
dataHash = hasher.hexdigest()
2018-04-19 02:16:10 +00:00
return dataHash
except Exception as error:
logger.error('Failed to get block DB hash.', error=error)
2018-01-26 06:28:11 +00:00
2018-01-29 02:52:48 +00:00
def hasBlock(self, hash):
'''
Check for new block in the list
'''
2018-01-29 02:52:48 +00:00
conn = sqlite3.connect(self._core.blockDB)
c = conn.cursor()
if not self.validateHash(hash):
raise Exception("Invalid hash")
for result in c.execute("SELECT COUNT() FROM hashes where hash='" + hash + "'"):
if result[0] >= 1:
conn.commit()
conn.close()
return True
else:
conn.commit()
conn.close()
return False
2018-04-23 03:42:37 +00:00
def hasKey(self, key):
'''
Check for key in list of public keys
'''
return key in self._core.listPeers()
2018-01-26 06:28:11 +00:00
def validateHash(self, data, length=64):
'''
Validate if a string is a valid hex formatted hash
'''
2018-01-26 06:28:11 +00:00
retVal = True
2018-01-28 22:38:10 +00:00
if data == False or data == True:
return False
2018-01-28 22:21:51 +00:00
data = data.strip()
2018-01-26 06:28:11 +00:00
if len(data) != length:
retVal = False
else:
try:
int(data, 16)
except ValueError:
retVal = False
2018-01-26 07:22:48 +00:00
return retVal
2018-07-31 04:41:32 +00:00
def validateMetadata(self, metadata, blockData):
'''Validate metadata meets onionr spec (does not validate proof value computation), take in either dictionary or json string'''
# TODO, make this check sane sizes
2018-06-26 04:39:45 +00:00
retData = False
2018-07-31 04:41:32 +00:00
2018-06-26 04:39:45 +00:00
# convert to dict if it is json string
if type(metadata) is str:
try:
metadata = json.loads(metadata)
except json.JSONDecodeError:
pass
# Validate metadata dict for invalid keys to sizes that are too large
if type(metadata) is dict:
for i in metadata:
try:
self._core.requirements.blockMetadataLengths[i]
except KeyError:
logger.warn('Block has invalid metadata key ' + i)
break
else:
if self._core.requirements.blockMetadataLengths[i] < len(metadata[i]):
logger.warn('Block metadata key ' + i + ' exceeded maximum size')
break
if i == 'time':
if not self.isIntegerString(metadata[i]):
logger.warn('Block metadata time stamp is not integer string')
break
2018-06-26 04:39:45 +00:00
else:
# if metadata loop gets no errors, it does not break, therefore metadata is valid
# make sure we do not have another block with the same data content (prevent data duplication and replay attacks)
2018-08-16 14:03:59 +00:00
nonce = self._core._utils.bytesToStr(self._core._crypto.sha3Hash(blockData))
try:
with open(self._core.dataNonceFile, 'r') as nonceFile:
if nonce in nonceFile.read():
retData = False # we've seen that nonce before, so we can't pass metadata
raise onionrexceptions.DataExists
except FileNotFoundError:
retData = True
except onionrexceptions.DataExists:
# do not set retData to True, because nonce has been seen before
pass
else:
retData = True
2018-06-26 04:39:45 +00:00
else:
logger.warn('In call to utils.validateMetadata, metadata must be JSON string or a dictionary object')
return retData
2018-02-23 01:58:36 +00:00
2018-02-21 09:32:31 +00:00
def validatePubKey(self, key):
2018-04-19 02:16:10 +00:00
'''
Validate if a string is a valid base32 encoded Ed25519 key
'''
2018-02-21 09:32:31 +00:00
retVal = False
try:
2018-02-22 06:08:04 +00:00
nacl.signing.SigningKey(seed=key, encoder=nacl.encoding.Base32Encoder)
2018-02-21 09:32:31 +00:00
except nacl.exceptions.ValueError:
pass
except base64.binascii.Error as err:
2018-04-02 07:21:58 +00:00
pass
2018-02-22 06:08:04 +00:00
else:
retVal = True
2018-02-21 09:32:31 +00:00
return retVal
2018-07-31 04:41:32 +00:00
def isIntegerString(self, data):
'''Check if a string is a valid base10 integer'''
try:
int(data)
except ValueError:
return False
else:
return True
2018-01-26 09:46:21 +00:00
def validateID(self, id):
'''
2018-02-21 09:32:31 +00:00
Validate if an address is a valid tor or i2p hidden service
'''
2018-04-19 02:16:10 +00:00
try:
idLength = len(id)
retVal = True
idNoDomain = ''
peerType = ''
# i2p b32 addresses are 60 characters long (including .b32.i2p)
if idLength == 60:
peerType = 'i2p'
if not id.endswith('.b32.i2p'):
2018-01-26 09:46:21 +00:00
retVal = False
else:
2018-04-19 02:16:10 +00:00
idNoDomain = id.split('.b32.i2p')[0]
# Onion v2's are 22 (including .onion), v3's are 62 with .onion
elif idLength == 22 or idLength == 62:
peerType = 'onion'
if not id.endswith('.onion'):
2018-01-26 09:46:21 +00:00
retVal = False
2018-04-19 02:16:10 +00:00
else:
idNoDomain = id.split('.onion')[0]
else:
retVal = False
2018-04-19 02:16:10 +00:00
if retVal:
if peerType == 'i2p':
try:
id.split('.b32.i2p')[2]
except:
pass
else:
retVal = False
elif peerType == 'onion':
try:
id.split('.onion')[2]
except:
pass
else:
retVal = False
if not idNoDomain.isalnum():
retVal = False
2018-02-23 01:58:36 +00:00
2018-04-19 02:16:10 +00:00
return retVal
except:
return False
2018-04-26 07:40:39 +00:00
def getPeerByHashId(self, hash):
'''
Return the pubkey of the user if known from the hash
'''
if self._core._crypto.pubKeyHashID() == hash:
retData = self._core._crypto.pubKey
return retData
conn = sqlite3.connect(self._core.peerDB)
c = conn.cursor()
command = (hash,)
retData = ''
for row in c.execute('SELECT ID FROM peers where hashID=?', command):
if row[0] != '':
retData = row[0]
2018-05-02 06:22:40 +00:00
return retData
2018-05-02 06:50:29 +00:00
def isCommunicatorRunning(self, timeout = 5, interval = 0.1):
try:
runcheck_file = 'data/.runcheck'
2018-05-02 06:22:40 +00:00
if os.path.isfile(runcheck_file):
os.remove(runcheck_file)
2018-05-02 06:50:29 +00:00
logger.debug('%s file appears to have existed before the run check.' % runcheck_file, timestamp = False)
2018-05-02 06:22:40 +00:00
2018-05-02 06:50:29 +00:00
self._core.daemonQueueAdd('runCheck')
starttime = time.time()
while True:
time.sleep(interval)
if os.path.isfile(runcheck_file):
os.remove(runcheck_file)
return True
elif time.time() - starttime >= timeout:
return False
except:
return False
def token(self, size = 32):
2018-05-11 02:05:56 +00:00
'''
Generates a secure random hex encoded token
'''
2018-05-10 07:42:24 +00:00
return binascii.hexlify(os.urandom(size))
def importNewBlocks(self, scanDir=''):
2018-05-11 02:05:56 +00:00
'''
This function is intended to scan for new blocks ON THE DISK and import them
'''
2018-05-10 07:42:24 +00:00
blockList = self._core.getBlockList()
if scanDir == '':
scanDir = self._core.blockDataLocation
if not scanDir.endswith('/'):
scanDir += '/'
for block in glob.glob(scanDir + "*.dat"):
if block.replace(scanDir, '').replace('.dat', '') not in blockList:
logger.info('Found new block on dist %s' % block)
2018-05-10 07:42:24 +00:00
with open(block, 'rb') as newBlock:
block = block.replace(scanDir, '').replace('.dat', '')
if self._core._crypto.sha3Hash(newBlock.read()) == block.replace('.dat', ''):
self._core.addToBlockDB(block.replace('.dat', ''), dataSaved=True)
logger.info('Imported block %s.' % block)
2018-05-10 07:42:24 +00:00
else:
logger.warn('Failed to verify hash for %s' % block)
2018-05-11 02:05:56 +00:00
2018-05-11 05:18:39 +00:00
def progressBar(self, value = 0, endvalue = 100, width = None):
'''
Outputs a progress bar with a percentage. Write \n after use.
'''
if width is None or height is None:
width, height = shutil.get_terminal_size((80, 24))
bar_length = width - 6
percent = float(value) / endvalue
arrow = '' * int(round(percent * bar_length)-1) + '>'
spaces = ' ' * (bar_length - len(arrow))
sys.stdout.write("\r{0}{1}%".format(arrow + spaces, int(round(percent * 100))))
sys.stdout.flush()
2018-05-19 22:11:51 +00:00
2018-05-18 21:49:05 +00:00
def getEpoch(self):
'''returns epoch'''
return math.floor(time.time())
2018-05-11 05:18:39 +00:00
def doPostRequest(self, url, data={}, port=0, proxyType='tor'):
'''
Do a POST request through a local tor or i2p instance
'''
if proxyType == 'tor':
if port == 0:
port = self._core.torPort
2018-08-04 05:50:07 +00:00
proxies = {'http': 'socks4a://127.0.0.1:' + str(port), 'https': 'socks4a://127.0.0.1:' + str(port)}
elif proxyType == 'i2p':
proxies = {'http': 'http://127.0.0.1:4444'}
else:
return
headers = {'user-agent': 'PyOnionr'}
try:
2018-08-04 05:50:07 +00:00
proxies = {'http': 'socks4a://127.0.0.1:' + str(port), 'https': 'socks4a://127.0.0.1:' + str(port)}
r = requests.post(url, data=data, headers=headers, proxies=proxies, allow_redirects=False, timeout=(15, 30))
retData = r.text
except KeyboardInterrupt:
raise KeyboardInterrupt
except requests.exceptions.RequestException as e:
logger.debug('Error: %s' % str(e))
retData = False
return retData
def doGetRequest(self, url, port=0, proxyType='tor'):
'''
Do a get request through a local tor or i2p instance
'''
retData = False
if proxyType == 'tor':
if port == 0:
raise onionrexceptions.MissingPort('Socks port required for Tor HTTP get request')
2018-08-04 05:50:07 +00:00
proxies = {'http': 'socks4a://127.0.0.1:' + str(port), 'https': 'socks4a://127.0.0.1:' + str(port)}
elif proxyType == 'i2p':
proxies = {'http': 'http://127.0.0.1:4444'}
else:
return
headers = {'user-agent': 'PyOnionr'}
try:
2018-08-04 05:50:07 +00:00
proxies = {'http': 'socks4a://127.0.0.1:' + str(port), 'https': 'socks4a://127.0.0.1:' + str(port)}
r = requests.get(url, headers=headers, proxies=proxies, allow_redirects=False, timeout=(15, 30))
retData = r.text
except KeyboardInterrupt:
raise KeyboardInterrupt
2018-08-04 03:47:22 +00:00
except ValueError as e:
logger.debug('Failed to make request', error = e)
except requests.exceptions.RequestException as e:
2018-09-23 04:53:09 +00:00
if not 'ConnectTimeoutError' in str(e):
logger.debug('Error: %s' % str(e))
retData = False
return retData
def getNistBeaconSalt(self, torPort=0, rounding=3600):
'''
Get the token for the current hour from the NIST randomness beacon
'''
if torPort == 0:
try:
sys.argv[2]
except IndexError:
raise onionrexceptions.MissingPort('Missing Tor socks port')
retData = ''
curTime = self.getRoundedEpoch(rounding)
self.nistSaltTimestamp = curTime
data = self.doGetRequest('https://beacon.nist.gov/rest/record/' + str(curTime), port=torPort)
dataXML = minidom.parseString(data, forbid_dtd=True, forbid_entities=True, forbid_external=True)
try:
retData = dataXML.getElementsByTagName('outputValue')[0].childNodes[0].data
except ValueError:
logger.warn('Could not get NIST beacon value')
else:
self.powSalt = retData
return retData
2018-08-10 07:03:49 +00:00
def strToBytes(self, data):
try:
data = data.encode()
except AttributeError:
pass
return data
def bytesToStr(self, data):
try:
data = data.decode()
except AttributeError:
pass
return data
2018-08-21 20:01:50 +00:00
2018-08-23 19:46:23 +00:00
def checkNetwork(self, torPort=0):
2018-08-21 20:01:50 +00:00
'''Check if we are connected to the internet (through Tor)'''
retData = False
connectURLs = []
try:
with open('static-data/connect-check.txt', 'r') as connectTest:
connectURLs = connectTest.read().split(',')
for url in connectURLs:
2018-08-23 19:46:23 +00:00
if self.doGetRequest(url, port=torPort) != False:
2018-08-21 20:01:50 +00:00
retData = True
break
except FileNotFoundError:
pass
return retData
2018-05-11 02:05:56 +00:00
def size(path='.'):
    '''
    Returns the size of a folder's contents in bytes
    '''
    if not os.path.exists(path):
        return 0
    if os.path.isfile(path):
        return os.path.getsize(path)
    total = 0
    # sum regular files, recurse into subdirectories
    for entry in os.scandir(path):
        if entry.is_file():
            total += entry.stat().st_size
        elif entry.is_dir():
            total += size(entry.path)
    return total
def humanSize(num, suffix='B'):
    '''
    Converts from bytes to a human readable format.
    '''
    prefixes = ['', 'K', 'M', 'G', 'T', 'P', 'E', 'Z']
    for prefix in prefixes:
        if abs(num) < 1024.0:
            return "%.1f %s%s" % (num, prefix, suffix)
        num /= 1024.0
    # beyond zetta, report in yobibytes
    return "%.1f %s%s" % (num, 'Yi', suffix)