commit 24e3efbf2a ("first commit")
branch: master
author: Adam Gibson, 9 years ago
GPG Key ID: B3AE09F1E9A3197A (no known key found for this signature in database)

Changed files (30):

  1. base/commands.py (+92)
  2. base/support.py (+69)
  3. bitcoin/__init__.py (+6)
  4. bitcoin/bci.py (+73)
  5. bitcoin/secp256k1_deterministic.py (+92)
  6. bitcoin/secp256k1_main.py (+393)
  7. bitcoin/secp256k1_transaction.py (+436)
  8. client/__init__.py (+38)
  9. client/blockchaininterface.py (+1024)
  10. client/btc.py (+508)
  11. client/client_protocol.py (+230)
  12. client/commands.py (+92)
  13. client/configure.py (+376)
  14. client/jsonrpc.py (+119)
  15. client/old_mnemonic.py (+267)
  16. client/podle.py (+659)
  17. client/slowaes.py (+674)
  18. client/socks.py (+410)
  19. client/support.py (+429)
  20. client/taker.py (+562)
  21. client/wallet.py (+465)
  22. daemon/__init__.py (+22)
  23. daemon/enc_wrapper.py (+99)
  24. daemon/irc.py (+472)
  25. daemon/message_channel.py (+1026)
  26. daemon/orderbookwatch.py (+132)
  27. daemon/protocol.py (+31)
  28. daemon/socks.py (+410)
  29. joinmarketd.py (+318)
  30. sendpayment.py (+201)

base/commands.py (+92)

@@ -0,0 +1,92 @@
from twisted.protocols.amp import Integer, String, Unicode, Boolean, Command


class DaemonNotReady(Exception):
    pass


class JMCommand(Command):
    #a default response type
    response = [('accepted', Boolean())]

#commands from client to daemon

class JMInit(JMCommand):
    arguments = [('bcsource', String()),
                 ('network', String()),
                 ('irc_configs', String()),
                 ('minmakers', Integer()),
                 ('maker_timeout_sec', Integer())]
    errors = {DaemonNotReady: 'daemon is not ready'}

class JMStartMC(JMCommand):
    arguments = [('nick', String())]

class JMSetup(JMCommand):
    arguments = [('role', String()),
                 ('n_counterparties', Integer())]

class JMRequestOffers(JMCommand):
    arguments = []

class JMFill(JMCommand):
    arguments = [('amount', Integer()),
                 ('commitment', String()),
                 ('revelation', String()),
                 ('filled_offers', String())]

class JMMakeTx(JMCommand):
    arguments = [('nick_list', String()),
                 ('txhex', String())]

class JMMsgSignature(JMCommand):
    arguments = [('nick', String()),
                 ('cmd', String()),
                 ('msg_to_return', String()),
                 ('hostid', String())]

class JMMsgSignatureVerify(JMCommand):
    arguments = [('verif_result', Boolean()),
                 ('nick', String()),
                 ('fullmsg', String()),
                 ('hostid', String())]

#commands from daemon to client

class JMInitProto(JMCommand):
    arguments = [('nick_hash_length', Integer()),
                 ('nick_max_encoded', Integer()),
                 ('joinmarket_nick_header', String()),
                 ('joinmarket_version', Integer())]

class JMUp(JMCommand):
    arguments = []

class JMSetupDone(JMCommand):
    arguments = []

class JMOffers(JMCommand):
    arguments = [('orderbook', String())]

class JMFillResponse(JMCommand):
    arguments = [('success', Boolean()),
                 ('ioauth_data', String())]

class JMSigReceived(JMCommand):
    arguments = [('nick', String()),
                 ('sig', String())]

class JMRequestMsgSig(JMCommand):
    arguments = [('nick', String()),
                 ('cmd', String()),
                 ('msg', String()),
                 ('msg_to_be_signed', String()),
                 ('hostid', String())]

class JMRequestMsgSigVerify(JMCommand):
    arguments = [('msg', String()),
                 ('fullmsg', String()),
                 ('sig', String()),
                 ('pubkey', String()),
                 ('nick', String()),
                 ('hashlen', Integer()),
                 ('max_encoded', Integer()),
                 ('hostid', String())]

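For orientation, a minimal sketch of how these AMP command classes are exercised (not part of this commit): the daemon side declares a responder and the client sends the command with callRemote, mirroring the pattern in client/client_protocol.py below. The protocol class name here is illustrative only, and the import assumes this module is importable as 'commands'.

from twisted.protocols import amp
from commands import JMStartMC  # assumed import path, as used in client/client_protocol.py

class ExampleDaemonSide(amp.AMP):
    @JMStartMC.responder
    def on_JM_START_MC(self, nick):
        # reply using the default JMCommand response type
        return {'accepted': True}

# Client side, inside a connected amp.AMP instance:
#     d = self.callRemote(JMStartMC, nick=my_nick)
#     d.addCallback(lambda response: response['accepted'])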
base/support.py (+69)

@@ -0,0 +1,69 @@
from __future__ import absolute_import, print_function

import sys
import logging
import pprint
import random

logFormatter = logging.Formatter(
    "%(asctime)s [%(threadName)-12.12s] [%(levelname)-5.5s] %(message)s")
log = logging.getLogger('joinmarket')
log.setLevel(logging.DEBUG)

joinmarket_alert = ['']
core_alert = ['']
debug_silence = [False]

#TODO pass this through from client; bitcoin parameter:
DUST_THRESHOLD = 2730

#consoleHandler = logging.StreamHandler(stream=sys.stdout)


class JoinMarketStreamHandler(logging.StreamHandler):

    def __init__(self, stream):
        super(JoinMarketStreamHandler, self).__init__(stream)

    def emit(self, record):
        if joinmarket_alert[0]:
            print('JoinMarket Alert Message: ' + joinmarket_alert[0])
        if core_alert[0]:
            print('Core Alert Message: ' + core_alert[0])
        if not debug_silence[0]:
            super(JoinMarketStreamHandler, self).emit(record)


consoleHandler = JoinMarketStreamHandler(stream=sys.stdout)
consoleHandler.setFormatter(logFormatter)
log.addHandler(consoleHandler)
log.debug('hello joinmarket')


def get_log():
    """
    provides joinmarket logging instance
    :return: log instance
    """
    return log


def chunks(d, n):
    return [d[x:x + n] for x in xrange(0, len(d), n)]


def debug_dump_object(obj, skip_fields=None):
    if skip_fields is None:
        skip_fields = []
    log.debug('Class debug dump, name:' + obj.__class__.__name__)
    for k, v in obj.__dict__.iteritems():
        if k in skip_fields:
            continue
        if k == 'password' or k == 'given_password':
            continue
        log.debug('key=' + k)
        if isinstance(v, str):
            log.debug('string: len:' + str(len(v)))
            log.debug(v)
        elif isinstance(v, dict) or isinstance(v, list):
            log.debug(pprint.pformat(v))
        else:
            log.debug(str(v))

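A quick usage sketch (not in the commit) of the logger and the chunks helper; the bare import path is an assumption, since base/ is not packaged here.

from support import get_log, chunks  # assumes base/ is on sys.path

log = get_log()
log.info('hex pieces: ' + str(chunks('deadbeefcafe', 4)))  # ['dead', 'beef', 'cafe']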
bitcoin/__init__.py (+6)

@@ -0,0 +1,6 @@
import secp256k1
from bitcoin.secp256k1_main import *
from bitcoin.secp256k1_transaction import *
from bitcoin.secp256k1_deterministic import *
from bitcoin.bci import *

bitcoin/bci.py (+73)

@@ -0,0 +1,73 @@
#!/usr/bin/python
import json, re
import random
import sys
import time
import platform

from joinmarketclient.support import get_log

if platform.system() == "Windows":
    import ssl
    import urllib2
else:
    try:
        from urllib.request import build_opener
    except:
        from urllib2 import build_opener

log = get_log()


# Makes a request to a given URL (first arg) and optional params (second arg)
def make_request(*args):
    if platform.system() == "Windows":
        sctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
        sh = urllib2.HTTPSHandler(debuglevel=0, context=sctx)
        opener = urllib2.build_opener(sh)
    else:
        opener = build_opener()
    opener.addheaders = [('User-agent',
                          'Mozilla/5.0' + str(random.randrange(1000000)))]
    try:
        return opener.open(*args).read().strip()
    except Exception as e:
        try:
            p = e.read().strip()
        except:
            p = e
        raise Exception(p)


def make_request_blockr(*args):
    counter = 0
    while True:
        data = json.loads(make_request(*args))
        if data['status'] == 'error' and data['code'] == 429:
            log.debug('Blockr service error: ' + data['message'])
            time.sleep(min(60, 2**counter / 2.))
            counter += 1
            continue
        return data


# Pushes a transaction to the network using https://blockchain.info/pushtx
def bci_pushtx(tx):
    if not re.match('^[0-9a-fA-F]*$', tx):
        tx = tx.encode('hex')
    return make_request('https://blockchain.info/pushtx', 'tx=' + tx)


def blockr_pushtx(tx, network='btc'):
    if network == 'testnet':
        blockr_url = 'https://tbtc.blockr.io/api/v1/tx/push'
    elif network == 'btc':
        blockr_url = 'https://btc.blockr.io/api/v1/tx/push'
    else:
        raise Exception('Unsupported network {0} for blockr_pushtx'.format(
            network))
    if not re.match('^[0-9a-fA-F]*$', tx):
        tx = tx.encode('hex')
    return make_request(blockr_url, '{"hex":"%s"}' % tx)

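The 429 handling in make_request_blockr backs off exponentially with a 60 second cap; a one-line sketch (not in the commit) of the resulting sleep schedule in seconds:

print([min(60, 2**c / 2.) for c in range(9)])
# [0.5, 1.0, 2.0, 4.0, 8.0, 16.0, 32.0, 60, 60]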
bitcoin/secp256k1_deterministic.py (+92)

@@ -0,0 +1,92 @@
from bitcoin.secp256k1_main import *
import hmac
import hashlib
from binascii import hexlify

# Below code ASSUMES binary inputs and compressed pubkeys
MAINNET_PRIVATE = b'\x04\x88\xAD\xE4'
MAINNET_PUBLIC = b'\x04\x88\xB2\x1E'
TESTNET_PRIVATE = b'\x04\x35\x83\x94'
TESTNET_PUBLIC = b'\x04\x35\x87\xCF'
PRIVATE = [MAINNET_PRIVATE, TESTNET_PRIVATE]
PUBLIC = [MAINNET_PUBLIC, TESTNET_PUBLIC]


# BIP32 child key derivation
def raw_bip32_ckd(rawtuple, i):
    vbytes, depth, fingerprint, oldi, chaincode, key = rawtuple
    i = int(i)

    if vbytes in PRIVATE:
        priv = key
        pub = privtopub(key, False)
    else:
        pub = key

    if i >= 2**31:
        if vbytes in PUBLIC:
            raise Exception("Can't do private derivation on public key!")
        I = hmac.new(chaincode, b'\x00' + priv[:32] + encode(i, 256, 4),
                     hashlib.sha512).digest()
    else:
        I = hmac.new(chaincode, pub + encode(i, 256, 4),
                     hashlib.sha512).digest()

    if vbytes in PRIVATE:
        newkey = add_privkeys(I[:32] + b'\x01', priv, False)
        fingerprint = bin_hash160(privtopub(key, False))[:4]
    if vbytes in PUBLIC:
        newkey = add_pubkeys([privtopub(I[:32] + '\x01', False), key], False)
        fingerprint = bin_hash160(key)[:4]

    return (vbytes, depth + 1, fingerprint, i, I[32:], newkey)


def bip32_serialize(rawtuple):
    vbytes, depth, fingerprint, i, chaincode, key = rawtuple
    i = encode(i, 256, 4)
    chaincode = encode(hash_to_int(chaincode), 256, 32)
    keydata = b'\x00' + key[:-1] if vbytes in PRIVATE else key
    bindata = vbytes + from_int_to_byte(
        depth % 256) + fingerprint + i + chaincode + keydata
    return changebase(bindata + bin_dbl_sha256(bindata)[:4], 256, 58)


def bip32_deserialize(data):
    dbin = changebase(data, 58, 256)
    if bin_dbl_sha256(dbin[:-4])[:4] != dbin[-4:]:
        raise Exception("Invalid checksum")
    vbytes = dbin[0:4]
    depth = from_byte_to_int(dbin[4])
    fingerprint = dbin[5:9]
    i = decode(dbin[9:13], 256)
    chaincode = dbin[13:45]
    key = dbin[46:78] + b'\x01' if vbytes in PRIVATE else dbin[45:78]
    return (vbytes, depth, fingerprint, i, chaincode, key)


def raw_bip32_privtopub(rawtuple):
    vbytes, depth, fingerprint, i, chaincode, key = rawtuple
    newvbytes = MAINNET_PUBLIC if vbytes == MAINNET_PRIVATE else TESTNET_PUBLIC
    return (newvbytes, depth, fingerprint, i, chaincode, privtopub(key, False))


def bip32_privtopub(data):
    return bip32_serialize(raw_bip32_privtopub(bip32_deserialize(data)))


def bip32_ckd(data, i):
    return bip32_serialize(raw_bip32_ckd(bip32_deserialize(data), i))


def bip32_master_key(seed, vbytes=MAINNET_PRIVATE):
    I = hmac.new(
        from_string_to_bytes("Bitcoin seed"), seed, hashlib.sha512).digest()
    return bip32_serialize((vbytes, 0, b'\x00' * 4, 0, I[32:], I[:32] + b'\x01'))


def bip32_extract_key(data):
    return safe_hexlify(bip32_deserialize(data)[-1])


def bip32_descend(*args):
    if len(args) == 2:
        key, path = args
    else:
        key, path = args[0], map(int, args[1:])
    for p in path:
        key = bip32_ckd(key, p)
    return bip32_extract_key(key)

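A short derivation sketch (not part of the commit), using only the functions above; the import path is the same package layout used inside this tree:

import os
from bitcoin.secp256k1_deterministic import (bip32_master_key, bip32_ckd,
                                             bip32_privtopub, bip32_extract_key)

seed = os.urandom(32)                      # raw binary seed, as the code assumes
xpriv = bip32_master_key(seed)             # base58 xprv (mainnet version bytes)
child = bip32_ckd(bip32_ckd(xpriv, 0), 1)  # non-hardened path m/0/1
print(bip32_extract_key(child))            # hex privkey with trailing '01'
print(bip32_privtopub(child))              # matching xpub for the same node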
bitcoin/secp256k1_main.py (+393)

@@ -0,0 +1,393 @@
#!/usr/bin/python
from __future__ import print_function
from .py2specials import *
from .py3specials import *
import binascii
import hashlib
import re
import sys
import os
import base64
import time
import random
import hmac
import secp256k1
#Required only for PoDLE calculation:
N = 115792089237316195423570985008687907852837564279074904382605163141518161494337
#Global context for secp256k1 operations (helps with performance)
ctx = secp256k1.lib.secp256k1_context_create(secp256k1.ALL_FLAGS)
#required for point addition
dummy_pub = secp256k1.PublicKey(ctx=ctx)
#Standard prefix for Bitcoin message signing.
BITCOIN_MESSAGE_MAGIC = '\x18' + 'Bitcoin Signed Message:\n'
"""A custom nonce function acting as a pass-through.
Only used for reusable donation pubkeys (stealth).
"""
from cffi import FFI
ffi = FFI()
ffi.cdef('static int nonce_function_rand(unsigned char *nonce32,'
'const unsigned char *msg32,const unsigned char *key32,'
'const unsigned char *algo16,void *data,unsigned int attempt);')
ffi.set_source("_noncefunc",
"""
static int nonce_function_rand(unsigned char *nonce32,
const unsigned char *msg32,
const unsigned char *key32,
const unsigned char *algo16,
void *data,
unsigned int attempt)
{
memcpy(nonce32,data,32);
return 1;
}
""")
ffi.compile()
import _noncefunc
from _noncefunc import ffi
def tweak_mul(point, scalar):
"""Temporary hack because Windows binding had a bug in tweak_mul.
Can be removed when Windows binding is updated.
"""
return secp256k1._tweak_public(point,
secp256k1.lib.secp256k1_ec_pubkey_tweak_mul,
scalar)
"""PoDLE related primitives
"""
def getG(compressed=True):
"""Returns the public key binary
representation of secp256k1 G
"""
priv = "\x00"*31 + "\x01"
G = secp256k1.PrivateKey(priv, ctx=ctx).pubkey.serialize(compressed)
return G
podle_PublicKey_class = secp256k1.PublicKey
podle_PrivateKey_class = secp256k1.PrivateKey
def podle_PublicKey(P):
"""Returns a PublicKey object from a binary string
"""
return secp256k1.PublicKey(P, raw=True, ctx=ctx)
def podle_PrivateKey(priv):
"""Returns a PrivateKey object from a binary string
"""
return secp256k1.PrivateKey(priv, ctx=ctx)
def privkey_to_address(priv, from_hex=True, magicbyte=0):
return pubkey_to_address(privkey_to_pubkey(priv, from_hex), magicbyte)
privtoaddr = privkey_to_address
# Hashes
def bin_hash160(string):
intermed = hashlib.sha256(string).digest()
return hashlib.new('ripemd160', intermed).digest()
def hash160(string):
return safe_hexlify(bin_hash160(string))
def bin_sha256(string):
binary_data = string if isinstance(string, bytes) else bytes(string,
'utf-8')
return hashlib.sha256(binary_data).digest()
def sha256(string):
return bytes_to_hex_string(bin_sha256(string))
def bin_dbl_sha256(s):
bytes_to_hash = from_string_to_bytes(s)
return hashlib.sha256(hashlib.sha256(bytes_to_hash).digest()).digest()
def dbl_sha256(string):
return safe_hexlify(bin_dbl_sha256(string))
def hash_to_int(x):
if len(x) in [40, 64]:
return decode(x, 16)
return decode(x, 256)
def num_to_var_int(x):
x = int(x)
if x < 253: return from_int_to_byte(x)
elif x < 65536: return from_int_to_byte(253) + encode(x, 256, 2)[::-1]
elif x < 4294967296: return from_int_to_byte(254) + encode(x, 256, 4)[::-1]
else: return from_int_to_byte(255) + encode(x, 256, 8)[::-1]
def message_sig_hash(message):
"""Used for construction of signatures of
messages, intended to be compatible with Bitcoin Core.
"""
padded = BITCOIN_MESSAGE_MAGIC + num_to_var_int(len(
message)) + from_string_to_bytes(message)
return bin_dbl_sha256(padded)
# Encodings
def b58check_to_bin(inp):
leadingzbytes = len(re.match('^1*', inp).group(0))
data = b'\x00' * leadingzbytes + changebase(inp, 58, 256)
assert bin_dbl_sha256(data[:-4])[:4] == data[-4:]
return data[1:-4]
def get_version_byte(inp):
leadingzbytes = len(re.match('^1*', inp).group(0))
data = b'\x00' * leadingzbytes + changebase(inp, 58, 256)
assert bin_dbl_sha256(data[:-4])[:4] == data[-4:]
return ord(data[0])
def hex_to_b58check(inp, magicbyte=0):
return bin_to_b58check(binascii.unhexlify(inp), magicbyte)
def b58check_to_hex(inp):
return safe_hexlify(b58check_to_bin(inp))
def pubkey_to_address(pubkey, magicbyte=0):
if len(pubkey) in [66, 130]:
return bin_to_b58check(
bin_hash160(binascii.unhexlify(pubkey)), magicbyte)
return bin_to_b58check(bin_hash160(pubkey), magicbyte)
pubtoaddr = pubkey_to_address
def wif_compressed_privkey(priv, vbyte=0):
"""Convert privkey in hex compressed to WIF compressed
"""
if len(priv) != 66:
raise Exception("Wrong length of compressed private key")
if priv[-2:] != '01':
raise Exception("Private key has wrong compression byte")
return bin_to_b58check(binascii.unhexlify(priv), 128 + int(vbyte))
def from_wif_privkey(wif_priv, compressed=True, vbyte=0):
"""Convert WIF compressed privkey to hex compressed.
Caller specifies the network version byte (0 for mainnet, 0x6f
for testnet) that the key should correspond to; if there is
a mismatch an error is thrown. WIF encoding uses 128+ this number.
"""
bin_key = b58check_to_bin(wif_priv)
claimed_version_byte = get_version_byte(wif_priv)
if not 128+vbyte == claimed_version_byte:
raise Exception(
"WIF key version byte is wrong network (mainnet/testnet?)")
if compressed and not len(bin_key) == 33:
raise Exception("Compressed private key is not 33 bytes")
if compressed and not bin_key[-1] == '\x01':
raise Exception("Private key has incorrect compression byte")
return safe_hexlify(bin_key)
def ecdsa_sign(msg, priv, usehex=True):
hashed_msg = message_sig_hash(msg)
if usehex:
#arguments to raw sign must be consistently hex or bin
hashed_msg = binascii.hexlify(hashed_msg)
sig = ecdsa_raw_sign(hashed_msg, priv, usehex, rawmsg=True)
#note these functions only handle binary, not hex
if usehex:
sig = binascii.unhexlify(sig)
return base64.b64encode(sig)
def ecdsa_verify(msg, sig, pub, usehex=True):
hashed_msg = message_sig_hash(msg)
sig = base64.b64decode(sig)
if usehex:
#arguments to raw_verify must be consistently hex or bin
hashed_msg = binascii.hexlify(hashed_msg)
sig = binascii.hexlify(sig)
return ecdsa_raw_verify(hashed_msg, pub, sig, usehex, rawmsg=True)
#Use secp256k1 to handle all EC and ECDSA operations.
#Data types: only hex and binary.
#Compressed and uncompressed private and public keys.
def hexbin(func):
'''To enable each function to 'speak' either hex or binary,
requires that the decorated function's final positional argument
is a boolean flag, True for hex and False for binary.
'''
def func_wrapper(*args, **kwargs):
if args[-1]:
newargs = []
for arg in args[:-1]:
if isinstance(arg, (list, tuple)):
newargs += [[x.decode('hex') for x in arg]]
else:
newargs += [arg.decode('hex')]
newargs += [False]
returnval = func(*newargs, **kwargs)
if isinstance(returnval, bool):
return returnval
else:
return binascii.hexlify(returnval)
else:
return func(*args, **kwargs)
return func_wrapper
def read_privkey(priv):
if len(priv) == 33:
if priv[-1] == '\x01':
compressed = True
else:
raise Exception("Invalid private key")
elif len(priv) == 32:
compressed = False
else:
raise Exception("Invalid private key")
return (compressed, priv[:32])
@hexbin
def privkey_to_pubkey_inner(priv, usehex):
'''Take 32/33 byte raw private key as input.
If 32 bytes, return compressed (33 byte) raw public key.
If 33 bytes, read the final byte as compression flag,
and return compressed/uncompressed public key as appropriate.'''
compressed, priv = read_privkey(priv)
#secp256k1 checks for validity of key value.
newpriv = secp256k1.PrivateKey(privkey=priv, ctx=ctx)
return newpriv.pubkey.serialize(compressed=compressed)
def privkey_to_pubkey(priv, usehex=True):
'''To avoid changing the interface from the legacy system,
allow an *optional* hex argument here (called differently from
maker/taker code to how it's called in bip32 code), then
pass to the standard hexbin decorator under the hood.
'''
return privkey_to_pubkey_inner(priv, usehex)
privtopub = privkey_to_pubkey
@hexbin
def multiply(s, pub, usehex, rawpub=True, return_serialized=True):
'''Input binary compressed pubkey P(33 bytes)
and scalar s(32 bytes), return s*P.
The return value is a binary compressed public key,
or a PublicKey object if return_serialized is False.
Note that the called function does the type checking
of the scalar s.
('raw' options passed in)
'''
newpub = secp256k1.PublicKey(pub, raw=rawpub, ctx=ctx)
#see note to "tweak_mul" function in podle.py
res = secp256k1._tweak_public(newpub,
secp256k1.lib.secp256k1_ec_pubkey_tweak_mul,
s)
if not return_serialized:
return res
return res.serialize()
@hexbin
def add_pubkeys(pubkeys, usehex):
'''Input a list of binary compressed pubkeys
and return their sum as a binary compressed pubkey.'''
r = secp256k1.PublicKey(ctx=ctx) #dummy holding object
pubkey_list = [secp256k1.PublicKey(x,
raw=True,
ctx=ctx).public_key for x in pubkeys]
r.combine(pubkey_list)
return r.serialize()
@hexbin
def add_privkeys(priv1, priv2, usehex):
'''Add privkey 1 to privkey 2.
Input keys must be in binary either compressed or not.
Returned key will have the same compression state.
Error if compression state of both input keys is not the same.'''
y, z = [read_privkey(x) for x in [priv1, priv2]]
if y[0] != z[0]:
raise Exception("cannot add privkeys, mixed compression formats")
else:
compressed = y[0]
newpriv1, newpriv2 = (y[1], z[1])
p1 = secp256k1.PrivateKey(newpriv1, raw=True, ctx=ctx)
res = p1.tweak_add(newpriv2)
if compressed:
res += '\x01'
return res
@hexbin
def ecdsa_raw_sign(msg,
priv,
usehex,
rawpriv=True,
rawmsg=False,
usenonce=None):
'''Take the binary message msg and sign it with the private key
priv.
By default priv is just a 32 byte string, if rawpriv is false
it is assumed to be DER encoded.
If rawmsg is True, no sha256 hash is applied to msg before signing.
In this case, msg must be a precalculated hash (256 bit).
If rawmsg is False, the secp256k1 lib will hash the message as part
of the ECDSA-SHA256 signing algo.
If usenonce is not None, its value is passed to the secp256k1 library
sign() function as the ndata value, which is then used in conjunction
with a custom nonce generating function, such that the nonce used in the ECDSA
sign algorithm is exactly that value (ndata there, usenonce here). 32 bytes.
Return value: the calculated signature.'''
if rawmsg and len(msg) != 32:
raise Exception("Invalid hash input to ECDSA raw sign.")
if rawpriv:
compressed, p = read_privkey(priv)
newpriv = secp256k1.PrivateKey(p, raw=True, ctx=ctx)
else:
newpriv = secp256k1.PrivateKey(priv, raw=False, ctx=ctx)
if usenonce:
if len(usenonce) != 32:
raise ValueError("Invalid nonce passed to ecdsa_sign: " + str(
usenonce))
nf = ffi.addressof(_noncefunc.lib, "nonce_function_rand")
ndata = ffi.new("char [32]", usenonce)
usenonce = (nf, ndata)
if usenonce:
sig = newpriv.ecdsa_sign(msg, raw=rawmsg, custom_nonce=usenonce)
else:
#partial fix for secp256k1-transient not including customnonce;
#partial because donations will crash on windows in the "if".
sig = newpriv.ecdsa_sign(msg, raw=rawmsg)
return newpriv.ecdsa_serialize(sig)
@hexbin
def ecdsa_raw_verify(msg, pub, sig, usehex, rawmsg=False):
'''Take the binary message msg and binary signature sig,
and verify it against the pubkey pub.
If rawmsg is True, no sha256 hash is applied to msg before verifying.
In this case, msg must be a precalculated hash (256 bit).
If rawmsg is False, the secp256k1 lib will hash the message as part
of the ECDSA-SHA256 verification algo.
Return value: True if the signature is valid for this pubkey, False
otherwise.
Since the arguments may come from external messages their content is
not guaranteed, so return False on any parsing exception.
'''
try:
if rawmsg:
assert len(msg) == 32
newpub = secp256k1.PublicKey(pubkey=pub, raw=True, ctx=ctx)
sigobj = newpub.ecdsa_deserialize(sig)
retval = newpub.ecdsa_verify(msg, sigobj, raw=rawmsg)
except:
return False
return retval
def estimate_tx_size(ins, outs, txtype='p2pkh'):
'''Estimate transaction size.
Assuming p2pkh:
out: 8+1+3+2+20=34, in: 1+32+4+1+1+~73+1+1+33=147,
ver:4,seq:4, +2 (len in,out)
total ~= 34*len_out + 147*len_in + 10 (sig sizes vary slightly)
'''
if txtype == 'p2pkh':
return 10 + ins * 147 + 34 * outs
else:
raise NotImplementedError("Non p2pkh transaction size estimation not "
"yet implemented")

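A hedged round-trip sketch (not in the commit) of the message-signing helpers above, using their default hex interface; the key value is just an example:

from bitcoin.secp256k1_main import ecdsa_sign, ecdsa_verify, privkey_to_pubkey

priv = '07' * 32 + '01'            # hex, compressed (trailing 01); example value only
pub = privkey_to_pubkey(priv)      # hex compressed pubkey
sig = ecdsa_sign('hello joinmarket', priv)
assert ecdsa_verify('hello joinmarket', sig, pub)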
bitcoin/secp256k1_transaction.py (+436)

@@ -0,0 +1,436 @@
#!/usr/bin/python
import binascii, re, json, copy, sys
from bitcoin.secp256k1_main import *
from _functools import reduce
import os
is_python2 = sys.version_info.major == 2
### Hex to bin converter and vice versa for objects
def json_is_base(obj, base):
if not is_python2 and isinstance(obj, bytes):
return False
alpha = get_code_string(base)
if isinstance(obj, string_types):
for i in range(len(obj)):
if alpha.find(obj[i]) == -1:
return False
return True
elif isinstance(obj, int_types) or obj is None:
return True
elif isinstance(obj, list):
for i in range(len(obj)):
if not json_is_base(obj[i], base):
return False
return True
else:
for x in obj:
if not json_is_base(obj[x], base):
return False
return True
def json_changebase(obj, changer):
if isinstance(obj, string_or_bytes_types):
return changer(obj)
elif isinstance(obj, int_types) or obj is None:
return obj
elif isinstance(obj, list):
return [json_changebase(x, changer) for x in obj]
return dict((x, json_changebase(obj[x], changer)) for x in obj)
# Transaction serialization and deserialization
def deserialize(tx):
if isinstance(tx, str) and re.match('^[0-9a-fA-F]*$', tx):
#tx = bytes(bytearray.fromhex(tx))
return json_changebase(
deserialize(binascii.unhexlify(tx)), lambda x: safe_hexlify(x))
# http://stackoverflow.com/questions/4851463/python-closure-write-to-variable-in-parent-scope
# Python's scoping rules are demented, requiring me to make pos an object
# so that it is call-by-reference
pos = [0]
def read_as_int(bytez):
pos[0] += bytez
return decode(tx[pos[0] - bytez:pos[0]][::-1], 256)
def read_var_int():
pos[0] += 1
val = from_byte_to_int(tx[pos[0] - 1])
if val < 253:
return val
return read_as_int(pow(2, val - 252))
def read_bytes(bytez):
pos[0] += bytez
return tx[pos[0] - bytez:pos[0]]
def read_var_string():
size = read_var_int()
return read_bytes(size)
obj = {"ins": [], "outs": []}
obj["version"] = read_as_int(4)
ins = read_var_int()
for i in range(ins):
obj["ins"].append({
"outpoint": {
"hash": read_bytes(32)[::-1],
"index": read_as_int(4)
},
"script": read_var_string(),
"sequence": read_as_int(4)
})
outs = read_var_int()
for i in range(outs):
obj["outs"].append({
"value": read_as_int(8),
"script": read_var_string()
})
obj["locktime"] = read_as_int(4)
return obj
def serialize(txobj):
#if isinstance(txobj, bytes):
# txobj = bytes_to_hex_string(txobj)
o = []
if json_is_base(txobj, 16):
json_changedbase = json_changebase(txobj,
lambda x: binascii.unhexlify(x))
hexlified = safe_hexlify(serialize(json_changedbase))
return hexlified
o.append(encode(txobj["version"], 256, 4)[::-1])
o.append(num_to_var_int(len(txobj["ins"])))
for inp in txobj["ins"]:
o.append(inp["outpoint"]["hash"][::-1])
o.append(encode(inp["outpoint"]["index"], 256, 4)[::-1])
o.append(num_to_var_int(len(inp["script"])) + (inp["script"] if inp[
"script"] or is_python2 else bytes()))
o.append(encode(inp["sequence"], 256, 4)[::-1])
o.append(num_to_var_int(len(txobj["outs"])))
for out in txobj["outs"]:
o.append(encode(out["value"], 256, 8)[::-1])
o.append(num_to_var_int(len(out["script"])) + out["script"])
o.append(encode(txobj["locktime"], 256, 4)[::-1])
return ''.join(o) if is_python2 else reduce(lambda x, y: x + y, o, bytes())
# Hashing transactions for signing
SIGHASH_ALL = 1
SIGHASH_NONE = 2
SIGHASH_SINGLE = 3
SIGHASH_ANYONECANPAY = 0x80
def signature_form(tx, i, script, hashcode=SIGHASH_ALL):
i, hashcode = int(i), int(hashcode)
if isinstance(tx, string_or_bytes_types):
return serialize(signature_form(deserialize(tx), i, script, hashcode))
newtx = copy.deepcopy(tx)
for inp in newtx["ins"]:
inp["script"] = ""
newtx["ins"][i]["script"] = script
if hashcode & 0x1f == SIGHASH_NONE:
newtx["outs"] = []
for j, inp in enumerate(newtx["ins"]):
if j != i:
inp["sequence"] = 0
elif hashcode & 0x1f == SIGHASH_SINGLE:
if len(newtx["ins"]) > len(newtx["outs"]):
raise Exception(
"Transactions with sighash single should have len in <= len out")
newtx["outs"] = newtx["outs"][:i+1]
for out in newtx["outs"][:i]:
out['value'] = 2**64 - 1
out['script'] = ""
for j, inp in enumerate(newtx["ins"]):
if j != i:
inp["sequence"] = 0
if hashcode & SIGHASH_ANYONECANPAY:
newtx["ins"] = [newtx["ins"][i]]
else:
pass
return newtx
def txhash(tx, hashcode=None):
if isinstance(tx, str) and re.match('^[0-9a-fA-F]*$', tx):
tx = changebase(tx, 16, 256)
if hashcode:
return dbl_sha256(from_string_to_bytes(tx) + encode(
int(hashcode), 256, 4)[::-1])
else:
return safe_hexlify(bin_dbl_sha256(tx)[::-1])
def bin_txhash(tx, hashcode=None):
return binascii.unhexlify(txhash(tx, hashcode))
def ecdsa_tx_sign(tx, priv, hashcode=SIGHASH_ALL, usenonce=None):
sig = ecdsa_raw_sign(
txhash(tx, hashcode),
priv,
True,
rawmsg=True,
usenonce=usenonce)
return sig + encode(hashcode, 16, 2)
def ecdsa_tx_verify(tx, sig, pub, hashcode=SIGHASH_ALL):
return ecdsa_raw_verify(
txhash(tx, hashcode),
pub,
sig[:-2],
True,
rawmsg=True)
# Scripts
def mk_pubkey_script(addr):
# Keep the auxiliary functions around for altcoins' sake
return '76a914' + b58check_to_hex(addr) + '88ac'
def mk_scripthash_script(addr):
return 'a914' + b58check_to_hex(addr) + '87'
# Address representation to output script
def address_to_script(addr):
if addr[0] == '3' or addr[0] == '2':
return mk_scripthash_script(addr)
else:
return mk_pubkey_script(addr)
# Output script to address representation
def script_to_address(script, vbyte=0):
if re.match('^[0-9a-fA-F]*$', script):
script = binascii.unhexlify(script)
if script[:3] == b'\x76\xa9\x14' and script[-2:] == b'\x88\xac' and len(
script) == 25:
return bin_to_b58check(script[3:-2], vbyte) # pubkey hash addresses
else:
if vbyte in [111, 196]:
# Testnet
scripthash_byte = 196
else:
scripthash_byte = 5
# BIP0016 scripthash addresses
return bin_to_b58check(script[2:-1], scripthash_byte)
def p2sh_scriptaddr(script, magicbyte=5):
if re.match('^[0-9a-fA-F]*$', script):
script = binascii.unhexlify(script)
return hex_to_b58check(hash160(script), magicbyte)
scriptaddr = p2sh_scriptaddr
def deserialize_script(script):
if isinstance(script, str) and re.match('^[0-9a-fA-F]*$', script):
return json_changebase(
deserialize_script(binascii.unhexlify(script)),
lambda x: safe_hexlify(x))
out, pos = [], 0
while pos < len(script):
code = from_byte_to_int(script[pos])
if code == 0:
out.append(None)
pos += 1
elif code <= 75:
out.append(script[pos + 1:pos + 1 + code])
pos += 1 + code
elif code <= 78:
szsz = pow(2, code - 76)
sz = decode(script[pos + szsz:pos:-1], 256)
out.append(script[pos + 1 + szsz:pos + 1 + szsz + sz])
pos += 1 + szsz + sz
elif code <= 96:
out.append(code - 80)
pos += 1
else:
out.append(code)
pos += 1
return out
def serialize_script_unit(unit):
if isinstance(unit, int):
if unit < 16:
return from_int_to_byte(unit + 80)
else:
return bytes([unit])
elif unit is None:
return b'\x00'
else:
if len(unit) <= 75:
return from_int_to_byte(len(unit)) + unit
elif len(unit) < 256:
return from_int_to_byte(76) + from_int_to_byte(len(unit)) + unit
elif len(unit) < 65536:
return from_int_to_byte(77) + encode(len(unit), 256, 2)[::-1] + unit
else:
return from_int_to_byte(78) + encode(len(unit), 256, 4)[::-1] + unit
if is_python2:
def serialize_script(script):
if json_is_base(script, 16):
return binascii.hexlify(serialize_script(json_changebase(
script, lambda x: binascii.unhexlify(x))))
return ''.join(map(serialize_script_unit, script))
else:
def serialize_script(script):
if json_is_base(script, 16):
return safe_hexlify(serialize_script(json_changebase(
script, lambda x: binascii.unhexlify(x))))
result = bytes()
for b in map(serialize_script_unit, script):
result += b if isinstance(b, bytes) else bytes(b, 'utf-8')
return result
def mk_multisig_script(*args): # [pubs],k or pub1,pub2...pub[n],k
if isinstance(args[0], list):
pubs, k = args[0], int(args[1])
else:
pubs = list(filter(lambda x: len(str(x)) >= 32, args))
k = int(args[len(pubs)])
return serialize_script([k] + pubs + [len(pubs)]) + 'ae'
# Signing and verifying
def verify_tx_input(tx, i, script, sig, pub):
if re.match('^[0-9a-fA-F]*$', tx):
tx = binascii.unhexlify(tx)
if re.match('^[0-9a-fA-F]*$', script):
script = binascii.unhexlify(script)
if not re.match('^[0-9a-fA-F]*$', sig):
sig = safe_hexlify(sig)
if not re.match('^[0-9a-fA-F]*$', pub):
pub = safe_hexlify(pub)
hashcode = decode(sig[-2:], 16)
modtx = signature_form(tx, int(i), script, hashcode)
return ecdsa_tx_verify(modtx, sig, pub, hashcode)
def sign(tx, i, priv, hashcode=SIGHASH_ALL, usenonce=None):
i = int(i)
if (not is_python2 and isinstance(tx, bytes)) or not re.match(
'^[0-9a-fA-F]*$', tx):
return binascii.unhexlify(sign(safe_hexlify(tx), i, priv))
if len(priv) <= 33:
priv = safe_hexlify(priv)
pub = privkey_to_pubkey(priv, True)
address = pubkey_to_address(pub)
signing_tx = signature_form(tx, i, mk_pubkey_script(address), hashcode)
sig = ecdsa_tx_sign(signing_tx, priv, hashcode, usenonce=usenonce)
txobj = deserialize(tx)
txobj["ins"][i]["script"] = serialize_script([sig, pub])
return serialize(txobj)
def signall(tx, priv):
# if priv is a dictionary, assume format is
# { 'txinhash:txinidx' : privkey }
if isinstance(priv, dict):
for e, i in enumerate(deserialize(tx)["ins"]):
k = priv["%s:%d" % (i["outpoint"]["hash"], i["outpoint"]["index"])]
tx = sign(tx, e, k)
else:
for i in range(len(deserialize(tx)["ins"])):
tx = sign(tx, i, priv)
return tx
def multisign(tx, i, script, pk, hashcode=SIGHASH_ALL):
if re.match('^[0-9a-fA-F]*$', tx):
tx = binascii.unhexlify(tx)
if re.match('^[0-9a-fA-F]*$', script):
script = binascii.unhexlify(script)
modtx = signature_form(tx, i, script, hashcode)
return ecdsa_tx_sign(modtx, pk, hashcode)
def apply_multisignatures(*args):
# tx,i,script,sigs OR tx,i,script,sig1,sig2...,sig[n]
tx, i, script = args[0], int(args[1]), args[2]
sigs = args[3] if isinstance(args[3], list) else list(args[3:])
if isinstance(script, str) and re.match('^[0-9a-fA-F]*$', script):
script = binascii.unhexlify(script)
sigs = [binascii.unhexlify(x) if x[:2] == '30' else x for x in sigs]
if isinstance(tx, str) and re.match('^[0-9a-fA-F]*$', tx):
return safe_hexlify(apply_multisignatures(
binascii.unhexlify(tx), i, script, sigs))
txobj = deserialize(tx)
txobj["ins"][i]["script"] = serialize_script([None] + sigs + [script])
return serialize(txobj)
def is_inp(arg):
return len(arg) > 64 or "output" in arg or "outpoint" in arg
def mktx(*args):
# [in0, in1...],[out0, out1...] or in0, in1 ... out0 out1 ...
ins, outs = [], []
for arg in args:
if isinstance(arg, list):
for a in arg:
(ins if is_inp(a) else outs).append(a)
else:
(ins if is_inp(arg) else outs).append(arg)
txobj = {"locktime": 0, "version": 1, "ins": [], "outs": []}
for i in ins:
if isinstance(i, dict) and "outpoint" in i:
txobj["ins"].append(i)
else:
if isinstance(i, dict) and "output" in i:
i = i["output"]
txobj["ins"].append({
"outpoint": {"hash": i[:64],
"index": int(i[65:])},
"script": "",
"sequence": 4294967295
})
for o in outs:
if isinstance(o, string_or_bytes_types):
addr = o[:o.find(':')]
val = int(o[o.find(':') + 1:])
o = {}
if re.match('^[0-9a-fA-F]*$', addr):
o["script"] = addr
else:
o["address"] = addr
o["value"] = val
outobj = {}
if "address" in o:
outobj["script"] = address_to_script(o["address"])
elif "script" in o:
outobj["script"] = o["script"]
else:
raise Exception("Could not find 'address' or 'script' in output.")
outobj["value"] = o["value"]
txobj["outs"].append(outobj)
return serialize(txobj)
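A small construction sketch (not in the commit): mktx accepts inputs as 'txid:vout' strings and outputs as 'address:satoshis' strings. The txid below is a placeholder, and the address is the one reused in client/btc.py's self-test.

from bitcoin.secp256k1_transaction import mktx, deserialize

ins = ['aa' * 32 + ':0']                             # placeholder 'txid:vout'
outs = ['1LT6rwv26bV7mgvRosoSCyGM7ttVRsYidP:90000']  # 'address:satoshis'
txhex = mktx(ins, outs)
print(deserialize(txhex)['outs'][0]['value'])        # 90000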

client/__init__.py (+38)

@@ -0,0 +1,38 @@
from __future__ import print_function

import logging

#Full joinmarket uses its own bitcoin module;
#other implementations (like wallet plugins)
#can optionally include their own, which must
#be implemented as an interface in btc.py
from btc import *

from .support import get_log, calc_cj_fee, debug_dump_object, \
    choose_sweep_orders, choose_orders, \
    pick_order, cheapest_order_choose, weighted_order_choose, \
    rand_norm_array, rand_pow_array, rand_exp_array, joinmarket_alert, core_alert
from .jsonrpc import JsonRpcError, JsonRpcConnectionError, JsonRpc
from .old_mnemonic import mn_decode, mn_encode
from .slowaes import decryptData, encryptData
from .taker import Taker
from .wallet import AbstractWallet, BitcoinCoreInterface, Wallet, \
    BitcoinCoreWallet, estimate_tx_fee, ElectrumWrapWallet
from .configure import load_program_config, jm_single, get_p2pk_vbyte, \
    get_network, validate_address, get_irc_mchannels, \
    check_utxo_blacklist
from .blockchaininterface import BlockrInterface, BlockchainInterface
from .client_protocol import JMTakerClientProtocolFactory, start_reactor
from .podle import set_commitment_file, get_commitment_file
from .commands import *

# Set default logging handler to avoid "No handler found" warnings.
try:
    from logging import NullHandler
except ImportError:
    class NullHandler(logging.Handler):
        def emit(self, record):
            pass

logging.getLogger(__name__).addHandler(NullHandler())

client/blockchaininterface.py (+1024)

(File diff suppressed because it is too large.)

client/btc.py (+508)

@@ -0,0 +1,508 @@
"""Module to support bitcoin operations using a
different codebase than joinmarket's own.
"""
#Protocol constants
BTC_P2PK_VBYTE = {"mainnet": 0x00, "testnet": 0x6f}
BTC_P2SH_VBYTE = {"mainnet": 0x05, "testnet": 0xc4}
PODLE_COMMIT_FILE = None
from .support import get_log
import binascii, sys, re, hashlib, base64
from pprint import pformat
log = get_log()
#Required only for PoDLE calculation:
N = 115792089237316195423570985008687907852837564279074904382605163141518161494337
if sys.version_info.major == 2:
string_types = (str, unicode)
string_or_bytes_types = string_types
int_types = (int, float, long)
# Base switching
code_strings = {
2: '01',
10: '0123456789',
16: '0123456789abcdef',
32: 'abcdefghijklmnopqrstuvwxyz234567',
58: '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz',
256: ''.join([chr(x) for x in range(256)])
}
def bin_dbl_sha256(s):
bytes_to_hash = from_string_to_bytes(s)
return hashlib.sha256(hashlib.sha256(bytes_to_hash).digest()).digest()
def lpad(msg, symbol, length):
if len(msg) >= length:
return msg
return symbol * (length - len(msg)) + msg
def get_code_string(base):
if base in code_strings:
return code_strings[base]
else:
raise ValueError("Invalid base!")
def changebase(string, frm, to, minlen=0):
if frm == to:
return lpad(string, get_code_string(frm)[0], minlen)
return encode(decode(string, frm), to, minlen)
def bin_to_b58check(inp, magicbyte=0):
inp_fmtd = chr(int(magicbyte)) + inp
leadingzbytes = len(re.match('^\x00*', inp_fmtd).group(0))
checksum = bin_dbl_sha256(inp_fmtd)[:4]
return '1' * leadingzbytes + changebase(inp_fmtd + checksum, 256, 58)
def bytes_to_hex_string(b):
return b.encode('hex')
def safe_from_hex(s):
return s.decode('hex')
def from_int_to_byte(a):
return chr(a)
def from_byte_to_int(a):
return ord(a)
def from_string_to_bytes(a):
return a
def safe_hexlify(a):
return binascii.hexlify(a)
def encode(val, base, minlen=0):
base, minlen = int(base), int(minlen)
code_string = get_code_string(base)
result = ""
while val > 0:
result = code_string[val % base] + result
val //= base
return code_string[0] * max(minlen - len(result), 0) + result
def decode(string, base):
base = int(base)
code_string = get_code_string(base)
result = 0
if base == 16:
string = string.lower()
while len(string) > 0:
result *= base
result += code_string.find(string[0])
string = string[1:]
return result
else:
raise NotImplementedError("Only Python2 currently supported by btc interface")
interface = "joinmarket-joinmarket"
try:
from bitcoin import *
bjm = True
except ImportError:
#TODO figure out the right flexibility structure
interface = "joinmarket-electrum"
if interface != "joinmarket-electrum":
raise NotImplementedError
not_supported_string = "not supported by: " + interface
#Electrum specific code starts here
import electrum.bitcoin as ebt
import electrum.transaction as etr
from ecdsa.ecdsa import curve_secp256k1, generator_secp256k1, point_is_valid
from ecdsa.util import string_to_number, sigdecode_der, sigencode_der
from ecdsa import VerifyingKey, BadSignatureError, BadDigestError
from ecdsa.curves import SECP256k1
from ecdsa.numbertheory import square_root_mod_prime
from ecdsa.ellipticcurve import Point
class PPubKey(object):
def __init__(self, serP):
self._point = ebt.ser_to_point(serP)
def serialize(self):
return ebt.point_to_ser(self._point)
class PPrivKey(object):
def __init__(self, scalar):
self._privkey = ebt.EC_KEY(scalar)
self.private_key = scalar
self.pubkey = PPubKey(binascii.unhexlify(
self._privkey.get_public_key()))
podle_PublicKey_class = PPubKey
podle_PrivateKey_class = PPrivKey
def podle_PublicKey(P):
return PPubKey(P)
def podle_PrivateKey(priv):
return PPrivKey(priv)
def multiply(s, pub, usehex, rawpub=True, return_serialized=True):
"""s should be 32 byte scalar, pub should be of type
podle_PublicKey_class
"""
if usehex:
s = binascii.unhexlify(s)
pub = binascii.unhexlify(pub)
ppub = PPubKey(pub)
p = ppub._point
s_int = decode(s, 256)
m = p * s_int
r = PPubKey(ebt.point_to_ser(m))
if return_serialized:
return r.serialize()
return r
def add_pubkeys(pubkeys, usehex):
"""Pubkeys should be a list (for compatibility).
"""
#Not supporting more than 2 items for now, not needed.
assert len(pubkeys) == 2
if usehex:
pubkeys = [binascii.unhexlify(x) for x in pubkeys]
p1pt, p2pt = [ebt.ser_to_point(x) for x in pubkeys]
sumpt = p1pt + p2pt
return ebt.point_to_ser(sumpt)
def getG(compressed=True):
scalar = "\x00"*31 + "\x01"
return binascii.unhexlify(
ebt.EC_KEY(scalar).get_public_key(compressed=compressed))
def sign(tx):
#transaction signing is handled by the wallet for Electrum
raise NotImplementedError("sign " + not_supported_string)
def get_version_byte(inp):
leadingzbytes = len(re.match('^1*', inp).group(0))
data = b'\x00' * leadingzbytes + b58check_to_bin(inp)
return ord(data[0])
def b58check_to_bin(addr):
return ebt.DecodeBase58Check(addr)[1:]
def changebase(inp, frm=256, to=58):
"""Implementation of base58 (*not* b58check) conversion
only. Used in message channel verifiable nick construction.
Explicitly disabling any other conversion for now.
"""
if not (frm==256 and to==58):
raise NotImplementedError
return ebt.base_encode(inp, 58)
def address_to_script(addr):
return etr.Transaction.pay_script(ebt.TYPE_ADDRESS, addr)
def script_to_address(script):
bin_script = binascii.unhexlify(script)
res = etr.get_address_from_output_script(bin_script)
if not res[0] == ebt.TYPE_ADDRESS:
raise ValueError("Invalid script for bitcoin address")
return res[1]
def bin_dbl_sha256(x):
return ebt.sha256(ebt.sha256(x))
def dbl_sha256(x):
return binascii.hexlify(bin_dbl_sha256(x))
def verify_tx_input(tx, i, script, sig, pub):
pub, sig, script = (binascii.unhexlify(x) for x in [pub, sig, script])
t = etr.Transaction(tx)
t.deserialize()
#to prepare for verification (to do the txhash for modtx)
#we need to have the "address" field set in the input.
typ, addr = etr.get_address_from_output_script(script)
if not typ == ebt.TYPE_ADDRESS:
#Don't support non-p2sh, non-p2pkh for now
log.debug("Invalid script")
return False
t.inputs()[i]["address"] = addr
txforsig = etr.Hash(t.tx_for_sig(i).decode('hex'))
ecdsa_pub = get_ecdsa_verifying_key(pub)
if not ecdsa_pub:
return False
try:
verified = ecdsa_pub.verify_digest(sig, txforsig,
sigdecode = sigdecode_der)
except (BadSignatureError, BadDigestError):
return False
return True
def get_ecdsa_verifying_key(pub):
#some shenanigans required to validate a transaction sig; see
#python.ecdsa PR #54. This will be a lot simpler when that's merged.
#https://github.com/warner/python-ecdsa/pull/54/files
if not pub[0] in ["\x02", "\x03"]:
log.debug("Invalid pubkey")
return None
is_even = pub.startswith('\x02')
x = string_to_number(pub[1:])
order = SECP256k1.order
p = SECP256k1.curve.p()
alpha = (pow(x, 3, p) + (SECP256k1.curve.a() * x) + SECP256k1.curve.b()) % p
beta = square_root_mod_prime(alpha, p)
if is_even == bool(beta & 1):
y = p - beta
else:
y = beta
if not point_is_valid(SECP256k1.generator, x, y):
return None
point = Point(SECP256k1.curve, x, y, order)
return VerifyingKey.from_public_point(point, SECP256k1,
hashfunc=hashlib.sha256)
def ecdsa_verify(msg, sig, pub, usehex=True):
sig = base64.b64decode(sig)
if usehex:
pub = binascii.unhexlify(pub)
verif_key = get_ecdsa_verifying_key(pub)
return verif_key.verify_digest(sig,
ebt.Hash(ebt.msg_magic(msg)),
sigdecode = sigdecode_der)
def ecdsa_sign(msg, priv, usehex=True):
if usehex:
priv = binascii.unhexlify(priv)
compressed = False
if len(priv) == 33 and priv[-1]=="\x01":
compressed = True
signkey = ebt.EC_KEY(priv[:32])
private_key = ebt.MySigningKey.from_secret_exponent(signkey.secret,
curve=SECP256k1)
sig = private_key.sign_digest_deterministic(ebt.Hash(ebt.msg_magic(msg)),
hashfunc=hashlib.sha256,
sigencode = sigencode_der)
return base64.b64encode(sig)
def serialize(txobj):
#It is a rather chunky matter to re-use electrum.transaction code
#to do serialization, it has a very different approach. Hence some
#code duplication here with bitcoin-joinmarket. However we use the
#number encoding functions from Electrum. Also, this is always in hex.
o = []
o.append(ebt.int_to_hex(txobj["version"], 4))
o.append(ebt.var_int(len(txobj["ins"])))
for inp in txobj["ins"]:
binhash = binascii.unhexlify(inp["outpoint"]["hash"])
binhash = binhash[::-1]
o.append(binascii.hexlify(binhash))
o.append(ebt.int_to_hex(inp["outpoint"]["index"], 4))
o.append(ebt.var_int(len(inp["script"])/2) + inp["script"])
o.append(ebt.int_to_hex(inp["sequence"], 4))
o.append(ebt.var_int(len(txobj["outs"])))
for out in txobj["outs"]:
o.append(ebt.int_to_hex(out["value"], 8))
o.append(ebt.var_int(len(out["script"])/2) + out["script"])
o.append(ebt.int_to_hex(txobj["locktime"], 4))
return ''.join(o)
def deserialize_script(scriptSig):
#Assumes P2PKH scriptSig
d = {}
etr.parse_scriptSig(d, binascii.unhexlify(scriptSig))
return (d["signatures"][0], d["pubkeys"][0])
def deserialize(txhex):
t = etr.deserialize(txhex)
#translation from Electrum deserialization
#to pybitcointools form as used in joinmarket
#pybitcointools structure:
#obj = {"ins": [..], "outs": [..], "locktime": int}
#where ins elements are:
#{"outpoint": {"hash": bigendian32,"index": int},
#"script": hex,"sequence": int}
#and outs elements are:
#{"script": hex, "value": int}
#
#while electrum.transaction.deserialize returns object
#like:
#{"version": int, "inputs": [..], "outputs": [..], "lockTime": int}
obj = {}
obj["version"] = t["version"]
obj["locktime"] = t["lockTime"]
obj["ins"] = []
obj["outs"] = []
for i in t["inputs"]:
outpoint = {"hash": i["prevout_hash"], "index": i["prevout_n"]}
scr = i["scriptSig"]
sequence = i["sequence"]
obj["ins"].append({"outpoint": outpoint, "script": scr, "sequence": sequence})
for i in t["outputs"]:
obj["outs"].append({"script": i["scriptPubKey"], "value": i["value"]})
return obj
def privkey_to_pubkey(privkey, usehex=True):
if usehex:
privkey = binascii.unhexlify(privkey)
if len(privkey)==33 and privkey[-1] == "\x01":
compressed = True
privkey = privkey[:32]
elif len(privkey)==32:
compressed=False
else:
raise ValueError("Invalid private key")
sec = ebt.SecretToASecret(privkey, compressed=compressed,
addrtype=BTC_P2PK_VBYTE["mainnet"])
retval = ebt.public_key_from_private_key(sec)
if usehex:
return retval
return binascii.unhexlify(retval)
privtopub = privkey_to_pubkey
def privkey_to_address(privkey, magicbyte=0):
pubkey = privkey_to_pubkey(privkey)
return pubkey_to_address(pubkey, magicbyte)
privtoaddr = privkey_to_address
def pubkey_to_address(pub, magicbyte=0):
h160 = ebt.hash_160(pub.decode('hex'))
return ebt.hash_160_to_bc_address(h160, addrtype=magicbyte)
pubtoaddr = pubkey_to_address
def from_wif_privkey(privkey, vbyte=0):
#converts a WIF compressed privkey to a hex private key
return binascii.hexlify(ebt.ASecretToSecret(privkey, addrtype=vbyte))
def txhash(txhex):
t = etr.Transaction(txhex)
return t.hash()
#A simple copy-paste for now; move into support.py perhaps? TODO
def estimate_tx_size(ins, outs, txtype='p2pkh'):
'''Estimate transaction size.
Assuming p2pkh:
out: 8+1+3+2+20=34, in: 1+32+4+1+1+~73+1+1+33=147,
ver:4,seq:4, +2 (len in,out)
total ~= 34*len_out + 147*len_in + 10 (sig sizes vary slightly)
'''
if txtype == 'p2pkh':
return 10 + ins * 147 + 34 * outs
else:
raise NotImplementedError("Non p2pkh transaction size estimation not "
"yet implemented")
def mktx(ins, outs):
#Copy-paste from bitcoin-joinmarket
txobj = {"locktime": 0, "version": 1, "ins": [], "outs": []}
for i in ins:
if isinstance(i, dict) and "outpoint" in i:
txobj["ins"].append(i)
else:
if isinstance(i, dict) and "output" in i:
i = i["output"]
txobj["ins"].append({
"outpoint": {"hash": i[:64],
"index": int(i[65:])},
"script": "",
"sequence": 4294967295
})
for o in outs:
if not isinstance(o, dict):
addr = o[:o.find(':')]
val = int(o[o.find(':') + 1:])
o = {}
if re.match('^[0-9a-fA-F]*$', addr):
o["script"] = addr
else:
o["address"] = addr
o["value"] = val
outobj = {}
if "address" in o:
outobj["script"] = address_to_script(o["address"])
elif "script" in o:
outobj["script"] = o["script"]
else:
raise Exception("Could not find 'address' or 'script' in output.")
outobj["value"] = o["value"]
txobj["outs"].append(outobj)
return serialize(txobj)
def set_commitment_file(file_location):
global PODLE_COMMIT_FILE
PODLE_COMMIT_FILE = file_location
def test_btc():
#Sign and verify test (for message signing in joinmarket handshake)
print("Using interface " + interface)
priv = dbl_sha256("hello") + "01"
x = ecdsa_sign("helloxxx", priv)
log.debug("Got: " + x)
y = ecdsa_verify("helloxxx", x, privkey_to_pubkey(priv))
log.debug("Sig ver: " + str(y))
assert y
#address/script conversion test
test_addr = "1LT6rwv26bV7mgvRosoSCyGM7ttVRsYidP"
#Electrum has no support for testnet!
#test_test_addr = "mgvipZr8kX7fZFQU7QsKTCJT9QCfaiswV7"
assert script_to_address(address_to_script(test_addr))==test_addr
assert get_version_byte(test_addr)==0
#Transaction creation test.
raw_valid_tx = "01000000064cdfe43ad43b187b738644363144784a09bf6d408012409cf9934591109a789b060000006b483045022100d4309edbb8253e62fb59462f2ff5c3445923e0299bf1a15ac5f7db3da5752bee022066f3f219de7e6ee56c3d600da757ec1051cbd11b42969b8935ae35642b6a2e84012102e94b49525342110266a1dc7651221507318c4cb914ede004b3098650e9b951b6ffffffffc2a9b3e8285c2e7aaee2ea50f792172c920c43a675fa8e8d70976727c8752adf030000006a47304402202763d8ad9e41c99c5af587c69d267493773dc9567519a64db8b707af5daf07f0022011729c6d241ad5abe48687d084644bd442b5f9038db04fb28da674126183aca5012102d2cbeb9386fd201bc6eecf27b2858f7bc27462cd9b43ae464e9ef3281f97a3e0ffffffffa787e89792a93111ff08f5a083234c7c2410bd69b6eef42be0fc5f026a3a1cf0030000006b483045022100c3b86d7acadf1be3d8ea6706daedb842b09732621e830440481370d423703741022009fd0f90a07babd481f1011ec883b2aa248c6a4a433599c5b203c6b93fc03b67012103f9a47d3958281b6749921fdf6d9edde0176342c00ced7caacab9ab3a64795086ffffffff23fb90cebcb1784a7a4a0a35489356ba64cf95c0afdc5a0f0184dc22668ff41f050000006b483045022100ea698e5952e23ffdf6d58bdc73e91c555867e3ad99ac9b583f492882395ace9a0220705abe597972d45923fe0515695dd7b99dcfa50e69d49c03a8126180fd263bc70121036532aa886851548a5b62bff29b4c36bfdc33e68c7dbee8efb4b440e50c5ebc6effffffffd401de8afd8fd323ab6abd9db1d261ac69e7c1d2be7f1a40004e7659b7d6cd9b030000006b483045022100b09c4e7f227f2f86d1965edbc4c92b9058243300f3bc62a3169591aacb60ca4d0220390d0d7ae2ee7dab200e166337c65d4a62b576dc4fa138ce40efd240c57346fc0121034cd59665d736d927d9613c7624f8d616d483b06ab8993446f6119f18e22731feffffffff38b8b3ae5fe9ef09c9f1583c9d6cc128bbd2639d49aca97b7686a74ba91bb32a040000006a4730440220105d93aba953edf008cc5b16ac81c10d97db6e59a3e13062ceef7cc1fbffd2ad022027b14b4162d70c4448bec7cb086b4e52880b51b282de98019ec3038153e25ed0012102cdbfb52b3e164203845f72391a3a58205834a3ad473a9d9878488dc1594aa0d4ffffffff087edb0000000000001976a914a1e5f40c6171e91183533f16bbda35e45182bcfa88ac80d97800000000001976a91482985ea6f877d70692072af967af305005fc86fd88ac80d97800000000001976a914a698b206b9f654974afd2056c85c52f88e4c2b2488ac9970af05000000001976a914b05dbb0ede1191e2871209affd8a5922e0a3275288ac80d97800000000001976a914619b3b22b7b66220d22907b8600724aecc49f03488acabc80000000000001976a914911c8c57eb12aa2c1cdce92f82c7e0405a2f3c6988ac80d97800000000001976a91464cd0ed04862f2b7101e9394285d2b3066e5e4dc88ac13b14100000000001976a9143f81fa4fd890845882fbb5226539d9643c99f0f488ac00000000"
rvtxid = "4489a8cc933cb4e94915ead5b57b4aa707212c1f7b317187b500491e068c7887"
if interface == "joinmarket-electrum":
t = etr.Transaction(raw_valid_tx)
assert rvtxid == t.hash()
#Transaction deserialization/serialization test
#Electrum requires this call to fill out Transaction fields
t.deserialize()
#log.debug("Got inputs: " + str(t.inputs))
ourdeser = deserialize(t.raw)
ourraw = serialize(ourdeser)
#log.debug("Recreated: \n" + ourraw)
assert ourraw == raw_valid_tx
#double check round trip too
assert deserialize(ourraw) == ourdeser
txinslist = t.inputs()
elif interface == "joinmarket-joinmarket":
assert serialize(deserialize(raw_valid_tx)) == raw_valid_tx
t = deserialize(raw_valid_tx)
txinslist = t["ins"]
else:
raise NotImplementedError("No such interface?")
#Transaction signature verification tests.
#All currently assuming 100% p2pkh.
for i, tin in enumerate(txinslist):
if interface == "joinmarket-electrum":
script = address_to_script(tin["address"])
sig = tin["signatures"][0]
pub = tin["pubkeys"][0]
elif interface == "joinmarket-joinmarket":
log.debug("Joinmarket working with this script: " + tin["script"])
scriptSig = tin["script"]
#We need to parse out the pubkey, convert to address, then convert
#to a pubkeyscript; this assumes p2pkh. Note that this is handled
#internally by the joinmarket blockchain/maker/taker code, so only
#for tests.
pub = scriptSig[-66:]
script = address_to_script(pubkey_to_address(pub))
log.debug("Converted to this addr script: " + script)
#drop the length bytes from the start of sig and pub
sig = scriptSig[2:-68]
else:
raise NotImplementedError("No such interface?")
log.debug("Got sig, script, pub: " + " ".join([sig, script, pub]))
assert verify_tx_input(raw_valid_tx, i, script, sig, pub)
log.debug("Sig at: " + str(i) + " OK.")
#Note there are no transaction signing tests, as
#this is done by the wallet in this interface.
log.debug("All tests passed.")

230
client/client_protocol.py

@ -0,0 +1,230 @@
#! /usr/bin/env python
from __future__ import print_function
from twisted.python.log import startLogging, err
from twisted.internet import protocol, reactor
from twisted.protocols import amp
from twisted.internet.protocol import ClientFactory
from twisted.internet.endpoints import TCP4ClientEndpoint
import commands
from sys import stdout
import json
import random
import string
import time
import hashlib
import os
from joinmarketclient import (Taker, Wallet, jm_single, get_irc_mchannels,
load_program_config, get_log)
import btc
jlog = get_log()
class JMProtocolError(Exception):
pass
class JMTakerClientProtocol(amp.AMP):
def __init__(self, factory, taker, nick_priv=None):
self.taker = taker
self.factory = factory
self.orderbook = None
self.supported_messages = ["JM_UP", "JM_SETUP_DONE", "JM_FILL_RESPONSE",
"JM_OFFERS", "JM_SIG_RECEIVED",
"JM_REQUEST_MSGSIG",
"JM_REQUEST_MSGSIG_VERIFY", "JM_INIT_PROTO"]
if not nick_priv:
self.nick_priv = hashlib.sha256(os.urandom(16)).hexdigest() + '01'
else:
self.nick_priv = nick_priv
def checkClientResponse(self, response):
"""A generic check of client acceptance; any failure
is considered criticial.
"""
if 'accepted' not in response or not response['accepted']:
reactor.stop()
def connectionMade(self):
"""Upon confirmation of network connection
to daemon, request message channel initialization
with relevant config data for our message channels
"""
#needed only for channel naming convention
blockchain_source = jm_single().config.get("BLOCKCHAIN",
"blockchain_source")
network = jm_single().config.get("BLOCKCHAIN", "network")
irc_configs = get_irc_mchannels()
minmakers = jm_single().config.getint("POLICY", "minimum_makers")
maker_timeout_sec = jm_single().maker_timeout_sec
d = self.callRemote(commands.JMInit,
bcsource=blockchain_source,
network=network,
irc_configs=json.dumps(irc_configs),
minmakers=minmakers,
maker_timeout_sec=maker_timeout_sec)
d.addCallback(self.checkClientResponse)
def send_data(self, cmd, data):
JMProtocol.send_data(self, cmd, data)
def set_nick(self):
self.nick_pubkey = btc.privtopub(self.nick_priv)
self.nick_pkh_raw = hashlib.sha256(self.nick_pubkey).digest()[
:self.nick_hashlen]
self.nick_pkh = btc.changebase(self.nick_pkh_raw, 256, 58)
#right pad to maximum possible; b58 is not fixed length.
#Use 'O' as one of the 4 not included chars in base58.
self.nick_pkh += 'O' * (self.nick_maxencoded - len(self.nick_pkh))
#The constructed length will be 1 + 1 + NICK_MAX_ENCODED
self.nick = self.nick_header + str(self.jm_version) + self.nick_pkh
jm_single().nickname = self.nick
@commands.JMInitProto.responder
def on_JM_INIT_PROTO(self, nick_hash_length, nick_max_encoded,
joinmarket_nick_header, joinmarket_version):
"""Daemon indicates init-ed status and passes back protocol constants.
Use protocol settings to set actual nick from nick private key,
then call setup to instantiate message channel connections in the daemon.
"""
self.nick_hashlen = nick_hash_length
self.nick_maxencoded = nick_max_encoded
self.nick_header = joinmarket_nick_header
self.jm_version = joinmarket_version
self.set_nick()
d = self.callRemote(commands.JMStartMC,
nick=self.nick)
d.addCallback(self.checkClientResponse)
return {'accepted': True}
@commands.JMUp.responder
def on_JM_UP(self):
d = self.callRemote(commands.JMSetup,
role="TAKER",
n_counterparties=4) #TODO this number should be set
d.addCallback(self.checkClientResponse)
return {'accepted': True}
@commands.JMSetupDone.responder
def on_JM_SETUP_DONE(self):
jlog.info("JM daemon setup complete")
#The daemon is ready and has requested the orderbook
#from the pit; we can request the entire orderbook
#and filter it as we choose.
reactor.callLater(jm_single().maker_timeout_sec, self.get_offers)
return {'accepted': True}
@commands.JMFillResponse.responder
def on_JM_FILL_RESPONSE(self, success, ioauth_data):
"""Receives the entire set of phase 1 data (principally utxos)
from the counterparties and passes through to the Taker for
tx construction, if successful. Then passes back the phase 2
initiating data to the daemon.
"""
ioauth_data = json.loads(ioauth_data)
if not success:
jlog.info("Makers didnt respond blah blah")
return {'accepted': True}
else:
jlog.info("Makers responded with: " + json.dumps(ioauth_data))
retval = self.taker.receive_utxos(ioauth_data)
if not retval[0]:
jlog.info("Taker is not continuing, phase 2 abandoned.")
jlog.info("Reason: " + str(retval[1]))
return {'accepted': False}
else:
nick_list, txhex = retval[1:]
reactor.callLater(0, self.make_tx, nick_list, txhex)
return {'accepted': True}
@commands.JMOffers.responder
def on_JM_OFFERS(self, orderbook):
self.orderbook = json.loads(orderbook)
jlog.info("Got the orderbook: " + str(self.orderbook))
retval = self.taker.initialize(self.orderbook)
#format of retval is:
#(True, self.cjamount, commitment, revelation, self.filtered_orderbook)
if not retval[0]:
jlog.info("Taker not continuing after receipt of orderbook")
return
amt, cmt, rev, foffers = retval[1:]
d = self.callRemote(commands.JMFill,
amount=amt,
commitment=str(cmt),
revelation=str(rev),
filled_offers=json.dumps(foffers))
d.addCallback(self.checkClientResponse)
return {'accepted': True}
@commands.JMSigReceived.responder
def on_JM_SIG_RECEIVED(self, nick, sig):
retval = self.taker.on_sig(nick, sig)
if retval:
#flag indicating completion; the Taker handles
#tx pushing itself, so just update the state here
self.state = 4
return {'accepted': True}
@commands.JMRequestMsgSig.responder
def on_JM_REQUEST_MSGSIG(self, nick, cmd, msg, msg_to_be_signed, hostid):
sig = btc.ecdsa_sign(str(msg_to_be_signed), self.nick_priv)
msg_to_return = str(msg) + " " + self.nick_pubkey + " " + sig
d = self.callRemote(commands.JMMsgSignature,
nick=nick,
cmd=cmd,
msg_to_return=msg_to_return,
hostid=hostid)
d.addCallback(self.checkClientResponse)
return {'accepted': True}
@commands.JMRequestMsgSigVerify.responder
def on_JM_REQUEST_MSGSIG_VERIFY(self, msg, fullmsg, sig, pubkey, nick,
hashlen, max_encoded, hostid):
jlog.info("Got a request to verify a signature")
verif_result = True
if not btc.ecdsa_verify(str(msg), sig, pubkey):
jlog.debug("nick signature verification failed, ignoring.")
verif_result = False
#check that nick matches hash of pubkey
nick_pkh_raw = hashlib.sha256(pubkey).digest()[:hashlen]
nick_stripped = nick[2:2 + max_encoded]
#strip right padding
nick_unpadded = ''.join([x for x in nick_stripped if x != 'O'])
if not nick_unpadded == btc.changebase(nick_pkh_raw, 256, 58):
jlog.debug("Nick hash check failed, expected: " + str(nick_unpadded)
+ ", got: " + str(btc.changebase(nick_pkh_raw, 256, 58)))
verif_result = False
jlog.info("Sending a verifcation result: " + str(verif_result))
d = self.callRemote(commands.JMMsgSignatureVerify,
verif_result=verif_result,
nick=nick,
fullmsg=fullmsg,
hostid=hostid)
d.addCallback(self.checkClientResponse)
return {'accepted': True}
def get_offers(self):
d = self.callRemote(commands.JMRequestOffers)
d.addCallback(self.checkClientResponse)
def make_tx(self, nick_list, txhex):
d = self.callRemote(commands.JMMakeTx,
nick_list=json.dumps(nick_list),
txhex=txhex)
d.addCallback(self.checkClientResponse)
class JMTakerClientProtocolFactory(protocol.ClientFactory):
protocol = JMTakerClientProtocol
def __init__(self, taker):
self.taker = taker
def buildProtocol(self, addr):
return JMTakerClientProtocol(self, self.taker)
def start_reactor(host, port, factory, ish=True):
reactor.connectTCP(host, port, factory)
reactor.run(installSignalHandlers=ish)
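#A minimal wiring sketch for the classes above; everything below is
#illustrative (the host, port and the placeholder taker are assumptions,
#not values defined in this file) and is kept under a __main__ guard so
#nothing runs on import.
if __name__ == "__main__":
    #assumed: a taker object exposing initialize(), receive_utxos() and
    #on_sig() as called by JMTakerClientProtocol above; its construction
    #lives in the Taker code elsewhere in this commit
    example_taker = None  #placeholder; replace with a real taker instance
    factory = JMTakerClientProtocolFactory(example_taker)
    #start_reactor blocks in reactor.run() until reactor.stop() is called,
    #e.g. by checkClientResponse() on a rejected command
    start_reactor("localhost", 27183, factory)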

92
client/commands.py

@ -0,0 +1,92 @@
from twisted.protocols.amp import Integer, String, Unicode, Boolean, Command
class DaemonNotReady(Exception):
pass
class JMCommand(Command):
#a default response type
response = [('accepted', Boolean())]
#commands from client to daemon
class JMInit(JMCommand):
arguments = [('bcsource', String()),
('network', String()),
('irc_configs', String()),
('minmakers', Integer()),
('maker_timeout_sec', Integer())]
errors = {DaemonNotReady: 'daemon is not ready'}
class JMStartMC(JMCommand):
arguments = [('nick', String())]
class JMSetup(JMCommand):
arguments = [('role', String()),
('n_counterparties', Integer())]
class JMRequestOffers(JMCommand):
arguments = []
class JMFill(JMCommand):
arguments = [('amount', Integer()),
('commitment', String()),
('revelation', String()),
('filled_offers', String())]
class JMMakeTx(JMCommand):
arguments = [('nick_list', String()),
('txhex', String())]
class JMMsgSignature(JMCommand):
arguments = [('nick', String()),
('cmd', String()),
('msg_to_return', String()),
('hostid', String())]
class JMMsgSignatureVerify(JMCommand):
arguments = [('verif_result', Boolean()),
('nick', String()),
('fullmsg', String()),
('hostid', String())]
#commands from daemon to client
class JMInitProto(JMCommand):
arguments = [('nick_hash_length', Integer()),
('nick_max_encoded', Integer()),
('joinmarket_nick_header', String()),
('joinmarket_version', Integer())]
class JMUp(JMCommand):
arguments = []
class JMSetupDone(JMCommand):
arguments = []
class JMOffers(JMCommand):
arguments = [('orderbook', String())]
class JMFillResponse(JMCommand):
arguments = [('success', Boolean()),
('ioauth_data', String())]
class JMSigReceived(JMCommand):
arguments = [('nick', String()),
('sig', String())]
class JMRequestMsgSig(JMCommand):
arguments = [('nick', String()),
('cmd', String()),
('msg', String()),
('msg_to_be_signed', String()),
('hostid', String())]
class JMRequestMsgSigVerify(JMCommand):
arguments = [('msg', String()),
('fullmsg', String()),
('sig', String()),
('pubkey', String()),
('nick', String()),
('hashlen', Integer()),
('max_encoded', Integer()),
('hostid', String())]
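#A daemon-side responder sketch (illustrative only; the real daemon code
#lives elsewhere in this commit). It mirrors the client-side pattern:
#attach one responder per command, return the default {'accepted': ...}
#response, and raise DaemonNotReady to signal the mapped error. The class
#name and the 'ready' flag are assumptions, not part of this module's API.
from twisted.protocols.amp import AMP

class ExampleJMInitResponder(AMP):
    ready = False  #hypothetical readiness flag, set True once initialised

    @JMInit.responder
    def on_JM_INIT(self, bcsource, network, irc_configs, minmakers,
                   maker_timeout_sec):
        #reject the init request until the daemon is ready
        if not self.ready:
            raise DaemonNotReady()
        return {'accepted': True}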

376
client/configure.py

@ -0,0 +1,376 @@
from __future__ import print_function
import io
import logging
import threading
import os
import binascii
import sys
from ConfigParser import SafeConfigParser, NoOptionError
import btc
from joinmarketclient.jsonrpc import JsonRpc
from joinmarketclient.support import get_log, joinmarket_alert, core_alert, debug_silence
from joinmarketclient.podle import set_commitment_file
log = get_log()
class AttributeDict(object):
"""
A class to convert a nested Dictionary into an object with key-values
accessible using attribute notation (AttributeDict.attribute) instead of
key notation (Dict["key"]). This class recursively sets Dicts to objects,
allowing you to recurse down nested dicts (like: AttributeDict.attr.attr)
"""
def __init__(self, **entries):
self.add_entries(**entries)
def add_entries(self, **entries):
for key, value in entries.items():
if type(value) is dict:
self.__dict__[key] = AttributeDict(**value)
else:
self.__dict__[key] = value
def __setattr__(self, name, value):
if name == 'nickname' and value:
logFormatter = logging.Formatter(
('%(asctime)s [%(threadName)-12.12s] '
'[%(levelname)-5.5s] %(message)s'))
logsdir = os.path.join(os.path.dirname(
global_singleton.config_location), "logs")
fileHandler = logging.FileHandler(
logsdir + '/{}.log'.format(value))
fileHandler.setFormatter(logFormatter)
log.addHandler(fileHandler)
super(AttributeDict, self).__setattr__(name, value)
def __getitem__(self, key):
"""
Provides dict-style access to attributes
"""
return getattr(self, key)
global_singleton = AttributeDict()
global_singleton.JM_VERSION = 5
global_singleton.nickname = None
global_singleton.BITCOIN_DUST_THRESHOLD = 2730
global_singleton.DUST_THRESHOLD = 10 * global_singleton.BITCOIN_DUST_THRESHOLD
global_singleton.bc_interface = None
global_singleton.maker_timeout_sec = 60
global_singleton.debug_file_lock = threading.Lock()
global_singleton.ordername_list = ["reloffer", "absoffer"]
global_singleton.debug_file_handle = None
global_singleton.blacklist_file_lock = threading.Lock()
global_singleton.core_alert = core_alert
global_singleton.joinmarket_alert = joinmarket_alert
global_singleton.debug_silence = debug_silence
global_singleton.config = SafeConfigParser()
#This is reset to a full path after load_program_config call
global_singleton.config_location = 'joinmarket.cfg'
#as above
global_singleton.commit_file_location = 'cmttools/commitments.json'
global_singleton.wait_for_commitments = 0
def jm_single():
return global_singleton
# FIXME: Add rpc_* options here in the future!
required_options = {'BLOCKCHAIN': ['blockchain_source', 'network'],
'MESSAGING': ['host', 'channel', 'port'],
'POLICY': ['absurd_fee_per_kb', 'taker_utxo_retries',
'taker_utxo_age', 'taker_utxo_amtpercent']}
defaultconfig = \
"""
[BLOCKCHAIN]
blockchain_source = blockr
#options: blockr, bitcoin-rpc, regtest
# for instructions on bitcoin-rpc read
# https://github.com/chris-belcher/joinmarket/wiki/Running-JoinMarket-with-Bitcoin-Core-full-node
network = mainnet
rpc_host = localhost
rpc_port = 8332
rpc_user = bitcoin
rpc_password = password
[MESSAGING]
host = irc.cyberguerrilla.org
channel = joinmarket-pit
port = 6697
usessl = true
socks5 = false
socks5_host = localhost
socks5_port = 9050
#for tor
#host = 6dvj6v5imhny3anf.onion
#onion / i2p have their own ports on CGAN
#port = 6698
#usessl = true
#socks5 = true
[TIMEOUT]
maker_timeout_sec = 30
unconfirm_timeout_sec = 90
confirm_timeout_hours = 6
[POLICY]
# for dust sweeping, try merge_algorithm = gradual
# for more rapid dust sweeping, try merge_algorithm = greedy
# for most rapid dust sweeping, try merge_algorithm = greediest
# but don't forget to bump your miner fees!
merge_algorithm = default
# the fee estimate is based on a projection of how many satoshis
# per kB are needed to get in one of the next N blocks, N set here
# as the value of 'tx_fees'. This estimate is high if you set N=1,
# so we choose N=3 for a more reasonable figure,
# as our default. Note that for clients not using a local blockchain
# instance, we retrieve an estimate from the API at blockcypher.com, currently.
tx_fees = 3
# For users getting transaction fee estimates over an API
# (currently blockcypher, could be others), place a sanity
# check limit on the satoshis-per-kB to be paid. This limit
# is also applied to users using Core, even though Core has its
# own sanity check limit, which is currently 1,000,000 satoshis.
absurd_fee_per_kb = 150000
# the range of confirmations passed to the `listunspent` bitcoind RPC call
# 1st value is the inclusive minimum, defaults to one confirmation
# 2nd value is the exclusive maximum, defaults to most-positive-bignum (Google Me!)
# leaving it unset or empty defers to bitcoind's default values, ie [1, 9999999]
#listunspent_args = []
# that's what you should do, unless you have a specific reason, eg:
# !!! WARNING !!! CONFIGURING THIS WHILE TAKING LIQUIDITY FROM
# !!! WARNING !!! THE PUBLIC ORDERBOOK LEAKS YOUR INPUT MERGES
# spend from unconfirmed transactions: listunspent_args = [0]
# display only unconfirmed transactions: listunspent_args = [0, 1]
# defend against small reorganizations: listunspent_args = [3]
# who is at risk of reorganization?: listunspent_args = [0, 2]
# NB: using 0 for the 1st value with scripts other than wallet-tool could cause
# spends from unconfirmed inputs, which may then get malleated or double-spent!
# other counterparties are likely to reject unconfirmed inputs... don't do it.
#options: self, random-peer, not-self, random-maker
# self = broadcast transaction with your own ip
# random-peer = everyone who took part in the coinjoin has a chance of broadcasting
# not-self = never broadcast with your own ip
# random-maker = every peer on joinmarket has a chance of broadcasting, including yourself
tx_broadcast = self
minimum_makers = 2
#THE FOLLOWING SETTINGS ARE REQUIRED TO DEFEND AGAINST SNOOPERS.
#DON'T ALTER THEM UNLESS YOU UNDERSTAND THE IMPLICATIONS.
# number of retries allowed for a specific utxo, to prevent DOS/snooping.
# Lower settings make snooping more expensive, but also prevent honest users
# from retrying if an error occurs.
taker_utxo_retries = 3
# number of confirmations required for the commitment utxo mentioned above.
# this effectively rate-limits a snooper.
taker_utxo_age = 5
# percentage of coinjoin amount that the commitment utxo must have
# as a minimum BTC amount. Thus 20 means a 1BTC coinjoin requires the
# utxo to be at least 0.2 btc.
taker_utxo_amtpercent = 20
#Set to 1 to accept broadcast PoDLE commitments from other bots, and
#add them to your blacklist (only relevant for Makers).
#There is no way to spoof these values, so the only "risk" is that
#someone fills your blacklist file with a lot of data.
accept_commitment_broadcasts = 1
#Location of your commitments.json file (stores commitments you've used
#and those you want to use in future), relative to root joinmarket directory.
commit_file_location = cmttools/commitments.json
"""
def get_irc_mchannels():
fields = [("host", str), ("port", int), ("channel", str), ("usessl", str),
("socks5", str), ("socks5_host", str), ("socks5_port", str)]
configdata = {}
for f, t in fields:
vals = jm_single().config.get("MESSAGING", f).split(",")
if t == str:
vals = [x.strip() for x in vals]
else:
vals = [t(x) for x in vals]
configdata[f] = vals
configs = []
for i in range(len(configdata['host'])):
newconfig = dict([(x, configdata[x][i]) for x in configdata])
newconfig['btcnet'] = get_network()
configs.append(newconfig)
return configs
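#Illustrative example of the shape returned (hosts, ports and channel names
#here are assumptions, not defaults): with a MESSAGING section containing
#    host = irc.host1.org, irc.host2.org
#    channel = joinmarket-pit, joinmarket-pit
#    port = 6697, 6667
#and the remaining fields similarly comma-separated (one value per host),
#get_irc_mchannels() returns one dict per host, e.g.
#    [{'host': 'irc.host1.org', 'channel': 'joinmarket-pit', 'port': 6697,
#      ..., 'btcnet': 'mainnet'},
#     {'host': 'irc.host2.org', 'channel': 'joinmarket-pit', 'port': 6667,
#      ..., 'btcnet': 'mainnet'}]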
def get_config_irc_channel(channel_name):
channel = "#" + channel_name
if get_network() == 'testnet':
channel += '-test'
return channel
def get_network():
"""Returns network name"""
return global_singleton.config.get("BLOCKCHAIN", "network")
def get_p2sh_vbyte():
return btc.BTC_P2SH_VBYTE[get_network()]
def get_p2pk_vbyte():
return btc.BTC_P2PK_VBYTE[get_network()]
def validate_address(addr):
try:
ver = btc.get_version_byte(addr)
except AssertionError:
return False, 'Checksum wrong. Typo in address?'
except Exception:
return False, "Invalid bitcoin address"
if ver != get_p2pk_vbyte() and ver != get_p2sh_vbyte():
return False, 'Wrong address version. Testnet/mainnet confused?'
if len(btc.b58check_to_bin(addr)) != 20:
return False, "Address has correct checksum but wrong length."
return True, 'address validated'
def donation_address(reusable_donation_pubkey=None):
if not reusable_donation_pubkey:
reusable_donation_pubkey = ('02be838257fbfddabaea03afbb9f16e852'
'9dfe2de921260a5c46036d97b5eacf2a')
sign_k = binascii.hexlify(os.urandom(32))
c = btc.sha256(btc.multiply(sign_k, reusable_donation_pubkey, True))
sender_pubkey = btc.add_pubkeys(
[reusable_donation_pubkey, btc.privtopub(c + '01', True)], True)
sender_address = btc.pubtoaddr(sender_pubkey, get_p2pk_vbyte())
log.debug('sending coins to ' + sender_address)
return sender_address, sign_k
def check_utxo_blacklist(commitment, persist=False):
"""Compare a given commitment (H(P2) for PoDLE)
with the persisted blacklist log file;
if it has been used before, return False (disallowed),
else return True.
If flagged, persist the usage of this commitment to the blacklist file.
"""
#TODO format error checking?
fname = "blacklist"
if jm_single().config.get("BLOCKCHAIN", "blockchain_source") == 'regtest':
fname += "_" + jm_single().nickname
with jm_single().blacklist_file_lock:
if os.path.isfile(fname):
with open(fname, "rb") as f:
blacklisted_commitments = [x.strip() for x in f.readlines()]
else:
blacklisted_commitments = []
if commitment in blacklisted_commitments:
return False
elif persist:
blacklisted_commitments += [commitment]
with open(fname, "wb") as f:
f.write('\n'.join(blacklisted_commitments))
f.flush()
#If the commitment is new and we are *not* persisting, nothing to do
#(we only add it to the list on sending io_auth, which represents actual
#usage).
return True
def load_program_config(config_path=None, bs=None):
global_singleton.config.readfp(io.BytesIO(defaultconfig))
if not config_path:
config_path = os.getcwd()
global_singleton.config_location = os.path.join(
config_path, global_singleton.config_location)
loadedFiles = global_singleton.config.read([global_singleton.config_location
])
#Hack required for electrum; must be able to enforce a different
#blockchain interface even in default/new load.
if bs:
global_singleton.config.set("BLOCKCHAIN", "blockchain_source", bs)
# Create default config file if not found
if len(loadedFiles) != 1:
with open(global_singleton.config_location, "w") as configfile:
configfile.write(defaultconfig)
# check for sections
for s in required_options:
if s not in global_singleton.config.sections():
raise Exception(
"Config file does not contain the required section: " + s)
# then check for specific options
for k, v in required_options.iteritems():
for o in v:
if o not in global_singleton.config.options(k):
raise Exception(
"Config file does not contain the required option: " + o)
try:
global_singleton.maker_timeout_sec = global_singleton.config.getint(
'TIMEOUT', 'maker_timeout_sec')
except NoOptionError:
log.debug('TIMEOUT/maker_timeout_sec not found in .cfg file, '
'using default value')
# configure the interface to the blockchain on startup
global_singleton.bc_interface = get_blockchain_interface_instance(
global_singleton.config)
#set the location of the commitments file
try:
global_singleton.commit_file_location = global_singleton.config.get(
"POLICY", "commit_file_location")
except NoOptionError:
log.debug("No commitment file location in config, using default "
"location cmttools/commitments.json")
set_commitment_file(os.path.join(config_path,
global_singleton.commit_file_location))
def get_blockchain_interface_instance(_config):
# todo: refactor joinmarket module to get rid of loops
# importing here is necessary to avoid import loops
from joinmarketclient.blockchaininterface import BitcoinCoreInterface, \
RegtestBitcoinCoreInterface, BlockrInterface, ElectrumWalletInterface
from joinmarketclient.blockchaininterface import CliJsonRpc
source = _config.get("BLOCKCHAIN", "blockchain_source")
network = get_network()
testnet = network == 'testnet'
if source == 'bitcoin-rpc':
rpc_host = _config.get("BLOCKCHAIN", "rpc_host")
rpc_port = _config.get("BLOCKCHAIN", "rpc_port")
rpc_user = _config.get("BLOCKCHAIN", "rpc_user")
rpc_password = _config.get("BLOCKCHAIN", "rpc_password")
rpc = JsonRpc(rpc_host, rpc_port, rpc_user, rpc_password)
bc_interface = BitcoinCoreInterface(rpc, network)
elif source == 'json-rpc':
bitcoin_cli_cmd = _config.get("BLOCKCHAIN",
"bitcoin_cli_cmd").split(' ')
rpc = CliJsonRpc(bitcoin_cli_cmd, testnet)
bc_interface = BitcoinCoreInterface(rpc, network)
elif source == 'regtest':
rpc_host = _config.get("BLOCKCHAIN", "rpc_host")
rpc_port = _config.get("BLOCKCHAIN", "rpc_port")
rpc_user = _config.get("BLOCKCHAIN", "rpc_user")
rpc_password = _config.get("BLOCKCHAIN", "rpc_password")
rpc = JsonRpc(rpc_host, rpc_port, rpc_user, rpc_password)
bc_interface = RegtestBitcoinCoreInterface(rpc)
elif source == 'blockr':
bc_interface = BlockrInterface(testnet)
elif source == 'electrum':
bc_interface = ElectrumWalletInterface(testnet)
else:
raise ValueError("Invalid blockchain source")
return bc_interface
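#Typical startup sketch for this module (illustrative; run from the
#directory that holds, or should hold, joinmarket.cfg):
if __name__ == "__main__":
    #this parses joinmarket.cfg (writing a default one if none exists),
    #checks the required sections/options and builds the blockchain
    #interface selected by [BLOCKCHAIN] blockchain_source
    load_program_config()
    print(jm_single().config.get("BLOCKCHAIN", "network"))
    print(get_irc_mchannels())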

119
client/jsonrpc.py

@ -0,0 +1,119 @@
from __future__ import absolute_import, print_function
# Copyright (C) 2013,2015 by Daniel Kraft <d@domob.eu>
# Copyright (C) 2014 by phelix / blockchained.com
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import base64
import httplib
import json
class JsonRpcError(Exception):
"""
The called method returned an error in the JSON-RPC response.
"""
def __init__(self, obj):
self.code = obj["code"]
self.message = obj["message"]
class JsonRpcConnectionError(Exception):
"""
Error thrown when the RPC connection itself failed. This means
that the server is either down or the connection settings
are wrong.
"""
pass
class JsonRpc(object):
"""
Simple implementation of a JSON-RPC client that is used
to connect to Bitcoin.
"""
def __init__(self, host, port, user, password):
self.host = host
self.port = port
self.authstr = "%s:%s" % (user, password)
self.queryId = 1
def queryHTTP(self, obj):
"""
Send an appropriate HTTP query to the server. The JSON-RPC
request should be (as object) in 'obj'. If the call succeeds,
the resulting JSON object is returned. In case of an error
with the connection (not JSON-RPC itself), an exception is raised.
"""
headers = {"User-Agent": "joinmarket",
"Content-Type": "application/json",
"Accept": "application/json"}
headers["Authorization"] = "Basic %s" % base64.b64encode(self.authstr)
body = json.dumps(obj)
try:
conn = httplib.HTTPConnection(self.host, self.port)
conn.request("POST", "", body, headers)
response = conn.getresponse()
if response.status == 401:
conn.close()
raise JsonRpcConnectionError(
"authentication for JSON-RPC failed")
# All of the codes below are 'fine' from a JSON-RPC point of view.
if response.status not in [200, 404, 500]:
conn.close()
raise JsonRpcConnectionError("unknown error in JSON-RPC")
data = response.read()
conn.close()
return json.loads(data)
except JsonRpcConnectionError as exc:
raise exc
except Exception as exc:
raise JsonRpcConnectionError("JSON-RPC connection failed. Err:" +
repr(exc))
def call(self, method, params):
"""
Call a method over JSON-RPC.
"""
currentId = self.queryId
self.queryId += 1
request = {"method": method, "params": params, "id": currentId}
response = self.queryHTTP(request)
if response["id"] != currentId:
raise JsonRpcConnectionError("invalid id returned by query")
if response["error"] is not None:
raise JsonRpcError(response["error"])
return response["result"]

267
client/old_mnemonic.py

@ -0,0 +1,267 @@
#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2011 thomasv@gitorious
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# list of words from http://en.wiktionary.org/wiki/Wiktionary:Frequency_lists/Contemporary_poetry
words = [
"like", "just", "love", "know", "never", "want", "time", "out", "there",
"make", "look", "eye", "down", "only", "think", "heart", "back", "then",
"into", "about", "more", "away", "still", "them", "take", "thing", "even",
"through", "long", "always", "world", "too", "friend", "tell", "try",
"hand", "thought", "over", "here", "other", "need", "smile", "again",
"much", "cry", "been", "night", "ever", "little", "said", "end", "some",
"those", "around", "mind", "people", "girl", "leave", "dream", "left",
"turn", "myself", "give", "nothing", "really", "off", "before", "something",
"find", "walk", "wish", "good", "once", "place", "ask", "stop", "keep",
"watch", "seem", "everything", "wait", "got", "yet", "made", "remember",
"start", "alone", "run", "hope", "maybe", "believe", "body", "hate",
"after", "close", "talk", "stand", "own", "each", "hurt", "help", "home",
"god", "soul", "new", "many", "two", "inside", "should", "true", "first",
"fear", "mean", "better", "play", "another", "gone", "change", "use",
"wonder", "someone", "hair", "cold", "open", "best", "any", "behind",
"happen", "water", "dark", "laugh", "stay", "forever", "name", "work",
"show", "sky", "break", "came", "deep", "door", "put", "black", "together",
"upon", "happy", "such", "great", "white", "matter", "fill", "past",
"please", "burn", "cause", "enough", "touch", "moment", "soon", "voice",
"scream", "anything", "stare", "sound", "red", "everyone", "hide", "kiss",
"truth", "death", "beautiful", "mine", "blood", "broken", "very", "pass",
"next", "forget", "tree", "wrong", "air", "mother", "understand", "lip",
"hit", "wall", "memory", "sleep", "free", "high", "realize", "school",
"might", "skin", "sweet", "perfect", "blue", "kill", "breath", "dance",
"against", "fly", "between", "grow", "strong", "under", "listen", "bring",
"sometimes", "speak", "pull", "person", "become", "family", "begin",
"ground", "real", "small", "father", "sure", "feet", "rest", "young",
"finally", "land", "across", "today", "different", "guy", "line", "fire",
"reason", "reach", "second", "slowly", "write", "eat", "smell", "mouth",
"step", "learn", "three", "floor", "promise", "breathe", "darkness", "push",
"earth", "guess", "save", "song", "above", "along", "both", "color",
"house", "almost", "sorry", "anymore", "brother", "okay", "dear", "game",
"fade", "already", "apart", "warm", "beauty", "heard", "notice", "question",
"shine", "began", "piece", "whole", "shadow", "secret", "street", "within",
"finger", "point", "morning", "whisper", "child", "moon", "green", "story",
"glass", "kid", "silence", "since", "soft", "yourself", "empty", "shall",
"angel", "answer", "baby", "bright", "dad", "path", "worry", "hour", "drop",
"follow", "power", "war", "half", "flow", "heaven", "act", "chance", "fact",
"least", "tired", "children", "near", "quite", "afraid", "rise", "sea",
"taste", "window", "cover", "nice", "trust", "lot", "sad", "cool", "force",
"peace", "return", "blind", "easy", "ready", "roll", "rose", "drive",
"held", "music", "beneath", "hang", "mom", "paint", "emotion", "quiet",
"clear", "cloud", "few", "pretty", "bird", "outside", "paper", "picture",
"front", "rock", "simple", "anyone", "meant", "reality", "road", "sense",
"waste", "bit", "leaf", "thank", "happiness", "meet", "men", "smoke",
"truly", "decide", "self", "age", "book", "form", "alive", "carry",
"escape", "damn", "instead", "able", "ice", "minute", "throw", "catch",
"leg", "ring", "course", "goodbye", "lead", "poem", "sick", "corner",
"desire", "known", "problem", "remind", "shoulder", "suppose", "toward",
"wave", "drink", "jump", "woman", "pretend", "sister", "week", "human",
"joy", "crack", "grey", "pray", "surprise", "dry", "knee", "less", "search",
"bleed", "caught", "clean", "embrace", "future", "king", "son", "sorrow",
"chest", "hug", "remain", "sat", "worth", "blow", "daddy", "final",
"parent", "tight", "also", "create", "lonely", "safe", "cross", "dress",
"evil", "silent", "bone", "fate", "perhaps", "anger", "class", "scar",
"snow", "tiny", "tonight", "continue", "control", "dog", "edge", "mirror",
"month", "suddenly", "comfort", "given", "loud", "quickly", "gaze", "plan",
"rush", "stone", "town", "battle", "ignore", "spirit", "stood", "stupid",
"yours", "brown", "build", "dust", "hey", "kept", "pay", "phone", "twist",
"although", "ball", "beyond", "hidden", "nose", "taken", "fail", "float",
"pure", "somehow", "wash", "wrap", "angry", "cheek", "creature",
"forgotten", "heat", "rip", "single", "space", "special", "weak",
"whatever", "yell", "anyway", "blame", "job", "choose", "country", "curse",
"drift", "echo", "figure", "grew", "laughter", "neck", "suffer", "worse",
"yeah", "disappear", "foot", "forward", "knife", "mess", "somewhere",
"stomach", "storm", "beg", "idea", "lift", "offer", "breeze", "field",
"five", "often", "simply", "stuck", "win", "allow", "confuse", "enjoy",
"except", "flower", "seek", "strength", "calm", "grin", "gun", "heavy",
"hill", "large", "ocean", "shoe", "sigh", "straight", "summer", "tongue",
"accept", "crazy", "everyday", "exist", "grass", "mistake", "sent", "shut",
"surround", "table", "ache", "brain", "destroy", "heal", "nature", "shout",
"sign", "stain", "choice", "doubt", "glance", "glow", "mountain", "queen",
"stranger", "throat", "tomorrow", "city", "either", "fish", "flame",
"rather", "shape", "spin", "spread", "ash", "distance", "finish", "image",
"imagine", "important", "nobody", "shatter", "warmth", "became", "feed",
"flesh", "funny", "lust", "shirt", "trouble", "yellow", "attention", "bare",
"bite", "money", "protect", "amaze", "appear", "born", "choke",
"completely", "daughter", "fresh", "friendship", "gentle", "probably",
"six", "deserve", "expect", "grab", "middle", "nightmare", "river",
"thousand", "weight", "worst", "wound", "barely", "bottle", "cream",
"regret", "relationship", "stick", "test", "crush", "endless", "fault",
"itself", "rule", "spill", "art", "circle", "join", "kick", "mask",
"master", "passion", "quick", "raise", "smooth", "unless", "wander",
"actually", "broke", "chair", "deal", "favorite", "gift", "note", "number",
"sweat", "box", "chill", "clothes", "lady", "mark", "park", "poor",
"sadness", "tie", "animal", "belong", "brush", "consume", "dawn", "forest",
"innocent", "pen", "pride", "stream", "thick", "clay", "complete", "count",
"draw", "faith", "press", "silver", "struggle", "surface", "taught",
"teach", "wet", "bless", "chase", "climb", "enter", "letter", "melt",
"metal", "movie", "stretch", "swing", "vision", "wife", "beside", "crash",
"forgot", "guide", "haunt", "joke", "knock", "plant", "pour", "prove",
"reveal", "steal", "stuff", "trip", "wood", "wrist", "bother", "bottom",
"crawl", "crowd", "fix", "forgive", "frown", "grace", "loose", "lucky",
"party", "release", "surely", "survive", "teacher", "gently", "grip",
"speed", "suicide", "travel", "treat", "vein", "written", "cage", "chain",
"conversation", "date", "enemy", "however", "interest", "million", "page",
"pink", "proud", "sway", "themselves", "winter", "church", "cruel", "cup",
"demon", "experience", "freedom", "pair", "pop", "purpose", "respect",
"shoot", "softly", "state", "strange", "bar", "birth", "curl", "dirt",
"excuse", "lord", "lovely", "monster", "order", "pack", "pants", "pool",
"scene", "seven", "shame", "slide", "ugly", "among", "blade", "blonde",
"closet", "creek", "deny", "drug", "eternity", "gain", "grade", "handle",
"key", "linger", "pale", "prepare", "swallow", "swim", "tremble", "wheel",
"won", "cast", "cigarette", "claim", "college", "direction", "dirty",
"gather", "ghost", "hundred", "loss", "lung", "orange", "present", "swear",
"swirl", "twice", "wild", "bitter", "blanket", "doctor", "everywhere",
"flash", "grown", "knowledge", "numb", "pressure", "radio", "repeat",
"ruin", "spend", "unknown", "buy", "clock", "devil", "early", "false",
"fantasy", "pound", "precious", "refuse", "sheet", "teeth", "welcome",
"add", "ahead", "block", "bury", "caress", "content", "depth", "despite",
"distant", "marry", "purple", "threw", "whenever", "bomb", "dull", "easily",
"grasp", "hospital", "innocence", "normal", "receive", "reply", "rhyme",
"shade", "someday", "sword", "toe", "visit", "asleep", "bought", "center",
"consider", "flat", "hero", "history", "ink", "insane", "muscle", "mystery",
"pocket", "reflection", "shove", "silently", "smart", "soldier", "spot",
"stress", "train", "type", "view", "whether", "bus", "energy", "explain",
"holy", "hunger", "inch", "magic", "mix", "noise", "nowhere", "prayer",
"presence", "shock", "snap", "spider", "study", "thunder", "trail", "admit",
"agree", "bag", "bang", "bound", "butterfly", "cute", "exactly", "explode",
"familiar", "fold", "further", "pierce", "reflect", "scent", "selfish",
"sharp", "sink", "spring", "stumble", "universe", "weep", "women",
"wonderful", "action", "ancient", "attempt", "avoid", "birthday", "branch",
"chocolate", "core", "depress", "drunk", "especially", "focus", "fruit",
"honest", "match", "palm", "perfectly", "pillow", "pity", "poison", "roar",
"shift", "slightly", "thump", "truck", "tune", "twenty", "unable", "wipe",
"wrote", "coat", "constant", "dinner", "drove", "egg", "eternal", "flight",
"flood", "frame", "freak", "gasp", "glad", "hollow", "motion", "peer",
"plastic", "root", "screen", "season", "sting", "strike", "team", "unlike",
"victim", "volume", "warn", "weird", "attack", "await", "awake", "built",
"charm", "crave", "despair", "fought", "grant", "grief", "horse", "limit",
"message", "ripple", "sanity", "scatter", "serve", "split", "string",
"trick", "annoy", "blur", "boat", "brave", "clearly", "cling", "connect",
"fist", "forth", "imagination", "iron", "jock", "judge", "lesson", "milk",
"misery", "nail", "naked", "ourselves", "poet", "possible", "princess",
"sail", "size", "snake", "society", "stroke", "torture", "toss", "trace",
"wise", "bloom", "bullet", "cell", "check", "cost", "darling", "during",
"footstep", "fragile", "hallway", "hardly", "horizon", "invisible",
"journey", "midnight", "mud", "nod", "pause", "relax", "shiver", "sudden",
"value", "youth", "abuse", "admire", "blink", "breast", "bruise",
"constantly", "couple", "creep", "curve", "difference", "dumb", "emptiness",
"gotta", "honor", "plain", "planet", "recall", "rub", "ship", "slam",
"soar", "somebody", "tightly", "weather", "adore", "approach", "bond",
"bread", "burst", "candle", "coffee", "cousin", "crime", "desert",
"flutter", "frozen", "grand", "heel", "hello", "language", "level",
"movement", "pleasure", "powerful", "random", "rhythm", "settle", "silly",
"slap", "sort", "spoken", "steel", "threaten", "tumble", "upset", "aside",
"awkward", "bee", "blank", "board", "button", "card", "carefully",
"complain", "crap", "deeply", "discover", "drag", "dread", "effort",
"entire", "fairy", "giant", "gotten", "greet", "illusion", "jeans", "leap",
"liquid", "march", "mend", "nervous", "nine", "replace", "rope", "spine",
"stole", "terror", "accident", "apple", "balance", "boom", "childhood",
"collect", "demand", "depression", "eventually", "faint", "glare", "goal",
"group", "honey", "kitchen", "laid", "limb", "machine", "mere", "mold",
"murder", "nerve", "painful", "poetry", "prince", "rabbit", "shelter",
"shore", "shower", "soothe", "stair", "steady", "sunlight", "tangle",
"tease", "treasure", "uncle", "begun", "bliss", "canvas", "cheer", "claw",
"clutch", "commit", "crimson", "crystal", "delight", "doll", "existence",
"express", "fog", "football", "gay", "goose", "guard", "hatred",
"illuminate", "mass", "math", "mourn", "rich", "rough", "skip", "stir",
"student", "style", "support", "thorn", "tough", "yard", "yearn",
"yesterday", "advice", "appreciate", "autumn", "bank", "beam", "bowl",
"capture", "carve", "collapse", "confusion", "creation", "dove", "feather",
"girlfriend", "glory", "government", "harsh", "hop", "inner", "loser",
"moonlight", "neighbor", "neither", "peach", "pig", "praise", "screw",
"shield", "shimmer", "sneak", "stab", "subject", "throughout", "thrown",
"tower", "twirl", "wow", "army", "arrive", "bathroom", "bump", "cease",
"cookie", "couch", "courage", "dim", "guilt", "howl", "hum", "husband",
"insult", "led", "lunch", "mock", "mostly", "natural", "nearly", "needle",
"nerd", "peaceful", "perfection", "pile", "price", "remove", "roam",
"sanctuary", "serious", "shiny", "shook", "sob", "stolen", "tap", "vain",
"void", "warrior", "wrinkle", "affection", "apologize", "blossom", "bounce",
"bridge", "cheap", "crumble", "decision", "descend", "desperately", "dig",
"dot", "flip", "frighten", "heartbeat", "huge", "lazy", "lick", "odd",
"opinion", "process", "puzzle", "quietly", "retreat", "score", "sentence",
"separate", "situation", "skill", "soak", "square", "stray", "taint",
"task", "tide", "underneath", "veil", "whistle", "anywhere", "bedroom",
"bid", "bloody", "burden", "careful", "compare", "concern", "curtain",
"decay", "defeat", "describe", "double", "dreamer", "driver", "dwell",
"evening", "flare", "flicker", "grandma", "guitar", "harm", "horrible",
"hungry", "indeed", "lace", "melody", "monkey", "nation", "object",
"obviously", "rainbow", "salt", "scratch", "shown", "shy", "stage", "stun",
"third", "tickle", "useless", "weakness", "worship", "worthless",
"afternoon", "beard", "boyfriend", "bubble", "busy", "certain", "chin",
"concrete", "desk", "diamond", "doom", "drawn", "due", "felicity", "freeze",
"frost", "garden", "glide", "harmony", "hopefully", "hunt", "jealous",
"lightning", "mama", "mercy", "peel", "physical", "position", "pulse",
"punch", "quit", "rant", "respond", "salty", "sane", "satisfy", "savior",
"sheep", "slept", "social", "sport", "tuck", "utter", "valley", "wolf",
"aim", "alas", "alter", "arrow", "awaken", "beaten", "belief", "brand",
"ceiling", "cheese", "clue", "confidence", "connection", "daily",
"disguise", "eager", "erase", "essence", "everytime", "expression", "fan",
"flag", "flirt", "foul", "fur", "giggle", "glorious", "ignorance", "law",
"lifeless", "measure", "mighty", "muse", "north", "opposite", "paradise",
"patience", "patient", "pencil", "petal", "plate", "ponder", "possibly",
"practice", "slice", "spell", "stock", "strife", "strip", "suffocate",
"suit", "tender", "tool", "trade", "velvet", "verse", "waist", "witch",
"aunt", "bench", "bold", "cap", "certainly", "click", "companion",
"creator", "dart", "delicate", "determine", "dish", "dragon", "drama",
"drum", "dude", "everybody", "feast", "forehead", "former", "fright",
"fully", "gas", "hook", "hurl", "invite", "juice", "manage", "moral",
"possess", "raw", "rebel", "royal", "scale", "scary", "several", "slight",
"stubborn", "swell", "talent", "tea", "terrible", "thread", "torment",
"trickle", "usually", "vast", "violence", "weave", "acid", "agony",
"ashamed", "awe", "belly", "blend", "blush", "character", "cheat", "common",
"company", "coward", "creak", "danger", "deadly", "defense", "define",
"depend", "desperate", "destination", "dew", "duck", "dusty", "embarrass",
"engine", "example", "explore", "foe", "freely", "frustrate", "generation",
"glove", "guilty", "health", "hurry", "idiot", "impossible", "inhale",
"jaw",
"kingdom", "mention", "mist", "moan", "mumble", "mutter", "observe", "ode",
"pathetic", "pattern", "pie", "prefer", "puff", "rape", "rare", "revenge",
"rude", "scrape", "spiral", "squeeze", "strain", "sunset", "suspend",
"sympathy", "thigh", "throne", "total", "unseen", "weapon", "weary"
]
n = 1626
# Note about US patent no 5892470: Here each word does not represent a given digit.
# Instead, the digit represented by a word is variable; it depends on the previous word.
def mn_encode(message):
assert len(message) % 8 == 0
out = []
for i in range(len(message) / 8):
word = message[8 * i:8 * i + 8]
x = int(word, 16)
w1 = (x % n)
w2 = ((x / n) + w1) % n
w3 = ((x / n / n) + w2) % n
out += [words[w1], words[w2], words[w3]]
return out
def mn_decode(wlist):
out = ''
for i in range(len(wlist) / 3):
word1, word2, word3 = wlist[3 * i:3 * i + 3]
w1 = words.index(word1)
w2 = (words.index(word2)) % n
w3 = (words.index(word3)) % n
x = w1 + n * ((w2 - w1) % n) + n * n * ((w3 - w2) % n)
out += '%08x' % x
return out
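#Round-trip sketch: each group of 8 hex characters (32 bits) maps to three
#words, and decoding reverses it; the seed value below is arbitrary.
if __name__ == "__main__":
    example_seed = "87017874"  #one 32-bit group -> three words
    encoded = mn_encode(example_seed)
    assert len(encoded) == 3
    assert mn_decode(encoded) == example_seed
    print(encoded)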

659
client/podle.py

@ -0,0 +1,659 @@
#!/usr/bin/env python
from __future__ import print_function
#Proof Of Discrete Logarithm Equivalence
#For algorithm steps, see https://gist.github.com/AdamISZ/9cbba5e9408d23813ca8
import os
import hashlib
import json
import binascii
import sys
PODLE_COMMIT_FILE = None
from btc import (multiply, add_pubkeys, getG, podle_PublicKey, podle_PrivateKey,
encode, decode, N,
podle_PublicKey_class, podle_PrivateKey_class)
def set_commitment_file(file_loc):
global PODLE_COMMIT_FILE
PODLE_COMMIT_FILE = file_loc
def get_commitment_file():
return PODLE_COMMIT_FILE
class PoDLEError(Exception):
pass
class PoDLE(object):
"""See the comment to PoDLE.generate_podle for the
mathematical structure. This class encapsulates the
input data, the commitment and the opening (the "proof").
"""
def __init__(self,
u=None,
priv=None,
P=None,
P2=None,
s=None,
e=None,
used=False):
#This class allows storing of utxo in format "txid:n" only for
#convenience of storage/access; it doesn't check or use the data.
#Arguments must be provided in hex.
self.u = u
if not priv:
if P:
#Construct a pubkey from raw hex
self.P = podle_PublicKey(binascii.unhexlify(P))
else:
self.P = None
else:
if P:
raise PoDLEError("Pubkey should not be provided with privkey")
#any other formatting abnormality will just throw in PrivateKey
if len(priv) == 66 and priv[-2:] == '01':
priv = priv[:-2]
self.priv = podle_PrivateKey(binascii.unhexlify(priv))
self.P = self.priv.pubkey
if P2:
self.P2 = podle_PublicKey(binascii.unhexlify(P2))
else:
self.P2 = None
#These sig values should be passed in hex.
if s:
self.s = binascii.unhexlify(s)
if e:
self.e = binascii.unhexlify(e)
#Optionally maintain usage state (boolean)
self.used = used
#the H(P2) value
self.commitment = None
def mark_used(self):
self.used = True
def mark_unused(self):
self.used = False
def get_commitment(self):
"""Set the commitment to sha256(serialization of public key P2)
Return in hex to calling function
"""
if not self.P2:
raise PoDLEError("Cannot construct commitment, no P2 available")
if not isinstance(self.P2, podle_PublicKey_class):
raise PoDLEError("Cannot construct commitment, P2 is not a pubkey")
self.commitment = hashlib.sha256(self.P2.serialize()).digest()
return binascii.hexlify(self.commitment)
def generate_podle(self, index=0, k=None):
"""Given a raw private key, in hex format,
construct a commitment sha256(P2), which is
the hash of the value x*J, where x is the private
key as a raw scalar, and J is a NUMS alternative
basepoint on the Elliptic Curve; we use J(i) where i
is an index, so as to be able to create multiple
commitments against the same privkey. The procedure
for generating the J(i) value is shown in getNUMS().
Also construct a signature (s,e) of Schnorr type,
which will serve as a zero knowledge proof that the
private key of P2 is the same as the private key of P (=x*G).
Signature is constructed as:
s = k + x*e
where k is a standard 32 byte nonce and:
e = sha256(k*G || k*J || P || P2)
Possibly Joinmarket specific comment:
Users *should* generate with lower indices first,
since verifiers will give preference to lower indices
(each verifier may have their own policy about how high
an index to allow, which really means how many reuses of utxos
to allow in Joinmarket).
Returns a commitment of form H(P2) which, note, will depend
on the index choice. Repeated calls will reset the commitment
and the associated signature data that can be used to open
the commitment.
"""
#TODO nonce could be rfc6979?
if not k:
k = os.urandom(32)
J = getNUMS(index)
KG = podle_PrivateKey(k).pubkey
KJ = multiply(k, J.serialize(), False, return_serialized=False)
self.P2 = getP2(self.priv, J)
self.get_commitment()
self.e = hashlib.sha256(''.join([x.serialize(
) for x in [KG, KJ, self.P, self.P2]])).digest()
k_int = decode(k, 256)
priv_int = decode(self.priv.private_key, 256)
e_int = decode(self.e, 256)
sig_int = (k_int + priv_int * e_int) % N
self.s = encode(sig_int, 256, minlen=32)
return self.reveal()
def reveal(self):
"""Encapsulate all the data representing the proof
in a dict for client functions. Data output in hex.
"""
if not all([self.u, self.P, self.P2, self.s, self.e]):
raise PoDLEError("Cannot generate proof, data is missing")
if not self.commitment:
self.get_commitment()
Phex, P2hex, shex, ehex, commit = [
binascii.hexlify(x)
for x in [self.P.serialize(), self.P2.serialize(), self.s, self.e,
self.commitment]
]
return {'used': str(self.used),
'utxo': self.u,
'P': Phex,
'P2': P2hex,
'commit': commit,
'sig': shex,
'e': ehex}
def serialize_revelation(self, separator='|'):
state_dict = self.reveal()
ser_list = []
for k in ['utxo', 'P', 'P2', 'sig', 'e']:
ser_list += [state_dict[k]]
ser_string = separator.join(ser_list)
return ser_string
@classmethod
def deserialize_revelation(cls, ser_rev, separator='|'):
ser_list = ser_rev.split(separator)
if len(ser_list) != 5:
raise PoDLEError("Failed to deserialize, wrong format")
utxo, P, P2, s, e = ser_list
return {'utxo': utxo, 'P': P, 'P2': P2, 'sig': s, 'e': e}
def verify(self, commitment, index_range):
"""For an object created without a private key,
check that the opened commitment verifies for at least
one NUMS point as defined by the range in index_range
"""
if not all([self.P, self.P2, self.s, self.e]):
raise PoDLE("Verify called without sufficient data")
if not self.get_commitment() == commitment:
return False
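#If the proof was honestly constructed with s = k + x*e (where P = x*G and
#P2 = x*J), then s*G - e*P = k*G and s*J - e*P2 = k*J, so recomputing
#sha256(K_G || K_J || P || P2) from those two reconstructed points must
#reproduce e; we try each candidate NUMS point J in index_range.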
for J in [getNUMS(i) for i in index_range]:
sig_priv = podle_PrivateKey(self.s)
sG = sig_priv.pubkey
sJ = multiply(self.s, J.serialize(), False)
e_int = decode(self.e, 256)
minus_e = encode(-e_int % N, 256, minlen=32)
minus_e_P = multiply(minus_e, self.P.serialize(), False)
minus_e_P2 = multiply(minus_e, self.P2.serialize(), False)
KGser = add_pubkeys([sG.serialize(), minus_e_P], False)
KJser = add_pubkeys([sJ, minus_e_P2], False)
#check 2: e =?= H(K_G || K_J || P || P2)
e_check = hashlib.sha256(KGser + KJser + self.P.serialize() +
self.P2.serialize()).digest()
if e_check == self.e:
return True
#commitment fails for any NUMS in the provided range
return False
def getNUMS(index=0):
"""Taking secp256k1's G as a seed,
either in compressed or uncompressed form,
append "index" as a byte, and append a second byte "counter"
try to create a new NUMS base point from the sha256 of that
bytestring. Loop counter and alternate compressed/uncompressed
until finding a valid curve point. The first such point is
considered as "the" NUMS base point alternative for this index value.
The search process is of course deterministic/repeatable, so
it's fine to just store a list of all the correct values for
each index, but the search is left in code for transparency, so
any user can re-derive the list.
The NUMS generator generated is returned as a secp256k1.PublicKey.
"""
assert index in range(256)
nums_point = None
for G in [getG(True), getG(False)]:
seed = G + chr(index)
for counter in range(256):
seed_c = seed + chr(counter)
hashed_seed = hashlib.sha256(seed_c).digest()
#Every x-coord on the curve has two y-values, encoded
#in compressed form with 02/03 parity byte. We just
#choose the former.
claimed_point = "\x02" + hashed_seed
try:
nums_point = podle_PublicKey(claimed_point)
return nums_point
except:
continue
assert False, "It seems inconceivable, doesn't it?" # pragma: no cover
def verify_all_NUMS(write=False):
"""Check that the algorithm produces the expected NUMS
values; this is more a sanity check than anything, since if this
file were modified the precomputed list could be modified too. The
function is mostly for testing, but it runs fast with a pre-computed
context, so it can also be run in user code.
"""
nums_points = {}
for i in range(256):
nums_points[i] = binascii.hexlify(getNUMS(i).serialize())
if write:
with open("nums_basepoints.txt", "wb") as f:
from pprint import pformat
f.write(pformat(nums_points))
assert nums_points == precomp_NUMS, "Precomputed NUMS points are not valid!"
def getP2(priv, nums_pt):
"""Given a secp256k1.PrivateKey priv and a
secp256k1.PublicKey nums_pt, an alternate
generator point (note: it's in no sense a
pubkey, its privkey is unknowable - that's
just the most easy way to manipulate it in the
library), calculate priv*nums_pt
"""
priv_raw = priv.private_key
return multiply(priv_raw,
nums_pt.serialize(),
False,
return_serialized=False)
def get_podle_commitments():
"""Returns set of commitments used as a list:
[H(P2),..] (hex) and a dict of all existing external commitments.
It is presumed that each H(P2) can
be used only once (this may not literally be true, but represents
good joinmarket "citizenship").
This is stored as part of the data in PODLE_COMMIT_FILE
Since takers request transactions serially there should be no
locking requirement here. Multiple simultaneous taker bots
would require extra attention.
"""
if not os.path.isfile(PODLE_COMMIT_FILE):
return ([], {})
with open(PODLE_COMMIT_FILE, "rb") as f:
c = json.loads(f.read())
if 'used' not in c.keys() or 'external' not in c.keys():
raise PoDLEError("Incorrectly formatted file: " + PODLE_COMMIT_FILE)
return (c['used'], c['external'])
def add_external_commitments(ecs):
"""To allow external functions to add
PoDLE commitments that were calculated elsewhere;
the format of each entry in ecs must be:
{txid:N:{'P':pubkey, 'reveal':{1:{'P2':P2,'s':s,'e':e}, 2:{..},..}}}
"""
update_commitments(external_to_add=ecs)
def update_commitments(commitment=None,
external_to_remove=None,
external_to_add=None):
"""Optionally add the commitment commitment to the list of 'used',
and optionally remove the available external commitment
whose key value is the utxo in external_to_remove,
persist updated entries to disk.
"""
c = {}
if os.path.isfile(PODLE_COMMIT_FILE):
with open(PODLE_COMMIT_FILE, "rb") as f:
try:
c = json.loads(f.read())
except ValueError:
print("the file: " + PODLE_COMMIT_FILE + " is not valid json.")
sys.exit(0)
if 'used' in c:
commitments = c['used']
else:
commitments = []
if 'external' in c:
external = c['external']
else:
external = {}
if commitment:
commitments.append(commitment)
#remove repeats
commitments = list(set(commitments))
if external_to_remove:
external = {
k: v
for k, v in external.items() if k not in external_to_remove
}
if external_to_add:
external.update(external_to_add)
to_write = {}
to_write['used'] = commitments
to_write['external'] = external
with open(PODLE_COMMIT_FILE, "wb") as f:
f.write(json.dumps(to_write, indent=4))
def generate_podle(priv_utxo_pairs, tries=1, allow_external=None, k=None):
"""Given a list of privkeys, try to generate a
PoDLE which is not yet used more than tries times.
This effectively means satisfying two criteria:
(1) the generated commitment is not in the list of used
commitments
(2) the index required to generate is not greater than 'tries'.
Note that each retry means using a different generator
(see notes in PoDLE.generate_podle)
Once used, add the commitment to the list of used.
If we fail to find an unused commitment with this algorithm,
we fallback to sourcing an unused commitment from the "external"
section of the commitments file; if we succeed in finding an unused
one there, use it and add it to the list of used commitments.
If still nothing available, return None.
"""
used_commitments, external_commitments = get_podle_commitments()
for priv, utxo in priv_utxo_pairs:
for i in range(tries):
#Note that we will return the *lowest* index
#which is still available.
p = PoDLE(u=utxo, priv=priv)
c = p.generate_podle(i, k=k)
if c['commit'] in used_commitments:
continue
#persist for future checks
update_commitments(commitment=c['commit'])
return c
if allow_external:
filtered_external = dict([(x, external_commitments[x])
for x in allow_external])
for u, ec in filtered_external.iteritems():
#use as many as were provided in the file, up to a max of tries
m = min([len(ec['reveal'].keys()), tries])
for i in [str(x) for x in range(m)]:
p = PoDLE(u=u,
P=ec['P'],
P2=ec['reveal'][i]['P2'],
s=ec['reveal'][i]['s'],
e=ec['reveal'][i]['e'])
if p.get_commitment() not in used_commitments:
update_commitments(commitment=p.get_commitment())
return p.reveal()
#If none of the entries in the 'reveal' list for this external
#commitment were available, they've all been used up, so
#remove this entry
if m == len(ec['reveal'].keys()):
update_commitments(external_to_remove=u)
#Failed to find any non-used valid commitment:
return None
def verify_podle(Pser, P2ser, sig, e, commitment, index_range=range(10)):
verifying_podle = PoDLE(P=Pser, P2=P2ser, s=sig, e=e)
#check 1: Hash(P2ser) =?= commitment
if not verifying_podle.verify(commitment, index_range):
return False
return True
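#End-to-end sketch (illustrative: the commitments file location, the
#private key and the utxo string are placeholders, and the file is written
#to the current directory as a side effect):
if __name__ == "__main__":
    set_commitment_file("commitments.json")
    example_priv = binascii.hexlify(os.urandom(32)) + '01'
    example_utxo = "aa" * 32 + ":0"  #placeholder txid:n
    proof = generate_podle([(example_priv, example_utxo)], tries=3)
    #the receiving side checks the opened commitment against H(P2)
    assert verify_podle(proof['P'], proof['P2'], proof['sig'], proof['e'],
                        proof['commit'])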
precomp_NUMS = {
0: '0296f47ec8e6d6a9c3379c2ce983a6752bcfa88d46f2a6ffe0dd12c9ae76d01a1f',
1: '023f9976b86d3f1426638da600348d96dc1f1eb0bd5614cc50db9e9a067c0464a2',
2: '023745b000f6db094a794d9ee08637d714393cd009f86087438ac3804e929bfe89',
3: '023346660dcb1f8d56e44d23f93c3ad79761cdd5f4972a638e9e15517832f6a165',
4: '02ec91c86964dcbb077c8193156f3cfa91476d5adfcfcf64913a4b082c75d5bca7',
5: '02bbc5c4393395a38446e2bd4d638b7bfd864afb5ffaf4bed4caf797df0e657434',
6: '02967efd39dc59e6f060bf3bd0080e8ecf4a22b9d1754924572b3e51ce2cde2096',
7: '02cfce8a7f9b8a1735c4d827cd84e3f2a444de1d1f7ed419d23c88d72de341357f',
8: '0206d6d6b1d88936bb6013ae835716f554d864954ea336e3e0141fefb2175b82f9',
9: '021b739f21b981c2dcbaf9af4d89223a282939a92aee079e94a46c273759e5b42e',
10: '025d72106845e03c3747f1416e539c5aa0712d858e7762807fdc4f3757fd980631',
11: '02e7d4defb5d287734a0f96c2b390aa14f5f38e80c5a5e592e4ce10d55a5f5246b',
12: '023c1bf301bcfa0f097f1a3931c68b4fd39b77a28cc7b61b2b1e0b7ca6d332493c',
13: '0283ac2cdd6b362c90665802c264ee8e6342318070943717faee62ef9addeff3e9',
14: '02cb9f6164cd2acdf071caef9deab870fc3d390a09b37ba7af8e91139b817ce807',
15: '02f0a3a3e22c5b04b6fe97430d68f33861c3e9be412220dc2a24485ea5d55d94db',
16: '02860ca3475757d90d999e6553e62c07fce5a6598d060cceeead08c8689b928095',
17: '0246c8eabc38ce6a93868369d5900d84f36b2407eecb81286a25eb22684355b41d',
18: '026aa6379d74e6cd6c721aef82a34341d1d15f0c96600566ad3fa8e9c43cbb5505',
19: '02fdeacb3b4d15e0aae1a1d257b4861bcc9addb5dc3780a13eb982eb656f73d741',
20: '021a83ecfaeb2c057f66a6b0d4a42bff3fe5fda11fe2eea9734f45f255444cddc0',
21: '02d93580f3e0c2ec8ea461492415cc6a4be00c50969e2c32a2135e7d04f112309a',
22: '0292c57be6c3e6ba8b44cf5e619529cf75e9c6b795ddecd383fb78f9059812cb3f',
23: '02480f099771d0034d657f6b00cd17c7315b033b19bed9ca95897bc8189928dd47',
24: '02ac0701cdc6f96c63752c01dc8400eab19431dfa15f85a7314b1e9a3df69a4a66',
25: '026a304ceb69e37d655c1ef100d7ad23192867151983ab0d168af96afe7f1997f6',
26: '023b9ff8e4a853b29ecae1e8312fae53863e86b8f8cb3155f31f7325ffb2baf02c',
27: '021894ce66d61c33e439f38a36d92c0e45bf28dbc7e30bfb4d7135b87fc8e890e1',
28: '02d9e7680e583cf904774d4c19f36cb3d238b6c770e1e7db03f444dc8b15b29687',
29: '024350c7ff5b2bf2c58e3b17a792716d0e76cff7ad537375d1abc6e249466b25a3',
30: '02c6577e1cdcbcfadb0ae037d01fbf6d74786eecdb9d1ee277d9ba69b969728cfe',
31: '029f395b4c7b20bcb6120b57bee6d2f7353cd0aa9fe246176064068c1bd9b714d1',
32: '02d180786087720b827bf04ae800547102470a1e43de314203e90228c586b481a1',
33: '023548173a673965c18d994028bc6d5f5df1f60dccf9368b0eae34f8cff3106943',
34: '02118124c53b86fdade932c4304ad347a19ce0af79a9ab885d7d3a6358a396e360',
35: '02930bcdee5887fa5a258335d6948017e6d7f2665b32dcc76a84d5ca7cd604d89b',
36: '0267e79a47058758a8ee240afd941e0ae8b4f175f29a3cf195ad6ff0e6d02955b1',
37: '027e53d9fb04f1bb69324245306d26aa60172fd13d8fe27809b093222226914de6',
38: '02ef09fbdcd22e1be4f0d4b2d13a141051b18009d7001f6828c6a40b145c9df23e',
39: '028742fd08c60ba13e78913581db19af2f708c7ec53364589f6cbcf9d1c8b5105f',
40: '020ce14308d2f516bf4f9e0944fb104907adef8f4c319bfcc3afab73e874a9ce4a',
41: '027635f125f05a2548201f74c4bbdcbe89561204117bd8b82dfae29c85a576a58e',
42: '02fe878f3ae59747ee8e9c34876b86851d5396124e1411f86fe5c58f08f413a549',
43: '02f2a6af33bd08ab41a010d785694e9682fa1cc65733f30a53c40541d1c1bfb660',
44: '02cbe9d18b6d5fc9993ef862892e5b2b1ea5d2710a4f208672c0f7c36a08bb5686',
45: '023fb079b25c0a8241465fb55802f22ebb354e6da81f7dabfe214ddbd9d3dfcd5a',
46: '021a5b234b9a10fc5f08ed9c1a136a250e92156adc12109a97dd7467276d6848a8',
47: '0240fbe9363d50585da40aef95f311fc2795550e787f62421cd9b6e2f719bb9547',
48: '02a245fbbc00f1d6feb72a9e1d3fd0033522839d33440aea64f52e8bccee616be8',
49: '02fd1e94bb23a4306de64064841165e3db497ae5b246dabff738eb3e6ea51685a7',
50: '0298362705914c839e45505369e54faefbb3aaebb4c486b4d6e59ca03304f3552c',
51: '021b8109a23b858114d287273620dd920029d84b90f63af273c1c78492b1a70105',
52: '028df6ce4fec30229cddb86c62606cff80e95cb8277028277f3dcc8ac9f98eef9d',
53: '02ed02925d806df4ac764769d11743093708808157fb2933eb19af5399dcfd500c',
54: '02ce88da0e81988bd8f5d63ad06898a355f7dc7f46bb08cf5f1e9bc5c3752ad13c',
55: '02f4868cc8285cd8d74d4213d18d53d5f410d50223818f1be6fe8090904e03743d',
56: '02770cecdf18aa2115b6e5c4295468f2e92a53068dc4295d0e5d0890b71d1a2fcc',
57: '02b5d4dce8932de37c6ef13a7f063f164dfd07f7399e8e815e22b5af420608fd2a',
58: '0284ad07924dbac50a72455aec3ddba50b1ed71e678ba935bb5d95c8a8232b1353',
59: '02cb8c916a6f9bc39c8825f5b0378bb1b0a0679e191843aa4db2195b81f14c87e0',
60: '0235aa30ec3df8dd193a132dbaf3b351af879c59504ed8b7b5ad5f1f1ea712854f',
61: '02df91206e955cefe7bcda4555fc6ad761b0e98d464629f098d4483306851704e9',
62: '02ed4f1fccd47e66a8d74e58b4f6e31b5172b628fc0dacdb408128c914eb80f506',
63: '0263991bb62aaca78a128917f5c4e15183f98aefddf04070c5ca537186f1c1a97a',
64: '02ffe2b017882d57db27864446ad7b21d3855ae64bddf74d46e3a611bf903580be',
65: '02d647aba2c01eecd0fac7e82580dd8b92d66db7341d1b65a5e4b01234f1fbb2cd',
66: '023134ff85401dba9aff426d3f3ba292ea59684b8c48ea0b495660797a839246a6',
67: '02827880fe0410c9ea84f75a629f8f8e6eed1f26528af421cf23b8ecf93b6b4b7b',
68: '02859b3f9f1f5ba6aa0787f8d3f3f2f21b4932c40bc36b6669383e3bbd19654a5f',
69: '02a7d204dfc3eed44abd0419202e280f6772efd5acf9fd34331b8f47c81c6dab19',
70: '02e15d11b443a9340ac31a8c5774ce34cd347834470c8d68c959828fae3a7eb0c6',
71: '029931f65e46627d60519bfd08bd8a1bb3d8d2921f7f8c9ef31f4bfcdd8028ead2',
72: '02e5415ba78743d736018f19757ee0e1ca5d4a4fb1d0464cd3eea8d89b34dd37b8',
73: '027ea7860afc3de502d056d9a19ca330f16cd61cfefbeb768df68a882d1f8f15f5',
74: '026c19becac43626582622e2b7e86ebd8056f40aa8ab031e70f4deae8cab34503f',
75: '02098dab044c888ddebe6713fcb8481f178e3ba42d63310b08d8234e20fe1de13f',
76: '02ed6af1a2bebcb381ce92f87638267b1afefe7a1cdce16253f5bf9f99a84ce4b2',
77: '023d8493f9e72cd3212166de50940d980f603ae429309abb29e15cccc1983efe37',
78: '025c07d7513b1bae52a0089a4faee127148e2ba5a651983083aedc1ae8403cf1eb',
79: '0285a93a8c8e6134b3a53c5bd1b5b7d24e7911763ea887847c5d66af172ed17f10',
80: '02fea28fb142aa95fcd44398c9482a3c185ec22fee8f24ad6b2297ac7478423f21',
81: '02f9840a1635ae3fa131405526974d40d2edee17adf58278956373ce6c69757c2a',
82: '023579e441a7dcdbd36a2932c64fa3318023b1f3d04daab148622b7646246a6d7c',
83: '02bcbc2933f90a88996c1363c8d3a7004e0c6b75040041201fb022e45acb0af6a7',
84: '02cd52e0d28f5564fc2bf842fa63dfefbcf2bb5fe0325703c132be5cd14cca7291',
85: '021e648e261b93fedd3439352899c0fa1acedd1f68ab508050a13ed3cbbc93c2ff',
86: '0295f9caea5f57d11b12ddee154a36a14921a8980fa76726e48e1d76443d4e306f',
87: '02396edf4c18283dd3ef68a2c57b642bd87ae9f8b6be5e5fe4a41c5b86c5db8eb2',
88: '0264f323ca3eee79385c9bfd35cd4cf576e51722f38dd98531d531a29913e5170d',
89: '02facd3f63f543e0ab9b13323340113acbe8ed3bafdfabdc80626cdd15386c80f3',
90: '02b6762640f96367fbf65eecfafcee5c6f7d6a42b706113053bb36a882659d3e65',
91: '02ed63f2eca15d9b338fcdb9b3efa3b326e173a1390706258829680f7973fa851c',
92: '026f6d47d0d48ff13d64ec6a1db2dc51173cee86ab8010a17809b3fe01483d9fc5',
93: '02814e7cae580a1ef86d6ee9b2f9f26fe771e8ea47acf11153b04680ada9cd3042',
94: '020e46225fb3ee8f04d08ffbe12d9092ff7f7227f9cb55709890c669e8a1c97963',
95: '028194469e8d6ee660e95d6125ba0152ad5c24bf7e452adf80db7062d6926851c4',
96: '02b3e1f5754562635ebeecfd32edb0d84a79b2f0c270bac153e60dd29334dc2663',
97: '02afff20730724a2d422f330e962362e7831753545ac0a931dd94be011ccf93e9c',
98: '02a9cfdf0471a34babfc2f6201dbc79530f3f319204daedb7ec05effc2bdfc5a74',
99: '02838fe450f2dd0c460b5fae90ec2feb5b7f001f9cd14c01a475c492cf16ea594b',
100: '02aacc3145d04972d0527c4458629d328219feda92bef6ef6025878e3a252e105a',
101: '02720fe09616d4325d3c4c702a0aeafbbbff95ef962af531c5ae9461ec81fdf8c5',
102: '02e6408f24461a6c484f6c4493c992d303211d5e4297d34afede719a2b70c96c14',
103: '02b9ecf2d3fdf2611c6d4be441a0f9a3810dadae39feb3c0d855748cc2dd98a968',
104: '027a32d12a536af038631890a9b90ee20b219c9c8231a95b1cde24c143d8173fec',
105: '02d26c98fb50b57b7defdf1e8062a52b2a859ba42f3d1760ee8ff99c4e9eb3ec03',
106: '02df85556e8d1e97a8093e4d9950905ebced0ea9a1e49728713df1974eeb455774',
107: '021fe1dbada397155a80225b59b4fb9a32450a991b2d9d11d8500e98344927c856',
108: '0211ccd0980a9ab6f4bb82fdc2e2d1ddace063a7bc1914a6ab4d02b0fa1ca746ec',
109: '0264bd41f41aad19f8bfd290fd3af346ebbf80efd33f515854f82bd57e9740f7aa',
110: '0226d5fb607cadb8720e900ce9afb9607386ad7b767e4ab3a4e0966223324b92eb',
111: '02b3bbf2e2ceae25701bd3b78ba13bea3f0dfed7581b8a8a67c66de9fd96ee41e2',
112: '024b8dd765e385d0e04772f3dbf1b1a82abc2de3e5740baac1f6306cd9fd45fe99',
113: '022153f6a884ae893ebb0642a84d624c0b62894d7cb9e2a48a3a0c4696e593f9db',
114: '0245e22b6388cb14c9c8dbcac94853bdf1e81816c07e926a82b96fc958aa874626',
115: '02cba97826b089c695b1acffdcdbf1484beec5eb95853fea1535d6d7bdb4e678b0',
116: '02ed006fbab2d18adbd96d2f1de6b83948e2a47acc8d2f92d7af9ba01ffae58276',
117: '02513592f4434ee62802d3965f847684693000830107c72cd8de4b34e05b532dae',
118: '028adc75647453a247bd44855abb56b60794aaed5ce21c9898e62adac7adcfbe8e',
119: '02a712d5dc572086359f1688e8e7b9a5f7fc3079644aea27cdddb382208fee885b',
120: '029abf8551218c9076f6d344baa099041fe73e5e844aac6e5c20240834105cdf60',
121: '027d480071a2d128c51e84c380467e1ac8435f05b985bbfee0099d35b4121fb0ca',
122: '02a7f2e4253fa0d833beca742e210c0d59a4ffc8559764766dcffb1aa3e4961826',
123: '023521309a6bdfafdf7bdae574a5f6010eb992e4bae46d8f83c478eac137889270',
124: '02b99fe8623aa19ca2bed6fe435ae95c5072a40193913bebe5466f675c92a31db7',
125: '02dc035112a2b4881917ea1db159e7f35ee9d98d31533e1285ca150ce84e538e4f',
126: '0291a07ecce8061561624de7348135b9081c5edd61541b24fa002fb6c074318fec',
127: '020d8a5253d7e0166aa37680a5f64cab0cdad2cdc4c0e8ae61d310df4c4f7386eb',
128: '026285db47fee60b5ad54cbd4c27a4e0cd723b86a920f03b12dc9b8c5f19f06448',
129: '020f94a9df4302f701b4629f74d401484daf84c7aabaf533f8c21c1626009e923c',
130: '027bb78af54b01ddad4e96b51a4e024105b373aab7e1a6ec16279967fcbbb096b4',
131: '02e1b20c0da3b8c991f8909fd0d31874be00e9fcb130d7c28b8ad53326cdf13755',
132: '02bbdd4dfc047f216e2cbff789bcf850423bedf2006d959963f75621810fecf0d9',
133: '024e1fe4b23feda8651a467090e0ce7e8b8db2ccb1c27d52255c76754aa1940d1b',
134: '0241aad8f575556c49c4fefae178c2c38541962bfff2ca84ebecea9f661ccf3536',
135: '02bcf6203d725ca0640bd045389e854e00087c54ba01fd739c6ef685b22f89340c',
136: '0202178e6b3a9b498399aa392b32dc9010f1eea322a6d439ad0c8cacf2008b3e34',
137: '026db3289d470df0fdf04f5f608fae2d7ec4ddbd3de2603f6685789520bdee01fc',
138: '0239bcfc796488129e3b2f01e6fbbda2f1b357b602e94b5091b44c916e9806dc34',
139: '020513bc4a618d32d784083f13d46e6c6d547f01b24942351760f6dc42e2bb7167',
140: '0204d2495e4fc20e0571ab2fcb4c1989fdda4542923aa97fe1a77a11c79ade1964',
141: '021eaa6af99ea4f1143a45a1b5af7b2d3c3e8810f358be6261248c5ba2492a7b4e',
142: '02799849e87e3862170add5b28a3b7e54b04cc60c2cec39de7eca9bfdfaaf930a8',
143: '02639bced287084268136c5b6e9e22f743b6c8f813e6aabe39521715bfa4a46ab8',
144: '0283c8b21fc038c1fbeedfae0b3abc4dbde672b0dcfda540f9fcfcf8c6e6d29fc3',
145: '02b284f4510535ff98e683f25c08b7ae7dd19f7b861e70a202469ddfb2877bc729',
146: '0256af1c82cde40ffd03564368b8256a5e48ef056df2655013f0b1aa15de1de8d2',
147: '02964b55eab2f19518ee735cae2f7f780bfab480bcbd360f7a90a2904301203366',
148: '02f046486f4a473f2226f6bd120aafc55a5c8651f3eb0855aa6a821f69f3016cc6',
149: '02eb8dfb7c59fbf24671e258ca5e8eda3ea74c5f0455eed4987cfda79f4fcf823f',
150: '020fac2c37cc273d982c07b2719a3694348629d5bdaebc22967fb9d0e1d7f01842',
151: '025c0c8ff9a102f99f700081526d2b93b9d51caf81dcf4d02e93cf83b4a7ff5c92',
152: '02a118f5fa9c5ef02707e021f9cb8056e69018ef145bec80ead4e09c06a60050c1',
153: '029ea72333d1908bb082bffec9da8824883df76a89709ab090df86c45be4abf784',
154: '02bacc52256e5221dbfc9a3f22e30fa8e86ddd38e3877e3dc41de91bdcf989b00b',
155: '02bc8b37dc66e2296ae706c896f5b86bd335f724cfa9783e41b9dc5e901b42b1de',
156: '02eca1099cea9bcab80820d6b64aec16dce1efa0e997b589f6dba3a8fd391fb100',
157: '027f1c1bb99bd1a0e486f415f8960d45614a6fcac8cedc260e07197733844827d0',
158: '021fc54df458bcfafc8a83d4759224c49c4b338cf23cd9825d6e9cdeffc276375b',
159: '027d4fff88da831999ba9b21e19baf747dc26ea76651146e463d4f3e51c586ee91',
160: '02e49c0fef0ebc52908cdcea4d913a42e5f24439fffdfaa21cc55a6add0ad9d122',
161: '0208b5e8e5035fdb62517d4ebab0696775dbfbdba8ff80f2031c1156cda195a2ab',
162: '0202e990bab267fff1575d6acc76fe2041f4196f4b17678872f9c160d930e5be35',
163: '02c73fcedd9f6eabc8fe4e1e7211cdb0f28967391d200147d46e4077d2915c262d',
164: '0261490abc5f14387ef585f42d99dbddb0837b166694d4af521086a1ffd46e5640',
165: '02b46a143e4e0af20a12c39f3105aca57ca79f9332df67619ee859b5d9bffb6d6d',
166: '0299f53c064d068f003f8871acae31b84ddda9d8dbe516d02dc170c70314ee2af7',
167: '023305144dccba65c67001474ee1135aa96432f386b5eb27582393b2ed4bfc185d',
168: '02e044b70ff7e9c784b3c40d09bdfadd4a037e692b0b3aa9ab6bb91203f86a0b37',
169: '02ded067a2e44282b0d731a28ffbd03ca6046c5b1a262887ea7cab4810050fbb8c',
170: '02e00e4c9198194d92a93059bce61f8249e1006eee287aa94fe51bb207462e5492',
171: '0241b89d9164f4c07595ca99b7d73cad2b20ac39847cf703dff1d7d6add339ebeb',
172: '02eba24cd4946e149025a9bf7759df5362245bf7c53c5a3205be0c92c59db8d5dc',
173: '026bd40c611246a789521c46d758a80337ff40bb298a964612b2af74039211727a',
174: '02b9095e071e4edfddf8afb0e176536957509d23f90fb7175ad086b4098e731c73',
175: '0214ad0014dfddc5c7eb0801b97268c1b7e03d64215d6b9d5ed80b468089e4a01d',
176: '02c455b8e38103ade8794fb51a1656e1439b42bdf79afd17a9df8542153914a7cf',
177: '02cc89d6437fdcf711a76eb16f4014f2e21b71740afc8b3ec13ccb60a45b12d815',
178: '0208eee5857dda0ae1c721e6ed4c74044add4e1ce66f105413e9ef1cccbdca87ad',
179: '02edc663693827cad44d004ac24753bfc3167f81ff4074bb862453376593229c0f',
180: '0202a4b7fb31e30b6d8f90a5442ef31f800902ea7a9511e24437b7a0ef516f79a9',
181: '02ff05472c2019ac2c9ab8b7fcb0604a94b7379c350306be262144588ea252d0f4',
182: '02b131bb594a1270d231e18459e484c49f3eca3b3b2291c9be81c01dc8a4037fa1',
183: '02f50125277ea19f633e93868cf8e8a4cd76b21eedf8e3ef59de43f40d73a01d01',
184: '027aab228a7d6f87003b01fb9c0b9bcfb2098adbc76f5f9b856aedd28077fc4471',
185: '02925200e4f74bea719a99f4a0b05165b9af475f2187381bd0b79cad4d5f2593b6',
186: '02c311f1750c6d5c364b71c3b0f369f6959d34a3718da695c5b227ecf1a4669bf6',
187: '02cb030c71169d0a1ae30ffba92311bc06bb64b27570598dedabdea0b24631a0ca',
188: '02e64669898eecff7aa887307be696a694f61559e7ca41119677b7e94f37cd2914',
189: '028fe93e32c24df7f8aaf8d777335fd9ce9f9b5c121dec2ab1ff21575c047497e7',
190: '026f08c1c3cb4cff5cdbd7985db4a8ebf0ebc0924530b0fa118d095c4667efeb52',
191: '02afe08dbba6c999efb73aeae1da0ad8b143a1b51759caffd3ed2de4494adc47fb',
192: '02e99aec0b5e869b3885a3b9f527fd3c546dde83d41a5a156703d0da5e10e04743',
193: '02b7e5f4cb9233107bf7a47789dca4eb811af108822f2d4bd03dec13251ec45984',
194: '023b971e135daa0b851797b17e3a1cc5ac8a9a6207a2e784a0fe36732a00407b49',
195: '02b1742739bfbb528b2a2731cb5d5f1bd03f4fa9c94607837e586c7c6f6589be4a',
196: '022cd1b023bb2afc68ee27b40f8deb1d1c6d7b7aa97c32c444f1ceebd449dbeb22',
197: '02704e21f8bf38158d7e8100e297adfc930c14c8791beee9b907407f4ca654d95b',
198: '02caabeb678374ca75bd815c370b2e37fb0470591557219d6289b1b1e655ed80c6',
199: '026aa8d45112aa0da335054194c739e04787526250493f5a0eaaa8a346541d1a0f',
200: '022fb12408355439bbee33066bbeefcffb0bdc9cfd1950510fd2a42bdc4eaa1d53',
201: '02639fe47769f7694ca6dbfd934762472391d70b23868a58e11d2bd46373e1df29',
202: '02f75360f52df674247c5f005b3451ee47becf3204862154d4e7ee97a0e40df3d2',
203: '0230241e27d0d3ad727d26472541fcd48f2bb128db5611237fa9f33f86ede8d5c9',
204: '0255d5a0aa37a226c001f6b7f19e2bddb10aeaa0652430b8defe35c3f03dfb3c0e',
205: '024e6faa398b0acf8a8dfdd9d21e0a46a22d07cd0fcffd89749f74f94f9993f4d9',
206: '020c1a256587306f58f274cc2238f651bbfadfd42436e6eb8f318ac08fae04e7ae',
207: '025858b8188da173e8b01b8713b154ffae8b2d2eb8f9670362877102cf0c0c4f28',
208: '02dc7509c77d7fa61c08c5525fb151bf4fe12deb1989a3be560a63105dae2ecd2e',
209: '02a272df6dab1c22c209b45b601737c0077acb7869bb9fe264c991b4ef199e337d',
210: '025168f2fdd730b4c33b57d3956e6a40dd27a4f32db70d9f9b5898fa2bed3de342',
211: '028133baac70bc2c2ebe8a22af04b5faedd070e276c90e2f910bb9bf89441a80db',
212: '029064628ebd6e97a945c1d52641a27bff3c4f59659e657b88d23c2ce1c4d04644',
213: '023cf20c4e8675bce999a0128602fe21699db651540f3dcbe7a4ef2126243ba17a',
214: '02cc685739a4b20e2d52ddf256e597c06b7eb69e65d009820c6744b739c7215340',
215: '02d061544ce21398af3e0e6c329ce49976a9ecd804ebc543f4c16f6a32798f37c2',
216: '029fe49ff440f23c69360a92d249db429bdc3601fc8a5a3fc1aa894de817c05490',
217: '0222c8c4e90585f9816b5801bad43fb608857269fdaaefbe2b5b85903231685679',
218: '0296b72ed4968860b733fb99846698df2e95c65af281b3ef8b5ab90e2d5de966cb',
219: '02c27565a7fd5d1f4bcbe969bddbace99553fb65cb7750965350ff230b1f09f97d',
220: '02e1254be9833236609bf44c62ef6da7188a44bbe2d53a72cf39a38ef9f99bb783',
221: '0280663ce16afadc77e00ade780da53e7c11b02a66cbf36837ef7d9d2488f23417',
222: '02ad8b11e62c6753917307bdde89a42896e0070d33f6f93c608d82f6d041b814a4',
223: '02ce1d943dfc14654266507def2b7b9940bffceb4f54d709a149f99962083398fc',
224: '023ea7eb26248c05beb4e4d8ba9f9785d5fd1a55d3137c90f40b807b60aa4262df',
225: '0211c802fec9b31710d3849e2c1700cea5374ae422e54551946d96fc240c63fba0',
226: '02204ad97ebe2ec30d6db1bfc1e1d4660331909668634c3cd928b5c369a6013367',
227: '020251bf4271d359a082cdad23d9a5cd48916d78eed010fe1e7d9711cd420b3cdf',
228: '0292b9757195350676e447e49425f887d3df7e27774bb3e0aab5b528da0a1a0340',
229: '022be18362b2a167199a76f6065358063b1167d5bbcfe7652fc55f93a5ebd42e89',
230: '02e6b1e618efe5f468bdb40f5ec167ed4fa7636849c4ff4ddab0199c903b37306c',
231: '02a6676873de91890ecae000c575e46e4a9629865fb1662606da5e9c1fdcd55d5c',
232: '02c088a3c96b13413caa5f32a8f4640e76ec0a37990577d679d2062e859547f058',
233: '023e9703ed6209d5a25e0ecb34e04c22f274f37845aa2a4e2f2343e39928360e25',
234: '02977d845787c4690152827bfd15e801044c84d33430a7ed928499e828cf131d14',
235: '0224ea648555445d1305aaf6bd74fda3041b2a10bf7900a4c067462b01c6dc25f1',
236: '02dfd472c98ece1dc2a18c1bebf98a09990fba673e725c029928937247022b9d24',
237: '02a2a03933d06617adcf0f4ad692e95d463a5fa9938e8d451e5d6271f4a5af8bb4',
238: '02ca24fa8d7aa53f7f5b4e1ca16eb6fd9b9cfb0162a332abb7a88ddf8e964c99bc',
239: '02bbce92d1db3ef0c9c09793b760fd3b929c9168e4dff396c618fa0ed3cf6a5edb',
240: '028af15d26d3b297f4d2aeaf308632b60251accf87aa8470b3d4d1ef2dabb99209',
241: '021b81c0e878389231339fd9d622a736fc9d36de93a58ea6a4bc38fef86672278a',
242: '021adc24309f605c7a5af106e8b930feaec0bec6545fb4c70b83ebe5cf341cab2d',
243: '020462a3ff101ac379f87f43190459b7494f4128ea30035877ce22a35afb995e34',
244: '02f1019851779a6d0db09e8abeba3b9a07b6931b43b0d973cfe261a96b4516cca4',
245: '02d7023276f01ff22a9efeadd5b539d1d9ceb80ebf6813e6042a49c946a82f366f',
246: '021594f45af3a21e0210a2ca4cbc3e95ea95db5aca3561fc1f759cb7f104dd0f62',
247: '021398309b6c293c0dc28cdd7e55ad06306b59cb9c10d947df565e4a90f095a62a',
248: '029f39d84383200e841187c5b0564e3b01a2ba019b86221c0c1dd3eae1b4dabb26',
249: '0252ec719852f71c2d58886dd6ace6461a64677a368b7b8e220da005ac977abdc8',
250: '0237f0d7de84b2cc6d2109b7241c3d49479066a09d1412c7a4734192715b021e06',
251: '021e9e0e4784d15a29721c9a33fbcfb0af305d559c98a38dcf0ce647edd2c50caa',
252: '02e705994a78f7942726209947d62d64edd062acfa8a708c21ac65de71e7ae71df',
253: '0295f1cafd97e026341af3670ef750de4c44c82e6882f65908ec167d93d7056806',
254: '023a0d381598e185bbff88494dc54e0a083d3b9ce9c8c4b86b5a4c9d5f949b1828',
255: '02a0a8694820c794852110e5939a2c03f8482f81ed57396042c6b34557f6eb430a'
}

674
client/slowaes.py

@ -0,0 +1,674 @@
#!/usr/bin/python
#
# aes.py: implements AES - Advanced Encryption Standard
# from the SlowAES project, http://code.google.com/p/slowaes/
#
# Copyright (c) 2008 Josh Davis ( http://www.josh-davis.org ),
# Alex Martelli ( http://www.aleax.it )
#
# Ported from C code written by Laurent Haan ( http://www.progressive-coding.com )
#
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/
#
import math
import os
def append_PKCS7_padding(s):
"""return s padded to a multiple of 16-bytes by PKCS7 padding"""
numpads = 16 - (len(s) % 16)
return s + numpads * chr(numpads)
def strip_PKCS7_padding(s):
"""return s stripped of PKCS7 padding"""
if len(s) % 16 or not s:
raise ValueError("String of len %d can't be PCKS7-padded" % len(s))
numpads = ord(s[-1])
if numpads > 16:
raise ValueError("String ending with %r can't be PCKS7-padded" % s[-1])
if not all(numpads == x for x in map(ord, s[-numpads:-1])):
raise ValueError("Invalid PKCS7 padding")
return s[:-numpads]
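# Illustrative round-trip (hypothetical input, comment sketch only):
#   padded = append_PKCS7_padding("hello")   # "hello" + 11 * chr(11), len 16
#   strip_PKCS7_padding(padded)              # -> "hello"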
class AES(object):
# valid key sizes
keySize = dict(SIZE_128=16, SIZE_192=24, SIZE_256=32)
# Rijndael S-box
sbox = [0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5, 0x30, 0x01, 0x67,
0x2b, 0xfe, 0xd7, 0xab, 0x76, 0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59,
0x47, 0xf0, 0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0, 0xb7,
0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc, 0x34, 0xa5, 0xe5, 0xf1,
0x71, 0xd8, 0x31, 0x15, 0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05,
0x9a, 0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75, 0x09, 0x83,
0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0, 0x52, 0x3b, 0xd6, 0xb3, 0x29,
0xe3, 0x2f, 0x84, 0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b,
0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf, 0xd0, 0xef, 0xaa,
0xfb, 0x43, 0x4d, 0x33, 0x85, 0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c,
0x9f, 0xa8, 0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5, 0xbc,
0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2, 0xcd, 0x0c, 0x13, 0xec,
0x5f, 0x97, 0x44, 0x17, 0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19,
0x73, 0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88, 0x46, 0xee,
0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb, 0xe0, 0x32, 0x3a, 0x0a, 0x49,
0x06, 0x24, 0x5c, 0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79,
0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9, 0x6c, 0x56, 0xf4,
0xea, 0x65, 0x7a, 0xae, 0x08, 0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6,
0xb4, 0xc6, 0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a, 0x70,
0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e, 0x61, 0x35, 0x57, 0xb9,
0x86, 0xc1, 0x1d, 0x9e, 0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e,
0x94, 0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf, 0x8c, 0xa1,
0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68, 0x41, 0x99, 0x2d, 0x0f, 0xb0,
0x54, 0xbb, 0x16]
# Rijndael Inverted S-box
rsbox = [0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38, 0xbf, 0x40, 0xa3,
0x9e, 0x81, 0xf3, 0xd7, 0xfb, 0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f,
0xff, 0x87, 0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb, 0x54,
0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d, 0xee, 0x4c, 0x95, 0x0b,
0x42, 0xfa, 0xc3, 0x4e, 0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24,
0xb2, 0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25, 0x72, 0xf8,
0xf6, 0x64, 0x86, 0x68, 0x98, 0x16, 0xd4, 0xa4, 0x5c, 0xcc, 0x5d,
0x65, 0xb6, 0x92, 0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda,
0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84, 0x90, 0xd8, 0xab,
0x00, 0x8c, 0xbc, 0xd3, 0x0a, 0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3,
0x45, 0x06, 0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02, 0xc1,
0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b, 0x3a, 0x91, 0x11, 0x41,
0x4f, 0x67, 0xdc, 0xea, 0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6,
0x73, 0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85, 0xe2, 0xf9,
0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e, 0x47, 0xf1, 0x1a, 0x71, 0x1d,
0x29, 0xc5, 0x89, 0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b,
0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20, 0x9a, 0xdb, 0xc0,
0xfe, 0x78, 0xcd, 0x5a, 0xf4, 0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07,
0xc7, 0x31, 0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f, 0x60,
0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d, 0x2d, 0xe5, 0x7a, 0x9f,
0x93, 0xc9, 0x9c, 0xef, 0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5,
0xb0, 0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61, 0x17, 0x2b,
0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26, 0xe1, 0x69, 0x14, 0x63, 0x55,
0x21, 0x0c, 0x7d]
def getSBoxValue(self, num):
"""Retrieves a given S-Box Value"""
return self.sbox[num]
def getSBoxInvert(self, num):
"""Retrieves a given Inverted S-Box Value"""
return self.rsbox[num]
@staticmethod
def rotate(word):
""" Rijndael's key schedule rotate operation.
Rotate a word eight bits to the left: eg, rotate(1d2c3a4f) == 2c3a4f1d
        Word is a char list of size 4 (32 bits overall).
"""
return word[1:] + word[:1]
# Rijndael Rcon
Rcon = [0x8d, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36,
0x6c, 0xd8, 0xab, 0x4d, 0x9a, 0x2f, 0x5e, 0xbc, 0x63, 0xc6, 0x97,
0x35, 0x6a, 0xd4, 0xb3, 0x7d, 0xfa, 0xef, 0xc5, 0x91, 0x39, 0x72,
0xe4, 0xd3, 0xbd, 0x61, 0xc2, 0x9f, 0x25, 0x4a, 0x94, 0x33, 0x66,
0xcc, 0x83, 0x1d, 0x3a, 0x74, 0xe8, 0xcb, 0x8d, 0x01, 0x02, 0x04,
0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36, 0x6c, 0xd8, 0xab, 0x4d,
0x9a, 0x2f, 0x5e, 0xbc, 0x63, 0xc6, 0x97, 0x35, 0x6a, 0xd4, 0xb3,
0x7d, 0xfa, 0xef, 0xc5, 0x91, 0x39, 0x72, 0xe4, 0xd3, 0xbd, 0x61,
0xc2, 0x9f, 0x25, 0x4a, 0x94, 0x33, 0x66, 0xcc, 0x83, 0x1d, 0x3a,
0x74, 0xe8, 0xcb, 0x8d, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40,
0x80, 0x1b, 0x36, 0x6c, 0xd8, 0xab, 0x4d, 0x9a, 0x2f, 0x5e, 0xbc,
0x63, 0xc6, 0x97, 0x35, 0x6a, 0xd4, 0xb3, 0x7d, 0xfa, 0xef, 0xc5,
0x91, 0x39, 0x72, 0xe4, 0xd3, 0xbd, 0x61, 0xc2, 0x9f, 0x25, 0x4a,
0x94, 0x33, 0x66, 0xcc, 0x83, 0x1d, 0x3a, 0x74, 0xe8, 0xcb, 0x8d,
0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36, 0x6c,
0xd8, 0xab, 0x4d, 0x9a, 0x2f, 0x5e, 0xbc, 0x63, 0xc6, 0x97, 0x35,
0x6a, 0xd4, 0xb3, 0x7d, 0xfa, 0xef, 0xc5, 0x91, 0x39, 0x72, 0xe4,
0xd3, 0xbd, 0x61, 0xc2, 0x9f, 0x25, 0x4a, 0x94, 0x33, 0x66, 0xcc,
0x83, 0x1d, 0x3a, 0x74, 0xe8, 0xcb, 0x8d, 0x01, 0x02, 0x04, 0x08,
0x10, 0x20, 0x40, 0x80, 0x1b, 0x36, 0x6c, 0xd8, 0xab, 0x4d, 0x9a,
0x2f, 0x5e, 0xbc, 0x63, 0xc6, 0x97, 0x35, 0x6a, 0xd4, 0xb3, 0x7d,
0xfa, 0xef, 0xc5, 0x91, 0x39, 0x72, 0xe4, 0xd3, 0xbd, 0x61, 0xc2,
0x9f, 0x25, 0x4a, 0x94, 0x33, 0x66, 0xcc, 0x83, 0x1d, 0x3a, 0x74,
0xe8, 0xcb]
def getRconValue(self, num):
"""Retrieves a given Rcon Value"""
return self.Rcon[num]
def core(self, word, iteration):
"""Key schedule core."""
# rotate the 32-bit word 8 bits to the left
word = self.rotate(word)
# apply S-Box substitution on all 4 parts of the 32-bit word
for i in range(4):
word[i] = self.getSBoxValue(word[i])
# XOR the output of the rcon operation with i to the first part
# (leftmost) only
word[0] = word[0] ^ self.getRconValue(iteration)
return word
def expandKey(self, key, size, expandedKeySize):
"""Rijndael's key expansion.
        Expands a 128, 192 or 256 bit key into a 176, 208 or 240 byte expanded key.
expandedKey is a char list of large enough size,
key is the non-expanded key.
"""
# current expanded keySize, in bytes
currentSize = 0
rconIteration = 1
expandedKey = [0] * expandedKeySize
# set the 16, 24, 32 bytes of the expanded key to the input key
for j in range(size):
expandedKey[j] = key[j]
currentSize += size
while currentSize < expandedKeySize:
# assign the previous 4 bytes to the temporary value t
t = expandedKey[currentSize - 4:currentSize]
# every 16,24,32 bytes we apply the core schedule to t
# and increment rconIteration afterwards
if currentSize % size == 0:
t = self.core(t, rconIteration)
rconIteration += 1
# For 256-bit keys, we add an extra sbox to the calculation
if size == self.keySize["SIZE_256"] and (
(currentSize % size) == 16):
for l in range(4):
t[l] = self.getSBoxValue(t[l])
# We XOR t with the four-byte block 16,24,32 bytes before the new
# expanded key. This becomes the next four bytes in the expanded
# key.
for m in range(4):
expandedKey[currentSize] = expandedKey[currentSize - size] ^ \
t[m]
currentSize += 1
return expandedKey
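    # The expanded sizes follow from 16 * (rounds + 1) bytes: 176 = 16 * 11
    # (AES-128, 10 rounds), 208 = 16 * 13 (AES-192, 12 rounds) and
    # 240 = 16 * 15 (AES-256, 14 rounds).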
@staticmethod
def addRoundKey(state, roundKey):
"""Adds (XORs) the round key to the state."""
for i in range(16):
state[i] ^= roundKey[i]
return state
@staticmethod
def createRoundKey(expandedKey, roundKeyPointer):
"""Create a round key.
Creates a round key from the given expanded key and the
position within the expanded key.
"""
roundKey = [0] * 16
for i in range(4):
for j in range(4):
roundKey[j * 4 + i] = expandedKey[roundKeyPointer + i * 4 + j]
return roundKey
@staticmethod
def galois_multiplication(a, b):
"""Galois multiplication of 8 bit characters a and b."""
p = 0
for counter in range(8):
if b & 1: p ^= a
hi_bit_set = a & 0x80
a <<= 1
# keep a 8 bit
a &= 0xFF
if hi_bit_set:
a ^= 0x1b
b >>= 1
return p
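    # Worked example (from FIPS-197, section 4.2): galois_multiplication(0x57,
    # 0x13) evaluates to 0xfe, i.e. {57} . {13} = {fe} in GF(2^8).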
#
# substitute all the values from the state with the value in the SBox
# using the state value as index for the SBox
#
def subBytes(self, state, isInv):
if isInv:
getter = self.getSBoxInvert
else:
getter = self.getSBoxValue
for i in range(16):
state[i] = getter(state[i])
return state
# iterate over the 4 rows and call shiftRow() with that row
def shiftRows(self, state, isInv):
for i in range(4):
state = self.shiftRow(state, i * 4, i, isInv)
return state
# each iteration shifts the row to the left by 1
@staticmethod
def shiftRow(state, statePointer, nbr, isInv):
for i in range(nbr):
if isInv:
state[statePointer:statePointer + 4] = \
state[statePointer + 3:statePointer + 4] + \
state[statePointer:statePointer + 3]
else:
state[statePointer:statePointer + 4] = \
state[statePointer + 1:statePointer + 4] + \
state[statePointer:statePointer + 1]
return state
# galois multiplication of the 4x4 matrix
def mixColumns(self, state, isInv):
# iterate over the 4 columns
for i in range(4):
# construct one column by slicing over the 4 rows
column = state[i:i + 16:4]
# apply the mixColumn on one column
column = self.mixColumn(column, isInv)
# put the values back into the state
state[i:i + 16:4] = column
return state
# galois multiplication of 1 column of the 4x4 matrix
def mixColumn(self, column, isInv):
if isInv:
mult = [14, 9, 13, 11]
else:
mult = [2, 1, 1, 3]
cpy = list(column)
g = self.galois_multiplication
column[0] = g(cpy[0], mult[0]) ^ g(cpy[3], mult[1]) ^ \
g(cpy[2], mult[2]) ^ g(cpy[1], mult[3])
column[1] = g(cpy[1], mult[0]) ^ g(cpy[0], mult[1]) ^ \
g(cpy[3], mult[2]) ^ g(cpy[2], mult[3])
column[2] = g(cpy[2], mult[0]) ^ g(cpy[1], mult[1]) ^ \
g(cpy[0], mult[2]) ^ g(cpy[3], mult[3])
column[3] = g(cpy[3], mult[0]) ^ g(cpy[2], mult[1]) ^ \
g(cpy[1], mult[2]) ^ g(cpy[0], mult[3])
return column
# applies the 4 operations of the forward round in sequence
def aes_round(self, state, roundKey):
state = self.subBytes(state, False)
state = self.shiftRows(state, False)
state = self.mixColumns(state, False)
state = self.addRoundKey(state, roundKey)
return state
# applies the 4 operations of the inverse round in sequence
def aes_invRound(self, state, roundKey):
state = self.shiftRows(state, True)
state = self.subBytes(state, True)
state = self.addRoundKey(state, roundKey)
state = self.mixColumns(state, True)
return state
# Perform the initial operations, the standard round, and the final
# operations of the forward aes, creating a round key for each round
def aes_main(self, state, expandedKey, nbrRounds):
state = self.addRoundKey(state, self.createRoundKey(expandedKey, 0))
i = 1
while i < nbrRounds:
state = self.aes_round(state,
self.createRoundKey(expandedKey, 16 * i))
i += 1
state = self.subBytes(state, False)
state = self.shiftRows(state, False)
state = self.addRoundKey(
state, self.createRoundKey(expandedKey, 16 * nbrRounds))
return state
# Perform the initial operations, the standard round, and the final
# operations of the inverse aes, creating a round key for each round
def aes_invMain(self, state, expandedKey, nbrRounds):
state = self.addRoundKey(
state, self.createRoundKey(expandedKey, 16 * nbrRounds))
i = nbrRounds - 1
while i > 0:
state = self.aes_invRound(state,
self.createRoundKey(expandedKey, 16 * i))
i -= 1
state = self.shiftRows(state, True)
state = self.subBytes(state, True)
state = self.addRoundKey(state, self.createRoundKey(expandedKey, 0))
return state
# encrypts a 128 bit input block against the given key of size specified
def encrypt(self, iput, key, size):
output = [0] * 16
# the number of rounds
nbrRounds = 0
# the 128 bit block to encode
block = [0] * 16
# set the number of rounds
if size == self.keySize["SIZE_128"]:
nbrRounds = 10
elif size == self.keySize["SIZE_192"]:
nbrRounds = 12
elif size == self.keySize["SIZE_256"]:
nbrRounds = 14
else:
return None
# the expanded keySize
expandedKeySize = 16 * (nbrRounds + 1)
# Set the block values, for the block:
# a0,0 a0,1 a0,2 a0,3
# a1,0 a1,1 a1,2 a1,3
# a2,0 a2,1 a2,2 a2,3
# a3,0 a3,1 a3,2 a3,3
# the mapping order is a0,0 a1,0 a2,0 a3,0 a0,1 a1,1 ... a2,3 a3,3
#
# iterate over the columns
for i in range(4):
# iterate over the rows
for j in range(4):
block[(i + (j * 4))] = iput[(i * 4) + j]
        # expand the key into a 176, 208 or 240 byte key
# the expanded key
expandedKey = self.expandKey(key, size, expandedKeySize)
# encrypt the block using the expandedKey
block = self.aes_main(block, expandedKey, nbrRounds)
# unmap the block again into the output
for k in range(4):
# iterate over the rows
for l in range(4):
output[(k * 4) + l] = block[(k + (l * 4))]
return output
# decrypts a 128 bit input block against the given key of size specified
def decrypt(self, iput, key, size):
output = [0] * 16
# the number of rounds
nbrRounds = 0
# the 128 bit block to decode
block = [0] * 16
# set the number of rounds
if size == self.keySize["SIZE_128"]:
nbrRounds = 10
elif size == self.keySize["SIZE_192"]:
nbrRounds = 12
elif size == self.keySize["SIZE_256"]:
nbrRounds = 14
else:
return None
# the expanded keySize
expandedKeySize = 16 * (nbrRounds + 1)
# Set the block values, for the block:
# a0,0 a0,1 a0,2 a0,3
# a1,0 a1,1 a1,2 a1,3
# a2,0 a2,1 a2,2 a2,3
# a3,0 a3,1 a3,2 a3,3
# the mapping order is a0,0 a1,0 a2,0 a3,0 a0,1 a1,1 ... a2,3 a3,3
# iterate over the columns
for i in range(4):
# iterate over the rows
for j in range(4):
block[(i + (j * 4))] = iput[(i * 4) + j]
        # expand the key into a 176, 208 or 240 byte key
expandedKey = self.expandKey(key, size, expandedKeySize)
# decrypt the block using the expandedKey
block = self.aes_invMain(block, expandedKey, nbrRounds)
# unmap the block again into the output
for k in range(4):
# iterate over the rows
for l in range(4):
output[(k * 4) + l] = block[(k + (l * 4))]
return output
class AESModeOfOperation(object):
aes = AES()
# structure of supported modes of operation
modeOfOperation = dict(OFB=0, CFB=1, CBC=2)
# converts a 16 character string into a number array
def convertString(self, string, start, end, mode):
if end - start > 16: end = start + 16
if mode == self.modeOfOperation["CBC"]:
ar = [0] * 16
else:
ar = []
i = start
j = 0
while len(ar) < end - start:
ar.append(0)
while i < end:
ar[j] = ord(string[i])
j += 1
i += 1
return ar
    # Mode of Operation Encryption
    # stringIn - Input String
    # mode - mode of type modeOfOperation
    # key - a number array of the bit length size
    # size - the bit length of the key
    # IV - the 128 bit number array Initialization Vector
def encrypt(self, stringIn, mode, key, size, IV):
if len(key) % size:
return None
if len(IV) % 16:
return None
# the AES input/output
plaintext = []
iput = [0] * 16
output = []
ciphertext = [0] * 16
# the output cipher string
cipherOut = []
# char firstRound
firstRound = True
if stringIn is not None:
for j in range(int(math.ceil(float(len(stringIn)) / 16))):
start = j * 16
end = j * 16 + 16
if end > len(stringIn):
end = len(stringIn)
plaintext = self.convertString(stringIn, start, end, mode)
# print 'PT@%s:%s' % (j, plaintext)
if mode == self.modeOfOperation["CFB"]:
if firstRound:
output = self.aes.encrypt(IV, key, size)
firstRound = False
else:
output = self.aes.encrypt(iput, key, size)
for i in range(16):
if len(plaintext) - 1 < i:
ciphertext[i] = 0 ^ output[i]
elif len(output) - 1 < i:
ciphertext[i] = plaintext[i] ^ 0
elif len(plaintext) - 1 < i and len(output) < i:
ciphertext[i] = 0 ^ 0
else:
ciphertext[i] = plaintext[i] ^ output[i]
for k in range(end - start):
cipherOut.append(ciphertext[k])
iput = ciphertext
elif mode == self.modeOfOperation["OFB"]:
if firstRound:
output = self.aes.encrypt(IV, key, size)
firstRound = False
else:
output = self.aes.encrypt(iput, key, size)
for i in range(16):
if len(plaintext) - 1 < i:
ciphertext[i] = 0 ^ output[i]
elif len(output) - 1 < i:
ciphertext[i] = plaintext[i] ^ 0
elif len(plaintext) - 1 < i and len(output) < i:
ciphertext[i] = 0 ^ 0
else:
ciphertext[i] = plaintext[i] ^ output[i]
for k in range(end - start):
cipherOut.append(ciphertext[k])
iput = output
elif mode == self.modeOfOperation["CBC"]:
for i in range(16):
if firstRound:
iput[i] = plaintext[i] ^ IV[i]
else:
iput[i] = plaintext[i] ^ ciphertext[i]
# print 'IP@%s:%s' % (j, iput)
firstRound = False
ciphertext = self.aes.encrypt(iput, key, size)
# always 16 bytes because of the padding for CBC
for k in range(16):
cipherOut.append(ciphertext[k])
return mode, len(stringIn), cipherOut
# Mode of Operation Decryption
# cipherIn - Encrypted String
# originalsize - The unencrypted string length - required for CBC
# mode - mode of type modeOfOperation
# key - a number array of the bit length size
# size - the bit length of the key
    # IV - the 128 bit number array Initialization Vector
def decrypt(self, cipherIn, originalsize, mode, key, size, IV):
# cipherIn = unescCtrlChars(cipherIn)
if len(key) % size:
return None
if len(IV) % 16:
return None
# the AES input/output
ciphertext = []
iput = []
output = []
plaintext = [0] * 16
# the output plain text string
stringOut = ''
# char firstRound
firstRound = True
if cipherIn is not None:
for j in range(int(math.ceil(float(len(cipherIn)) / 16))):
start = j * 16
end = j * 16 + 16
if j * 16 + 16 > len(cipherIn):
end = len(cipherIn)
ciphertext = cipherIn[start:end]
if mode == self.modeOfOperation["CFB"]:
if firstRound:
output = self.aes.encrypt(IV, key, size)
firstRound = False
else:
output = self.aes.encrypt(iput, key, size)
for i in range(16):
if len(output) - 1 < i:
plaintext[i] = 0 ^ ciphertext[i]
elif len(ciphertext) - 1 < i:
plaintext[i] = output[i] ^ 0
elif len(output) - 1 < i and len(ciphertext) < i:
plaintext[i] = 0 ^ 0
else:
plaintext[i] = output[i] ^ ciphertext[i]
for k in range(end - start):
stringOut += chr(plaintext[k])
iput = ciphertext
elif mode == self.modeOfOperation["OFB"]:
if firstRound:
output = self.aes.encrypt(IV, key, size)
firstRound = False
else:
output = self.aes.encrypt(iput, key, size)
for i in range(16):
if len(output) - 1 < i:
plaintext[i] = 0 ^ ciphertext[i]
elif len(ciphertext) - 1 < i:
plaintext[i] = output[i] ^ 0
elif len(output) - 1 < i and len(ciphertext) < i:
plaintext[i] = 0 ^ 0
else:
plaintext[i] = output[i] ^ ciphertext[i]
for k in range(end - start):
stringOut += chr(plaintext[k])
iput = output
elif mode == self.modeOfOperation["CBC"]:
output = self.aes.decrypt(ciphertext, key, size)
for i in range(16):
if firstRound:
plaintext[i] = IV[i] ^ output[i]
else:
plaintext[i] = iput[i] ^ output[i]
firstRound = False
if originalsize is not None and originalsize < end:
for k in range(originalsize - start):
stringOut += chr(plaintext[k])
else:
for k in range(end - start):
stringOut += chr(plaintext[k])
iput = ciphertext
return stringOut
def encryptData(key, data, mode=AESModeOfOperation.modeOfOperation["CBC"]):
"""encrypt `data` using `key`
`key` should be a string of bytes.
returned cipher is a string of bytes prepended with the initialization
vector.
"""
key = map(ord, key)
if mode == AESModeOfOperation.modeOfOperation["CBC"]:
data = append_PKCS7_padding(data)
keysize = len(key)
assert keysize in AES.keySize.values(), 'invalid key size: %s' % keysize
# create a new iv using random data
iv = [ord(i) for i in os.urandom(16)]
moo = AESModeOfOperation()
(mode, length, ciph) = moo.encrypt(data, mode, key, keysize, iv)
# With padding, the original length does not need to be known. It's a bad
# idea to store the original message length.
# prepend the iv.
return ''.join(map(chr, iv)) + ''.join(map(chr, ciph))
def decryptData(key, data, mode=AESModeOfOperation.modeOfOperation["CBC"]):
"""decrypt `data` using `key`
`key` should be a string of bytes.
`data` should have the initialization vector prepended as a string of
ordinal values.
"""
key = map(ord, key)
keysize = len(key)
assert keysize in AES.keySize.values(), 'invalid key size: %s' % keysize
# iv is first 16 bytes
iv = map(ord, data[:16])
data = map(ord, data[16:])
moo = AESModeOfOperation()
decr = moo.decrypt(data, None, mode, key, keysize, iv)
if mode == AESModeOfOperation.modeOfOperation["CBC"]:
decr = strip_PKCS7_padding(decr)
return decr
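# A minimal usage sketch of the two helpers above (the key and message are
# hypothetical): CBC is the default mode, a fresh 16-byte IV is drawn from
# os.urandom and prepended to the returned ciphertext.
#   key = os.urandom(16)                      # a 16, 24 or 32 byte string
#   ciphertext = encryptData(key, "attack at dawn")
#   decryptData(key, ciphertext)              # -> "attack at dawn"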
if __name__ == "__main__":
moo = AESModeOfOperation()
cleartext = "This is a test!"
cypherkey = [143, 194, 34, 208, 145, 203, 230, 143, 177, 246, 97, 206, 145,
92, 255, 84]
iv = [103, 35, 148, 239, 76, 213, 47, 118, 255, 222, 123, 176, 106, 134, 98,
92]
mode, orig_len, ciph = moo.encrypt(cleartext, moo.modeOfOperation["CBC"],
cypherkey, moo.aes.keySize["SIZE_128"],
iv)
print 'm=%s, ol=%s (%s), ciph=%s' % (mode, orig_len, len(cleartext), ciph)
decr = moo.decrypt(ciph, orig_len, mode, cypherkey,
moo.aes.keySize["SIZE_128"], iv)
print decr

410
client/socks.py

@ -0,0 +1,410 @@
"""SocksiPy - Python SOCKS module.
Version 1.00
Copyright 2006 Dan-Haim. All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of Dan Haim nor the names of his contributors may be used
to endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY DAN HAIM "AS IS" AND ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
EVENT SHALL DAN HAIM OR HIS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA
OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
This module provides a standard socket-like interface for Python
for tunneling connections through SOCKS proxies.
"""
import socket
import struct
import random
PROXY_TYPE_SOCKS4 = 1
PROXY_TYPE_SOCKS5 = 2
PROXY_TYPE_HTTP = 3
_defaultproxy = None
_orgsocket = socket.socket
class ProxyError(IOError):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class GeneralProxyError(ProxyError):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class Socks5AuthError(ProxyError):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class Socks5Error(ProxyError):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class Socks4Error(ProxyError):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class HTTPError(ProxyError):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
_generalerrors = ("success", "invalid data", "not connected", "not available",
"bad proxy type", "bad input")
_socks5errors = ("succeeded", "general SOCKS server failure",
"connection not allowed by ruleset", "Network unreachable",
"Host unreachable", "Connection refused", "TTL expired",
"Command not supported", "Address type not supported",
"Unknown error")
_socks5autherrors = ("succeeded", "authentication is required",
"all offered authentication methods were rejected",
"unknown username or invalid password", "unknown error")
_socks4errors = (
"request granted", "request rejected or failed",
"request rejected because SOCKS server cannot connect to identd on the client",
"request rejected because the client program and identd report different user-ids",
"unknown error")
def setdefaultproxy(proxytype=None,
addr=None,
port=None,
rdns=True,
username=str(random.randrange(10000000, 99999999)),
password=str(random.randrange(10000000, 99999999))):
"""setdefaultproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
Sets a default proxy which all further socksocket objects will use,
unless explicitly changed.
"""
global _defaultproxy
_defaultproxy = (proxytype, addr, port, rdns, username, password)
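# A minimal usage sketch (the proxy endpoint 127.0.0.1:9050, e.g. a local Tor
# instance, is an assumption, not a module default): install a default SOCKS5
# proxy and route all subsequently created sockets through it.
#   setdefaultproxy(PROXY_TYPE_SOCKS5, "127.0.0.1", 9050)
#   socket.socket = socksocket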
class socksocket(socket.socket):
"""socksocket([family[, type[, proto]]]) -> socket object
Open a SOCKS enabled socket. The parameters are the same as
those of the standard socket init. In order for SOCKS to work,
you must specify family=AF_INET, type=SOCK_STREAM and proto=0.
"""
def __init__(self,
family=socket.AF_INET,
type=socket.SOCK_STREAM,
proto=0,
_sock=None):
_orgsocket.__init__(self, family, type, proto, _sock)
if _defaultproxy is not None:
self.__proxy = _defaultproxy
else:
self.__proxy = (None, None, None, None, None, None)
self.__proxysockname = None
self.__proxypeername = None
def __recvall(self, bytes):
"""__recvall(bytes) -> data
Receive EXACTLY the number of bytes requested from the socket.
Blocks until the required number of bytes have been received.
"""
data = ""
while len(data) < bytes:
data = data + self.recv(bytes - len(data))
return data
def setproxy(self,
proxytype=None,
addr=None,
port=None,
rdns=True,
username=None,
password=None):
"""setproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
Sets the proxy to be used.
proxytype - The type of the proxy to be used. Three types
are supported: PROXY_TYPE_SOCKS4 (including socks4a),
PROXY_TYPE_SOCKS5 and PROXY_TYPE_HTTP
addr - The address of the server (IP or DNS).
port - The port of the server. Defaults to 1080 for SOCKS
servers and 8080 for HTTP proxy servers.
        rdns - Should DNS queries be performed on the remote side
(rather than the local side). The default is True.
Note: This has no effect with SOCKS4 servers.
username - Username to authenticate with to the server.
The default is no authentication.
password - Password to authenticate with to the server.
Only relevant when username is also provided.
"""
self.__proxy = (proxytype, addr, port, rdns, username, password)
def __negotiatesocks5(self, destaddr, destport):
"""__negotiatesocks5(self,destaddr,destport)
Negotiates a connection through a SOCKS5 server.
"""
# First we'll send the authentication packages we support.
if (self.__proxy[4] is not None) and (self.__proxy[5] is not None):
# The username/password details were supplied to the
# setproxy method so we support the USERNAME/PASSWORD
# authentication (in addition to the standard none).
self.sendall("\x05\x02\x00\x02")
else:
# No username/password were entered, therefore we
# only support connections with no authentication.
self.sendall("\x05\x01\x00")
# We'll receive the server's response to determine which
# method was selected
chosenauth = self.__recvall(2)
if chosenauth[0] != "\x05":
self.close()
raise GeneralProxyError((1, _generalerrors[1]))
# Check the chosen authentication method
if chosenauth[1] == "\x00":
# No authentication is required
pass
elif chosenauth[1] == "\x02":
# Okay, we need to perform a basic username/password
# authentication.
self.sendall("\x01" + chr(len(self.__proxy[4])) + self.__proxy[4] +
chr(len(self.__proxy[5])) + self.__proxy[5])
authstat = self.__recvall(2)
if authstat[0] != "\x01":
# Bad response
self.close()
raise GeneralProxyError((1, _generalerrors[1]))
if authstat[1] != "\x00":
# Authentication failed
self.close()
                raise Socks5AuthError((3, _socks5autherrors[3]))
# Authentication succeeded
else:
# Reaching here is always bad
self.close()
if chosenauth[1] == "\xFF":
raise Socks5AuthError((2, _socks5autherrors[2]))
else:
raise GeneralProxyError((1, _generalerrors[1]))
# Now we can request the actual connection
req = "\x05\x01\x00"
# If the given destination address is an IP address, we'll
# use the IPv4 address request even if remote resolving was specified.
try:
ipaddr = socket.inet_aton(destaddr)
req = req + "\x01" + ipaddr
except socket.error:
# Well it's not an IP number, so it's probably a DNS name.
if self.__proxy[3]:
# Resolve remotely
ipaddr = None
req = req + "\x03" + chr(len(destaddr)) + destaddr
else:
# Resolve locally
ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
req = req + "\x01" + ipaddr
req += struct.pack(">H", destport)
self.sendall(req)
# Get the response
resp = self.__recvall(4)
if resp[0] != "\x05":
self.close()
raise GeneralProxyError((1, _generalerrors[1]))
elif resp[1] != "\x00":
# Connection failed
self.close()
raise Socks5Error(_socks5errors[min(9, ord(resp[1]))])
# Get the bound address/port
elif resp[3] == "\x01":
boundaddr = self.__recvall(4)
elif resp[3] == "\x03":
resp = resp + self.recv(1)
boundaddr = self.__recvall(resp[4])
else:
self.close()
raise GeneralProxyError((1, _generalerrors[1]))
boundport = struct.unpack(">H", self.__recvall(2))[0]
self.__proxysockname = (boundaddr, boundport)
if ipaddr is not None:
self.__proxypeername = (socket.inet_ntoa(ipaddr), destport)
else:
self.__proxypeername = (destaddr, destport)
def getproxysockname(self):
"""getsockname() -> address info
Returns the bound IP address and port number at the proxy.
"""
return self.__proxysockname
def getproxypeername(self):
"""getproxypeername() -> address info
Returns the IP and port number of the proxy.
"""
return _orgsocket.getpeername(self)
def getpeername(self):
"""getpeername() -> address info
Returns the IP address and port number of the destination
machine (note: getproxypeername returns the proxy)
"""
return self.__proxypeername
def __negotiatesocks4(self, destaddr, destport):
"""__negotiatesocks4(self,destaddr,destport)
Negotiates a connection through a SOCKS4 server.
"""
# Check if the destination address provided is an IP address
rmtrslv = False
try:
ipaddr = socket.inet_aton(destaddr)
except socket.error:
# It's a DNS name. Check where it should be resolved.
if self.__proxy[3]:
ipaddr = "\x00\x00\x00\x01"
rmtrslv = True
else:
ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
# Construct the request packet
req = "\x04\x01" + struct.pack(">H", destport) + ipaddr
# The username parameter is considered userid for SOCKS4
if self.__proxy[4] is not None:
req = req + self.__proxy[4]
req += "\x00"
# DNS name if remote resolving is required
# NOTE: This is actually an extension to the SOCKS4 protocol
# called SOCKS4A and may not be supported in all cases.
if rmtrslv:
req = req + destaddr + "\x00"
self.sendall(req)
# Get the response from the server
resp = self.__recvall(8)
if resp[0] != "\x00":
# Bad data
self.close()
raise GeneralProxyError((1, _generalerrors[1]))
if resp[1] != "\x5A":
# Server returned an error
self.close()
if ord(resp[1]) in (91, 92, 93):
self.close()
raise Socks4Error((ord(resp[1]), _socks4errors[ord(resp[1]) -
90]))
else:
raise Socks4Error((94, _socks4errors[4]))
# Get the bound address/port
self.__proxysockname = (socket.inet_ntoa(resp[4:]), struct.unpack(
">H", resp[2:4])[0])
        # report the real IP only if it was resolved locally; with SOCKS4A
        # remote resolving, ipaddr is just a placeholder
        if not rmtrslv:
self.__proxypeername = (socket.inet_ntoa(ipaddr), destport)
else:
self.__proxypeername = (destaddr, destport)
def __negotiatehttp(self, destaddr, destport):
"""__negotiatehttp(self,destaddr,destport)
Negotiates a connection through an HTTP server.
"""
# If we need to resolve locally, we do this now
if not self.__proxy[3]:
addr = socket.gethostbyname(destaddr)
else:
addr = destaddr
self.sendall("CONNECT " + addr + ":" + str(destport) + " HTTP/1.1\r\n" +
"Host: " + destaddr + "\r\n\r\n")
# We read the response until we get the string "\r\n\r\n"
resp = self.recv(1)
while resp.find("\r\n\r\n") == -1:
resp = resp + self.recv(1)
# We just need the first line to check if the connection
# was successful
statusline = resp.splitlines()[0].split(" ", 2)
if statusline[0] not in ("HTTP/1.0", "HTTP/1.1"):
self.close()
raise GeneralProxyError((1, _generalerrors[1]))
try:
statuscode = int(statusline[1])
except ValueError:
self.close()
raise GeneralProxyError((1, _generalerrors[1]))
if statuscode != 200:
self.close()
raise HTTPError((statuscode, statusline[2]))
self.__proxysockname = ("0.0.0.0", 0)
self.__proxypeername = (addr, destport)
def connect(self, destpair):
"""connect(self,despair)
Connects to the specified destination through a proxy.
destpar - A tuple of the IP/DNS address and the port number.
(identical to socket's connect).
To select the proxy server use setproxy().
"""
# Do a minimal input check first
        if (not isinstance(destpair, (list, tuple))) or (len(destpair) < 2) or (
                type(destpair[0]) != str) or (type(destpair[1]) != int):
raise GeneralProxyError((5, _generalerrors[5]))
if self.__proxy[0] == PROXY_TYPE_SOCKS5:
if self.__proxy[2] is not None:
portnum = self.__proxy[2]
else:
portnum = 1080
_orgsocket.connect(self, (self.__proxy[1], portnum))
self.__negotiatesocks5(destpair[0], destpair[1])
elif self.__proxy[0] == PROXY_TYPE_SOCKS4:
if self.__proxy[2] is not None:
portnum = self.__proxy[2]
else:
portnum = 1080
_orgsocket.connect(self, (self.__proxy[1], portnum))
self.__negotiatesocks4(destpair[0], destpair[1])
elif self.__proxy[0] == PROXY_TYPE_HTTP:
if self.__proxy[2] is not None:
portnum = self.__proxy[2]
else:
portnum = 8080
_orgsocket.connect(self, (self.__proxy[1], portnum))
self.__negotiatehttp(destpair[0], destpair[1])
elif self.__proxy[0] is None:
_orgsocket.connect(self, (destpair[0], destpair[1]))
else:
raise GeneralProxyError((4, _generalerrors[4]))
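# A minimal usage sketch (host, port and proxy endpoint are hypothetical):
#   s = socksocket()
#   s.setproxy(PROXY_TYPE_SOCKS5, "127.0.0.1", 9050, rdns=True)
#   s.connect(("example.com", 80))
#   s.sendall("GET / HTTP/1.0\r\nHost: example.com\r\n\r\n")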

429
client/support.py

@ -0,0 +1,429 @@
from __future__ import absolute_import, print_function
import sys
import logging
import pprint
import random
from decimal import Decimal
from math import exp
# todo: this was the date format used in the original debug(). Use it?
# logging.basicConfig(filename='logs/joinmarket.log',
# stream=sys.stdout,
# level=logging.DEBUG,
# format='%(asctime)s %(message)s',
# dateformat='[%Y/%m/%d %H:%M:%S] ')
logFormatter = logging.Formatter(
"%(asctime)s [%(threadName)-12.12s] [%(levelname)-5.5s] %(message)s")
log = logging.getLogger('joinmarket')
log.setLevel(logging.DEBUG)
ORDER_KEYS = ['counterparty', 'oid', 'ordertype', 'minsize', 'maxsize', 'txfee',
'cjfee']
joinmarket_alert = ['']
core_alert = ['']
debug_silence = [False]
#consoleHandler = logging.StreamHandler(stream=sys.stdout)
class JoinMarketStreamHandler(logging.StreamHandler):
def __init__(self, stream):
super(JoinMarketStreamHandler, self).__init__(stream)
def emit(self, record):
if joinmarket_alert[0]:
print('JoinMarket Alert Message: ' + joinmarket_alert[0])
if core_alert[0]:
print('Core Alert Message: ' + core_alert[0])
if not debug_silence[0]:
super(JoinMarketStreamHandler, self).emit(record)
consoleHandler = JoinMarketStreamHandler(stream=sys.stdout)
consoleHandler.setFormatter(logFormatter)
log.addHandler(consoleHandler)
# log = logging.getLogger('joinmarket')
# log.addHandler(logging.NullHandler())
log.debug('hello joinmarket')
def get_log():
"""
provides joinmarket logging instance
:return: log instance
"""
return log
"""
Random functions - replacing some NumPy features
NOTE THESE ARE NEITHER CRYPTOGRAPHICALLY SECURE
NOR PERFORMANT NOR HIGH PRECISION!
Only for sampling purposes
"""
def rand_norm_array(mu, sigma, n):
# use normalvariate instead of gauss for thread safety
return [random.normalvariate(mu, sigma) for _ in range(n)]
def rand_exp_array(lamda, n):
# 'lambda' is reserved (in case you are triggered by spelling errors)
return [random.expovariate(1.0 / lamda) for _ in range(n)]
def rand_pow_array(power, n):
    # rather crude in that it uses a uniform sample which is a multiple of 1e-4
# for basis of formula, see: http://mathworld.wolfram.com/RandomNumber.html
return [y**(1.0 / power)
for y in [x * 0.0001 for x in random.sample(
xrange(10000), n)]]
def rand_weighted_choice(n, p_arr):
"""
Choose a value in 0..n-1
with the choice weighted by the probabilities
in the list p_arr. Note that there will be some
floating point rounding errors, but see the note
at the top of this section.
"""
if abs(sum(p_arr) - 1.0) > 1e-4:
raise ValueError("Sum of probabilities must be 1")
if len(p_arr) != n:
raise ValueError("Need: " + str(n) + " probabilities.")
cum_pr = [sum(p_arr[:i + 1]) for i in xrange(len(p_arr))]
r = random.random()
return sorted(cum_pr + [r]).index(r)
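# Illustrative call (hypothetical probabilities): rand_weighted_choice(3,
# [0.5, 0.3, 0.2]) returns 0 about half the time, 1 about 30% of the time and
# 2 about 20% of the time.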
# End random functions
def chunks(d, n):
return [d[x:x + n] for x in xrange(0, len(d), n)]
def select(unspent, value):
"""Default coin selection algorithm.
"""
value = int(value)
high = [u for u in unspent if u["value"] >= value]
high.sort(key=lambda u: u["value"])
low = [u for u in unspent if u["value"] < value]
low.sort(key=lambda u: -u["value"])
if len(high):
return [high[0]]
i, tv = 0, 0
while tv < value and i < len(low):
tv += low[i]["value"]
i += 1
if tv < value:
raise Exception("Not enough funds")
return low[:i]
def select_gradual(unspent, value):
"""
UTXO selection algorithm for gradual dust reduction
If possible, combines outputs, picking as few as possible of the largest
utxos less than the target value; if the target value is larger than the
sum of all smaller utxos, uses the smallest utxo larger than the value.
"""
value, key = int(value), lambda u: u["value"]
high = sorted([u for u in unspent if key(u) >= value], key=key)
low = sorted([u for u in unspent if key(u) < value], key=key)
lowsum = reduce(lambda x, y: x + y, map(key, low), 0)
if value > lowsum:
if len(high) == 0:
raise Exception('Not enough funds')
else:
return [high[0]]
else:
start, end, total = 0, 0, 0
while total < value:
total += low[end]['value']
end += 1
while total >= value + low[start]['value']:
total -= low[start]['value']
start += 1
return low[start:end]
def select_greedy(unspent, value):
"""
UTXO selection algorithm for greedy dust reduction, but leaves out
extraneous utxos, preferring to keep multiple small ones.
"""
value, key, cursor = int(value), lambda u: u['value'], 0
utxos, picked = sorted(unspent, key=key), []
for utxo in utxos: # find the smallest consecutive sum >= value
value -= key(utxo)
if value == 0: # perfect match! (skip dilution stage)
return utxos[0:cursor + 1] # end is non-inclusive
elif value < 0: # overshot
picked += [utxo] # definitely need this utxo
break # proceed to dilution
cursor += 1
for utxo in utxos[cursor - 1::-1]: # dilution loop
value += key(utxo) # see if we can skip this one
if value > 0: # no, that drops us below the target
picked += [utxo] # so we need this one too
value -= key(utxo) # 'backtrack' the counter
if len(picked) > 0:
return picked
raise Exception('Not enough funds') # if all else fails, we do too
def select_greediest(unspent, value):
"""
    UTXO selection algorithm for greediest dust reduction
Combines the shortest run of utxos (sorted by size, from smallest) which
exceeds the target value; if the target value is larger than the sum of
all smaller utxos, uses the smallest utxo larger than the target value.
"""
value, key = int(value), lambda u: u["value"]
high = sorted([u for u in unspent if key(u) >= value], key=key)
low = sorted([u for u in unspent if key(u) < value], key=key)
lowsum = reduce(lambda x, y: x + y, map(key, low), 0)
if value > lowsum:
if len(high) == 0:
raise Exception('Not enough funds')
else:
return [high[0]]
else:
end, total = 0, 0
while total < value:
total += low[end]['value']
end += 1
return low[0:end]
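# Illustrative comparison of the four selectors on a hypothetical utxo set
# (values in satoshi):
#   unspent = [{'value': v} for v in (10000, 20000, 50000, 100000)]
#   select(unspent, 60000)           -> [{'value': 100000}]
#   select_gradual(unspent, 60000)   -> [{'value': 20000}, {'value': 50000}]
#   select_greedy(unspent, 60000)    -> [{'value': 50000}, {'value': 10000}]
#   select_greediest(unspent, 60000) -> [{'value': 10000}, {'value': 20000},
#                                        {'value': 50000}]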
def calc_cj_fee(ordertype, cjfee, cj_amount):
if ordertype == 'absoffer':
real_cjfee = int(cjfee)
elif ordertype == 'reloffer':
real_cjfee = int((Decimal(cjfee) * Decimal(cj_amount)).quantize(Decimal(
1)))
else:
raise RuntimeError('unknown order type: ' + str(ordertype))
return real_cjfee
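# Worked examples (hypothetical fees): calc_cj_fee('absoffer', 500, 10**6)
# returns 500, while calc_cj_fee('reloffer', '0.002', 10**6) returns
# int(Decimal('0.002') * 10**6) = 2000.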
def weighted_order_choose(orders, n):
"""
Algorithm for choosing the weighting function
it is an exponential
P(f) = exp(-(f - fmin) / phi)
P(f) - probability of order being chosen
f - order fee
fmin - minimum fee in the order book
    phi - scaling parameter; 63% of the weight lies within phi of fmin
    define a number M = 3*n, related to the number of counterparties in this
    coinjoin; phi is set so that the distribution reaches up to the Mth
    cheapest order, unless M exceeds the orderbook size, in which case phi
    goes up to the last order
"""
minfee = orders[0][1]
M = int(3 * n)
if len(orders) > M:
phi = orders[M][1] - minfee
else:
phi = orders[-1][1] - minfee
fee = [o[1] for o in orders]
if phi > 0:
weight = [exp(-(1.0 * f - minfee) / phi) for f in fee]
else:
weight = [1.0] * len(fee)
weight = [x / sum(weight) for x in weight]
log.debug('phi=' + str(phi) + ' weights = ' + str(weight))
chosen_order_index = rand_weighted_choice(len(orders), weight)
return orders[chosen_order_index]
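# Illustrative weighting (hypothetical fee column, orders sorted by fee as the
# callers ensure): with fees [0, 100, 200, 300, 400] and n = 1, M = 3 and
# phi = 300, so the raw weights exp(-f / 300) are roughly
# [1.00, 0.72, 0.51, 0.37, 0.26] before normalisation.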
def cheapest_order_choose(orders, n):
"""
Return the cheapest order from the orders.
"""
return orders[0]
def pick_order(orders, n):
print("Considered orders:")
for i, o in enumerate(orders):
print(" %2d. %20s, CJ fee: %6s, tx fee: %6d" %
(i, o[0]['counterparty'], str(o[0]['cjfee']), o[0]['txfee']))
pickedOrderIndex = -1
if i == 0:
print("Only one possible pick, picking it.")
return orders[0]
while pickedOrderIndex == -1:
try:
pickedOrderIndex = int(raw_input('Pick an order between 0 and ' +
str(i) + ': '))
except ValueError:
pickedOrderIndex = -1
continue
if 0 <= pickedOrderIndex < len(orders):
return orders[pickedOrderIndex]
pickedOrderIndex = -1
def choose_orders(offers, cj_amount, n, chooseOrdersBy, ignored_makers=None):
if ignored_makers is None:
ignored_makers = []
#Filter ignored makers and inappropriate amounts
orders = [o for o in offers if o['counterparty'] not in ignored_makers]
orders = [o for o in orders if o['minsize'] < cj_amount]
orders = [o for o in orders if o['maxsize'] > cj_amount]
orders_fees = [(
o, calc_cj_fee(o['ordertype'], o['cjfee'], cj_amount) - o['txfee'])
for o in orders]
counterparties = set([o['counterparty'] for o in orders])
if n > len(counterparties):
log.debug(('ERROR not enough liquidity in the orderbook n=%d '
'suitable-counterparties=%d amount=%d totalorders=%d') %
(n, len(counterparties), cj_amount, len(orders)))
# TODO handle not enough liquidity better, maybe an Exception
return None, 0
"""
restrict to one order per counterparty, choose the one with the lowest
cjfee this is done in advance of the order selection algo, so applies to
all of them. however, if orders are picked manually, allow duplicates.
"""
feekey = lambda x: x[1]
if chooseOrdersBy != pick_order:
orders_fees = sorted(
dict((v[0]['counterparty'], v)
for v in sorted(orders_fees,
key=feekey,
reverse=True)).values(),
key=feekey)
else:
orders_fees = sorted(orders_fees, key=feekey) #sort by ascending cjfee
log.debug('considered orders = \n' + '\n'.join([str(o) for o in orders_fees
]))
total_cj_fee = 0
chosen_orders = []
for i in range(n):
chosen_order, chosen_fee = chooseOrdersBy(orders_fees, n)
# remove all orders from that same counterparty
orders_fees = [o
for o in orders_fees
if o[0]['counterparty'] != chosen_order['counterparty']]
chosen_orders.append(chosen_order)
total_cj_fee += chosen_fee
log.debug('chosen orders = \n' + '\n'.join([str(o) for o in chosen_orders]))
result = dict([(o['counterparty'], o) for o in chosen_orders])
return result, total_cj_fee
def choose_sweep_orders(db,
total_input_value,
txfee,
n,
chooseOrdersBy,
ignored_makers=None):
"""
choose an order given that we want to be left with no change
i.e. sweep an entire group of utxos
solve for cjamount when mychange = 0
for an order with many makers, a mixture of absoffer and reloffer
mychange = totalin - cjamount - total_txfee - sum(absfee) - sum(relfee*cjamount)
=> 0 = totalin - mytxfee - sum(absfee) - cjamount*(1 + sum(relfee))
=> cjamount = (totalin - mytxfee - sum(absfee)) / (1 + sum(relfee))
"""
total_txfee = txfee * n
if ignored_makers is None:
ignored_makers = []
def calc_zero_change_cj_amount(ordercombo):
sumabsfee = 0
sumrelfee = Decimal('0')
sumtxfee_contribution = 0
for order in ordercombo:
sumtxfee_contribution += order['txfee']
if order['ordertype'] == 'absoffer':
sumabsfee += int(order['cjfee'])
elif order['ordertype'] == 'reloffer':
sumrelfee += Decimal(order['cjfee'])
else:
raise RuntimeError('unknown order type: {}'.format(order[
'ordertype']))
my_txfee = max(total_txfee - sumtxfee_contribution, 0)
cjamount = (total_input_value - my_txfee - sumabsfee) / (1 + sumrelfee)
cjamount = int(cjamount.quantize(Decimal(1)))
return cjamount, int(sumabsfee + sumrelfee * cjamount)
log.debug('choosing sweep orders for total_input_value = ' + str(
total_input_value) + ' n=' + str(n))
sqlorders = db.execute('SELECT * FROM orderbook WHERE minsize <= ?;',
(total_input_value,)).fetchall()
orderlist = [dict([(k, o[k]) for k in ORDER_KEYS])
for o in sqlorders if o['counterparty'] not in ignored_makers]
log.debug('orderlist = \n' + '\n'.join([str(o) for o in orderlist]))
orders_fees = [(o, calc_cj_fee(o['ordertype'], o['cjfee'],
total_input_value)) for o in orderlist]
feekey = lambda x: x[1]
# sort from smallest to biggest cj fee
orders_fees = sorted(orders_fees, key=feekey)
chosen_orders = []
while len(chosen_orders) < n:
for i in range(n - len(chosen_orders)):
if len(orders_fees) < n - len(chosen_orders):
log.debug('ERROR not enough liquidity in the orderbook')
# TODO handle not enough liquidity better, maybe an Exception
return None, 0, 0
chosen_order, chosen_fee = chooseOrdersBy(orders_fees, n)
log.debug('chosen = ' + str(chosen_order))
# remove all orders from that same counterparty
orders_fees = [
o
for o in orders_fees
if o[0]['counterparty'] != chosen_order['counterparty']
]
chosen_orders.append(chosen_order)
# calc cj_amount and check it's in range
cj_amount, total_fee = calc_zero_change_cj_amount(chosen_orders)
for c in list(chosen_orders):
minsize = c['minsize']
maxsize = c['maxsize']
if cj_amount > maxsize or cj_amount < minsize:
chosen_orders.remove(c)
log.debug('chosen orders = \n' + '\n'.join([str(o) for o in chosen_orders]))
result = dict([(o['counterparty'], o) for o in chosen_orders])
log.debug('cj amount = ' + str(cj_amount))
return result, cj_amount, total_fee
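#Worked sketch of the zero-change formula above, with purely hypothetical
#numbers: one absoffer and one reloffer maker, mirroring the arithmetic of
#calc_zero_change_cj_amount.
def _example_zero_change_cj_amount():
    from decimal import Decimal
    total_input_value = 100000000  #1 BTC of inputs (hypothetical)
    total_txfee = 2 * 5000
    ordercombo = [{'ordertype': 'absoffer', 'cjfee': '3000', 'txfee': 1000},
                  {'ordertype': 'reloffer', 'cjfee': '0.002', 'txfee': 1000}]
    sumabsfee = sum(int(o['cjfee']) for o in ordercombo
                    if o['ordertype'] == 'absoffer')
    sumrelfee = sum((Decimal(o['cjfee']) for o in ordercombo
                     if o['ordertype'] == 'reloffer'), Decimal('0'))
    sumtxfee_contribution = sum(o['txfee'] for o in ordercombo)
    my_txfee = max(total_txfee - sumtxfee_contribution, 0)
    cjamount = (total_input_value - my_txfee - sumabsfee) / (1 + sumrelfee)
    cjamount = int(cjamount.quantize(Decimal(1)))
    #cjamount comes out at 99789421 sat; total fee is 3000 + 0.002*cjamount
    return cjamount, int(sumabsfee + sumrelfee * cjamount)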
def debug_dump_object(obj, skip_fields=None):
if skip_fields is None:
skip_fields = []
log.debug('Class debug dump, name:' + obj.__class__.__name__)
for k, v in obj.__dict__.iteritems():
if k in skip_fields:
continue
if k == 'password' or k == 'given_password':
continue
log.debug('key=' + k)
if isinstance(v, str):
log.debug('string: len:' + str(len(v)))
log.debug(v)
elif isinstance(v, dict) or isinstance(v, list):
log.debug(pprint.pformat(v))
else:
log.debug(str(v))

562
client/taker.py

@ -0,0 +1,562 @@
#! /usr/bin/env python
from __future__ import print_function
import base64
import pprint
import random
import sys
import time
import copy
import btc
from joinmarketclient.configure import jm_single, get_p2pk_vbyte, donation_address
from joinmarketclient.support import (get_log, calc_cj_fee, weighted_order_choose,
choose_orders)
from joinmarketclient.wallet import estimate_tx_fee
from joinmarketclient.podle import (generate_podle, get_podle_commitments,
PoDLE, PoDLEError)
jlog = get_log()
class JMTakerError(Exception):
pass
#Taker is now a class to do 1 coinjoin
class Taker(object):
def __init__(self,
wallet,
mixdepth,
amount,
n_counterparties,
order_chooser=weighted_order_choose,
external_addr=None,
sign_method=None,
callbacks=None):
self.wallet = wallet
self.mixdepth = mixdepth
self.cjamount = amount
self.my_cj_addr = external_addr
self.order_chooser = order_chooser
self.n_counterparties = n_counterparties
self.ignored_makers = None
self.outputs = []
self.cjfee_total = 0
self.maker_txfee_contributions = 0
self.txfee_default = 5000
self.txid = None
#allow custom wallet-based clients to use their own signing code;
#currently only setting "wallet" is allowed, calls wallet.sign_tx(tx)
self.sign_method = sign_method
if callbacks:
self.filter_orders_callback, self.taker_info_callback = callbacks
else:
self.filter_orders_callback = None
self.taker_info_callback = self.default_taker_info_callback
def default_taker_info_callback(self, infotype, msg):
jlog.debug(infotype + ":" + msg)
def initialize(self, orderbook):
"""Once the daemon is active and has returned the current orderbook,
select offers and prepare a commitment, then send it to the protocol
to fill offers.
"""
if not self.filter_orderbook(orderbook):
return (False,)
#choose coins to spend
if not self.prepare_my_bitcoin_data():
return (False,)
#Prepare a commitment
commitment, revelation, errmsg = self.make_commitment()
if not commitment:
self.taker_info_callback("ABORT", errmsg)
return (False,)
else:
self.taker_info_callback("INFO", errmsg)
return (True, self.cjamount, commitment, revelation, self.orderbook)
def filter_orderbook(self, orderbook):
self.orderbook, self.total_cj_fee = choose_orders(
orderbook, self.cjamount, self.n_counterparties, self.order_chooser,
self.ignored_makers)
if self.filter_orders_callback:
accepted = self.filter_orders_callback([self.orderbook,
self.total_cj_fee])
if not accepted:
return False
return True
def prepare_my_bitcoin_data(self):
"""Get a coinjoin address and a change address; prepare inputs
appropriate for this transaction"""
if not self.my_cj_addr:
try:
self.my_cj_addr = self.wallet.get_external_addr(self.mixdepth + 1)
except:
self.taker_info_callback("ABORT", "Failed to get an address")
return False
self.my_change_addr = None
if self.cjamount != 0:
try:
self.my_change_addr = self.wallet.get_internal_addr(self.mixdepth)
except:
self.taker_info_callback("ABORT", "Failed to get a change address")
return False
#TODO sweep, doesn't apply here
self.total_txfee = 2 * self.txfee_default * self.n_counterparties
total_amount = self.cjamount + self.total_cj_fee + self.total_txfee
jlog.debug('total estimated amount spent = ' + str(total_amount))
#adjust the required amount upwards to anticipate an increase in
#transaction fees after re-estimation; this is sufficiently conservative
#to make failures unlikely, while keeping the occurrence of failing to
#find sufficient utxos extremely rare. Indeed, a doubling of 'normal'
#txfee indicates undesirable behaviour on the maker side anyway.
try:
self.input_utxos = self.wallet.select_utxos(self.mixdepth,
total_amount)
except Exception as e:
self.taker_info_callback("ABORT",
"Unable to select sufficient coins: " + repr(e))
return False
self.utxos = {None: self.input_utxos.keys()}
return True
def receive_utxos(self, ioauth_data):
"""Triggered when the daemon returns utxo data from
makers who responded; this is the completion of phase 1
of the protocol
"""
rejected_counterparties = []
#Enough data, but need to authorize against the btc pubkey first.
for nick, nickdata in ioauth_data.iteritems():
utxo_list, auth_pub, cj_addr, change_addr, btc_sig, maker_pk = nickdata
if not self.auth_counterparty(btc_sig, auth_pub, maker_pk):
print("Counterparty encryption verification failed, aborting")
#This counterparty must be rejected
rejected_counterparties.append(nick)
for rc in rejected_counterparties:
del ioauth_data[rc]
self.maker_utxo_data = {}
for nick, nickdata in ioauth_data.iteritems():
utxo_list, auth_pub, cj_addr, change_addr, btc_sig, maker_pk = nickdata
self.utxos[nick] = utxo_list
utxo_data = jm_single().bc_interface.query_utxo_set(self.utxos[
nick])
if None in utxo_data:
jlog.debug(('ERROR outputs unconfirmed or already spent. '
'utxo_data={}').format(pprint.pformat(utxo_data)))
# when internal reviewing of makers is created, add it here to
# immediately quit; currently, the timeout thread suffices.
continue
#Complete maker authorization:
#Extract the address fields from the utxos
#Construct the Bitcoin address for the auth_pub field
#Ensure that at least one address from utxos corresponds.
input_addresses = [d['address'] for d in utxo_data]
auth_address = btc.pubkey_to_address(auth_pub, get_p2pk_vbyte())
if not auth_address in input_addresses:
jlog.warn("ERROR maker's (" + nick + ")"
" authorising pubkey is not included "
"in the transaction: " + str(auth_address))
#this will not be added to the transaction, so we will have
#to recheck if we have enough
continue
total_input = sum([d['value'] for d in utxo_data])
real_cjfee = calc_cj_fee(self.orderbook[nick]['ordertype'],
self.orderbook[nick]['cjfee'],
self.cjamount)
change_amount = (total_input - self.cjamount -
self.orderbook[nick]['txfee'] + real_cjfee)
# certain malicious and/or incompetent liquidity providers send
# inputs totalling less than the coinjoin amount! this leads to
# a change output of zero satoshis; this counterparty must be removed.
if change_amount < jm_single().DUST_THRESHOLD:
fmt = ('ERROR counterparty requires sub-dust change. nick={} '
'totalin={:d} cjamount={:d} change={:d}').format
jlog.debug(fmt(nick, total_input, self.cjamount, change_amount))
jlog.warn("Invalid change, too small, nick= " + nick)
continue
self.outputs.append({'address': change_addr,
'value': change_amount})
fmt = ('fee breakdown for {} totalin={:d} '
'cjamount={:d} txfee={:d} realcjfee={:d}').format
jlog.debug(fmt(nick, total_input, self.cjamount, self.orderbook[
nick]['txfee'], real_cjfee))
self.outputs.append({'address': cj_addr, 'value': self.cjamount})
self.cjfee_total += real_cjfee
self.maker_txfee_contributions += self.orderbook[nick]['txfee']
self.maker_utxo_data[nick] = utxo_data
#Apply business logic of how many counterparties are enough:
if len(self.maker_utxo_data.keys()) < jm_single().config.getint(
"POLICY", "minimum_makers"):
return (False,
"Not enough counterparties responded to fill, giving up")
jlog.info('got all parts, enough to build a tx')
self.nonrespondants = list(self.maker_utxo_data.keys())
my_total_in = sum([va['value'] for u, va in self.input_utxos.iteritems()
])
if self.my_change_addr:
#Estimate fee per choice of next/3/6 blocks targeting.
estimated_fee = estimate_tx_fee(
len(sum(self.utxos.values(), [])), len(self.outputs) + 2)
jlog.info("Based on initial guess: " + str(self.total_txfee) +
", we estimated a miner fee of: " + str(estimated_fee))
#reset total
self.total_txfee = estimated_fee
my_txfee = max(self.total_txfee - self.maker_txfee_contributions, 0)
my_change_value = (
my_total_in - self.cjamount - self.cjfee_total - my_txfee)
#Since we could not predict the makers' inputs, we may end up needing
#more than expected, such that the change value is negative or small.
#Note that we have tried to avoid this by over-estimating the needed
#amount in SendPayment.create_tx(), but it is still a possibility if
#one maker uses a *lot* of inputs.
if self.my_change_addr and my_change_value <= 0:
raise ValueError("Calculated transaction fee of: " + str(
self.total_txfee) +
" is too large for our inputs;Please try again.")
elif self.my_change_addr and my_change_value <= jm_single(
).BITCOIN_DUST_THRESHOLD:
jlog.info("Dynamically calculated change lower than dust: " + str(
my_change_value) + "; dropping.")
self.my_change_addr = None
my_change_value = 0
jlog.info(
'fee breakdown for me totalin=%d my_txfee=%d makers_txfee=%d cjfee_total=%d => changevalue=%d'
% (my_total_in, my_txfee, self.maker_txfee_contributions,
self.cjfee_total, my_change_value))
if self.my_change_addr is None:
if my_change_value != 0 and abs(my_change_value) != 1:
# seems you won't always get exactly zero because of integer
# rounding, so 1 satoshi more or less being spent as miner
# fees is acceptable
jlog.debug(('WARNING CHANGE NOT BEING '
'USED\nCHANGEVALUE = {}').format(my_change_value))
else:
self.outputs.append({'address': self.my_change_addr,
'value': my_change_value})
self.utxo_tx = [dict([('output', u)])
for u in sum(self.utxos.values(), [])]
self.outputs.append({'address': self.coinjoin_address(),
'value': self.cjamount})
random.shuffle(self.utxo_tx)
random.shuffle(self.outputs)
tx = btc.mktx(self.utxo_tx, self.outputs)
jlog.debug('obtained tx\n' + pprint.pformat(btc.deserialize(tx)))
self.latest_tx = btc.deserialize(tx)
for index, ins in enumerate(self.latest_tx['ins']):
utxo = ins['outpoint']['hash'] + ':' + str(ins['outpoint']['index'])
if utxo not in self.input_utxos.keys():
continue
# placeholders required
ins['script'] = 'deadbeef'
return (True, self.maker_utxo_data.keys(), tx)
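#Worked numeric sketch of the per-maker change arithmetic above (values
#hypothetical): with total_input = 120000000, cjamount = 100000000, the
#maker's txfee contribution = 1000 and real_cjfee = 200000, the change is
#120000000 - 100000000 - 1000 + 200000 = 20199000 satoshis, comfortably
#above the dust threshold, so this maker's change output is accepted.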
def auth_counterparty(self, btc_sig, auth_pub, maker_pk):
"""Validate the counterpartys claim to own the btc
address/pubkey that will be used for coinjoining
with an ecdsa verification.
"""
if not btc.ecdsa_verify(maker_pk, btc_sig, auth_pub):
jlog.debug('signature didnt match pubkey and message')
return False
return True
def on_sig(self, nick, sigb64):
sig = base64.b64decode(sigb64).encode('hex')
inserted_sig = False
txhex = btc.serialize(self.latest_tx)
# batch retrieval of utxo data
utxo = {}
ctr = 0
for index, ins in enumerate(self.latest_tx['ins']):
utxo_for_checking = ins['outpoint']['hash'] + ':' + str(ins[
'outpoint']['index'])
if (ins['script'] != '' or
utxo_for_checking in self.input_utxos.keys()):
continue
utxo[ctr] = [index, utxo_for_checking]
ctr += 1
utxo_data = jm_single().bc_interface.query_utxo_set([x[
1] for x in utxo.values()])
# insert signatures
for i, u in utxo.iteritems():
if utxo_data[i] is None:
continue
sig_good = btc.verify_tx_input(txhex, u[0], utxo_data[i]['script'],
*btc.deserialize_script(sig))
if sig_good:
jlog.debug('found good sig at index=%d' % (u[0]))
self.latest_tx['ins'][u[0]]['script'] = sig
inserted_sig = True
# check if maker has sent everything possible
self.utxos[nick].remove(u[1])
if len(self.utxos[nick]) == 0:
jlog.debug(('nick = {} sent all sigs, removing from '
'nonrespondant list').format(nick))
self.nonrespondants.remove(nick)
break
if not inserted_sig:
jlog.debug('signature did not match anything in the tx')
# TODO what if the signature doesnt match anything
# nothing really to do except drop it, carry on and wonder why the
# other guy sent a failed signature
tx_signed = True
for ins in self.latest_tx['ins']:
if ins['script'] == '':
tx_signed = False
if not tx_signed:
return False
assert not len(self.nonrespondants)
jlog.debug('all makers have sent their signatures')
self.self_sign_and_push()
return True
def make_commitment(self):
"""The Taker default commitment function, which uses PoDLE.
Alternative commitment types should use a different commit type byte.
This will allow future upgrades to provide different style commitments
by subclassing Taker and changing the commit_type_byte; existing makers
will simply not accept this new type of commitment.
In case of success, return the commitment and its opening.
In case of failure returns (None, None) and constructs a detailed
log for the user to read and discern the reason.
"""
def filter_by_coin_age_amt(utxos, age, amt):
results = jm_single().bc_interface.query_utxo_set(utxos,
includeconf=True)
newresults = []
too_old = []
too_small = []
for i, r in enumerate(results):
#results return "None" if txo is spent; drop this
if not r:
continue
valid_age = r['confirms'] >= age
valid_amt = r['value'] >= amt
if not valid_age:
too_old.append(utxos[i])
if not valid_amt:
too_small.append(utxos[i])
if valid_age and valid_amt:
newresults.append(utxos[i])
return newresults, too_old, too_small
def priv_utxo_pairs_from_utxos(utxos, age, amt):
#returns pairs list of (priv, utxo) for each valid utxo;
#also returns lists "too_old" and "too_small" for any
#utxos that did not satisfy the criteria for debugging.
priv_utxo_pairs = []
new_utxos, too_old, too_small = filter_by_coin_age_amt(utxos.keys(),
age, amt)
new_utxos_dict = {k: v for k, v in utxos.items() if k in new_utxos}
for k, v in new_utxos_dict.iteritems():
addr = v['address']
priv = self.wallet.get_key_from_addr(addr)
if priv: #can be null from create-unsigned
priv_utxo_pairs.append((priv, k))
return priv_utxo_pairs, too_old, too_small
commit_type_byte = "P"
podle_data = None
tries = jm_single().config.getint("POLICY", "taker_utxo_retries")
age = jm_single().config.getint("POLICY", "taker_utxo_age")
#Minor rounding errors don't matter here
amt = int(self.cjamount *
jm_single().config.getint("POLICY",
"taker_utxo_amtpercent") / 100.0)
priv_utxo_pairs, to, ts = priv_utxo_pairs_from_utxos(self.input_utxos,
age, amt)
#Note that we ignore the "too old" and "too small" lists in the first
#pass through, because the same utxos appear in the whole-wallet check.
#For podle data format see: podle.PoDLE.reveal()
#In first round try, don't use external commitments
podle_data = generate_podle(priv_utxo_pairs, tries)
if not podle_data:
#We defer to a second round to try *all* utxos in wallet;
#this is because it's much cleaner to use the utxos involved
#in the transaction, about to be consumed, rather than use
#random utxos that will persist after. At this step we also
#allow use of external utxos in the json file.
if self.wallet.unspent:
priv_utxo_pairs, to, ts = priv_utxo_pairs_from_utxos(
self.wallet.unspent, age, amt)
#Pre-filter the set of external commitments that work for this
#transaction according to its size and age.
dummy, extdict = get_podle_commitments()
if len(extdict.keys()) > 0:
ext_valid, ext_to, ext_ts = filter_by_coin_age_amt(
extdict.keys(), age, amt)
else:
ext_valid = None
podle_data = generate_podle(priv_utxo_pairs, tries, ext_valid)
if podle_data:
jlog.debug("Generated PoDLE: " + pprint.pformat(podle_data))
revelation = PoDLE(u=podle_data['utxo'],
P=podle_data['P'],
P2=podle_data['P2'],
s=podle_data['sig'],
e=podle_data['e']).serialize_revelation()
return (commit_type_byte + podle_data["commit"], revelation,
"Commitment sourced OK")
else:
#we know that priv_utxo_pairs all passed age and size tests, so
#they must have failed the retries test. Summarize this info,
#return error message to caller, and also dump to commitments_debug.txt
errmsg = ""
errmsgheader = ("Failed to source a commitment; this debugging information"
" may help:\n\n")
errmsg += ("1: Utxos that passed age and size limits, but have "
"been used too many times (see taker_utxo_retries "
"in the config):\n")
if len(priv_utxo_pairs) == 0:
errmsg += ("None\n")
else:
for p, u in priv_utxo_pairs:
errmsg += (str(u) + "\n")
errmsg += ("2: Utxos that have less than " + jm_single(
).config.get("POLICY", "taker_utxo_age") + " confirmations:\n")
if len(to) == 0:
errmsg += ("None\n")
else:
for t in to:
errmsg += (str(t) + "\n")
errmsg += ("3: Utxos that were not at least " + \
jm_single().config.get(
"POLICY", "taker_utxo_amtpercent") + "% of the "
"size of the coinjoin amount " + str(
self.cjamount) + "\n")
if len(ts) == 0:
errmsg += ("None\n")
else:
for t in ts:
errmsg += (str(t) + "\n")
errmsg += ('***\n')
errmsg += ("Utxos that appeared in item 1 cannot be used again.\n")
errmsg += (
"Utxos only in item 2 can be used by waiting for more "
"confirmations, (set by the value of taker_utxo_age).\n")
errmsg += ("Utxos only in item 3 are not big enough for this "
"coinjoin transaction, set by the value "
"of taker_utxo_amtpercent.\n")
errmsg += (
"If you cannot source a utxo from your wallet according "
"to these rules, use the tool add-utxo.py to source a "
"utxo external to your joinmarket wallet. Read the help "
"with 'python add-utxo.py --help'\n\n")
errmsg += ("You can also reset the rules in the joinmarket.cfg "
"file, but this is generally inadvisable.\n")
errmsg += (
"***\nFor reference, here are the utxos in your wallet:\n")
errmsg += ("\n" + str(self.wallet.unspent))
with open("commitments_debug.txt", "wb") as f:
errmsgfileheader = ("THIS IS A TEMPORARY FILE FOR DEBUGGING; "
"IT CAN BE SAFELY DELETED ANY TIME.\n")
errmsgfileheader += ("***\n")
f.write(errmsgfileheader + errmsg)
return (None, None, errmsgheader + errmsg)
def get_commitment(self, utxos, amount):
"""Create commitment to fulfil anti-DOS requirement of makers,
storing the corresponding reveal/proof data for next step.
"""
while True:
#make_commitment() here takes no arguments and returns a 3-tuple;
#the error message element is not needed at this point
self.commitment, self.reveal_commitment, _ = self.make_commitment()
if (self.commitment) or (jm_single().wait_for_commitments == 0):
break
jlog.debug("Failed to source commitments, waiting 3 minutes")
time.sleep(3 * 60)
if not self.commitment:
jlog.debug(
"Cannot construct transaction, failed to generate "
"commitment, shutting down. Please read commitments_debug.txt "
"for some information on why this is, and what can be "
"done to remedy it.")
#TODO: would like to raw_input here to show the user, but
#interactivity is undesirable here.
#Test only:
if jm_single().config.get("BLOCKCHAIN",
"blockchain_source") == 'regtest':
raise PoDLEError("For testing raising podle exception")
#The timeout/recovery code is designed to handle non-responsive
#counterparties, but this condition means that the current bot
#is not able to create transactions following its *own* rules,
#so shutting down is appropriate no matter what style
#of bot this is.
#These two settings shut down the timeout thread and avoid recovery.
self.all_responded = True
self.end_timeout_thread = True
self.msgchan.shutdown()
def coinjoin_address(self):
if self.my_cj_addr:
return self.my_cj_addr
else:
addr, self.sign_k = donation_address()
return addr
def sign_tx(self, tx, i, priv):
if self.my_cj_addr:
return btc.sign(tx, i, priv)
else:
return btc.sign(tx,
i,
priv,
usenonce=btc.safe_hexlify(self.sign_k))
def self_sign(self):
# now sign it ourselves
tx = btc.serialize(self.latest_tx)
if self.sign_method == "wallet":
#Currently passes addresses of to-be-signed inputs
#to backend wallet; this is correct for Electrum, may need
#different info for other backends.
addrs = {}
for index, ins in enumerate(self.latest_tx['ins']):
utxo = ins['outpoint']['hash'] + ':' + str(ins['outpoint']['index'])
if utxo not in self.input_utxos.keys():
continue
addrs[index] = self.input_utxos[utxo]['address']
tx = self.wallet.sign_tx(tx, addrs)
else:
for index, ins in enumerate(self.latest_tx['ins']):
utxo = ins['outpoint']['hash'] + ':' + str(ins['outpoint']['index'])
if utxo not in self.input_utxos.keys():
continue
addr = self.input_utxos[utxo]['address']
tx = self.sign_tx(tx, index, self.wallet.get_key_from_addr(addr))
self.latest_tx = btc.deserialize(tx)
def push(self):
tx = btc.serialize(self.latest_tx)
jlog.debug('\n' + tx)
self.txid = btc.txhash(tx)
jlog.debug('txid = ' + self.txid)
pushed = jm_single().bc_interface.pushtx(tx)
return pushed
def self_sign_and_push(self):
self.self_sign()
return self.push()

465
client/wallet.py

@ -0,0 +1,465 @@
from __future__ import print_function
import json
import os
import pprint
import random
import sys
from decimal import Decimal
from ConfigParser import NoSectionError
from getpass import getpass
import btc
from joinmarketclient.slowaes import decryptData
from joinmarketclient.blockchaininterface import BitcoinCoreInterface, RegtestBitcoinCoreInterface
from joinmarketclient.configure import jm_single, get_network, get_p2pk_vbyte
from joinmarketclient.support import get_log, select_gradual, select_greedy, \
select_greediest, select
log = get_log()
def estimate_tx_fee(ins, outs, txtype='p2pkh'):
'''Returns an estimate of the number of satoshis required
for a transaction with the given number of inputs and outputs,
based on information from the blockchain interface.
'''
tx_estimated_bytes = btc.estimate_tx_size(ins, outs, txtype)
log.debug("Estimated transaction size: "+str(tx_estimated_bytes))
fee_per_kb = jm_single().bc_interface.estimate_fee_per_kb(
jm_single().config.getint("POLICY", "tx_fees"))
absurd_fee = jm_single().config.getint("POLICY", "absurd_fee_per_kb")
if fee_per_kb > absurd_fee:
#This error is considered critical; for safety reasons, shut down.
raise ValueError("Estimated fee per kB greater than absurd value: " + \
str(absurd_fee) + ", quitting.")
log.debug("got estimated tx bytes: "+str(tx_estimated_bytes))
return int((tx_estimated_bytes * fee_per_kb)/Decimal(1000.0))
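#Worked sketch of the fee arithmetic above (numbers hypothetical): for an
#estimated size of 522 bytes and an estimated rate of 30000 sat/kB, the
#returned fee is int(522 * 30000 / Decimal(1000.0)) = 15660 satoshis; the
#absurd_fee check only triggers if the sat/kB rate itself exceeds the
#configured absurd_fee_per_kb ceiling.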
class AbstractWallet(object):
"""
Abstract wallet for use with JoinMarket
Mostly written with Wallet in mind, the default JoinMarket HD wallet
"""
def __init__(self):
self.max_mix_depth = 0
self.unspent = None
self.utxo_selector = select
try:
config = jm_single().config
if config.get("POLICY", "merge_algorithm") == "gradual":
self.utxo_selector = select_gradual
elif config.get("POLICY", "merge_algorithm") == "greedy":
self.utxo_selector = select_greedy
elif config.get("POLICY", "merge_algorithm") == "greediest":
self.utxo_selector = select_greediest
elif config.get("POLICY", "merge_algorithm") != "default":
raise Exception("Unknown merge algorithm")
except NoSectionError:
pass
def get_key_from_addr(self, addr):
return None
def get_utxos_by_mixdepth(self):
return None
def get_external_addr(self, mixing_depth):
"""
Return an address suitable for external distribution, including funding
the wallet from other sources, or receiving payments or donations.
JoinMarket will never generate these addresses for internal use.
"""
return None
def get_internal_addr(self, mixing_depth):
"""
Return an address for internal usage, as change addresses and when
participating in transactions initiated by other parties.
"""
return None
def update_cache_index(self):
pass
def remove_old_utxos(self, tx):
pass
def add_new_utxos(self, tx, txid):
pass
def select_utxos(self, mixdepth, amount):
utxo_list = self.get_utxos_by_mixdepth()[mixdepth]
unspent = [{'utxo': utxo,
'value': addrval['value']}
for utxo, addrval in utxo_list.iteritems()]
inputs = self.utxo_selector(unspent, amount)
log.debug('for mixdepth={} amount={} selected:'.format(
mixdepth, amount))
log.debug(pprint.pformat(inputs))
return dict([(i['utxo'], {'value': i['value'],
'address': utxo_list[i['utxo']]['address']})
for i in inputs])
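#Example of the select_utxos return format (hypothetical values):
#{'aa..ff:0': {'value': 150000, 'address': '1Abc..'},
# 'aa..ff:1': {'value': 990000, 'address': '1Def..'}}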
def get_balance_by_mixdepth(self):
mix_balance = {}
for m in range(self.max_mix_depth):
mix_balance[m] = 0
for mixdepth, utxos in self.get_utxos_by_mixdepth().iteritems():
mix_balance[mixdepth] = sum(
[addrval['value'] for addrval in utxos.values()])
return mix_balance
class ElectrumWrapWallet(AbstractWallet):
"""A thin wrapper class over Electrum's own
wallet for joinmarket compatibility
"""
def __init__(self, ewallet):
self.ewallet = ewallet
#TODO: populate self.unspent with all utxos in Electrum wallet.
# None is valid for unencrypted electrum wallets;
# calling functions must set the password otherwise
# for private key operations to work
self.password = None
super(ElectrumWrapWallet, self).__init__()
def get_key_from_addr(self, addr):
if self.ewallet.has_password() and self.password is None:
raise Exception("Cannot extract private key without password")
log.debug("in get key from addr")
log.debug("password is: " + str(self.password))
log.debug("address is: " + str(addr))
key = self.ewallet.get_private_key(addr, self.password)
#TODO remove after testing!
log.debug("Got WIF key: " + str(key))
#Convert from wif compressed to hex compressed
#TODO check if compressed
hex_key = btc.from_wif_privkey(key[0], vbyte=get_p2pk_vbyte())
log.debug("Got hex key: " + str(hex_key))
return hex_key
def get_external_addr(self, mixdepth):
addr = self.ewallet.get_unused_address()
log.debug("Retrieved unused: " + addr)
return addr
def get_internal_addr(self, mixdepth):
try:
addrs = self.ewallet.get_change_addresses()[
-self.ewallet.gap_limit_for_change:]
except Exception as e:
log.debug("Failed get change addresses: " + repr(e))
raise
#filter by unused
try:
change_addrs = [addr for addr in addrs if
self.ewallet.get_num_tx(addr) == 0]
except Exception as e:
log.debug("Failed to filter chadr: " + repr(e))
raise
#if there is no unused change address, re-use one at random; TODO consider
#alternatives (of course, all coins in the same mixdepth are in principle
#linkable, so I suspect it is better to stick with Electrum's own model,
#considering gap limit issues)
if not change_addrs:
try:
change_addrs = [random.choice(addrs)]
except Exception as e:
log.debug("Failed random: " + repr(e))
raise
return change_addrs[0]
def sign_tx(self, tx, addrs):
"""tx should be a serialized hex tx.
If self.password is correctly set,
will return the raw transaction with all
inputs from this wallet signed.
"""
if not self.password:
raise Exception("No password, cannot sign")
from electrum.transaction import Transaction
etx = Transaction(tx)
etx.deserialize()
for i in addrs.keys():
del etx._inputs[i]['scriptSig']
self.ewallet.add_input_sig_info(etx._inputs[i], addrs[i])
etx._inputs[i]['address'] = addrs[i]
log.debug("Input is now: " + str(etx._inputs[i]))
self.ewallet.sign_transaction(etx, self.password)
return etx.raw
def sign_message(self, address, message):
#TODO: not currently used, can we use it for auth?
return self.ewallet.sign_message(address, message, self.password)
def get_utxos_by_mixdepth(self):
"""Initial version: all underlying utxos are mixdepth 0.
Format of return is therefore: {0:
{txid:n : {"address": addr, "value": value},
txid:n: {"address": addr, "value": value},..}}
TODO this should use the account feature in Electrum,
which is exactly that from BIP32, to implement
multiple mixdepths.
"""
ubym = {0:{}}
coins = self.ewallet.get_spendable_coins()
log.debug(pprint.pformat(coins))
for c in coins:
utxo = c["prevout_hash"] + ":" + str(c["prevout_n"])
ubym[0][utxo] = {"address": c["address"], "value": c["value"]}
return ubym
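#Example of the return format described in the docstring above
#(hypothetical values):
#{0: {'aa..ff:0': {'address': '1Abc..', 'value': 150000},
#     'aa..ff:1': {'address': '1Def..', 'value': 990000}}}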
class Wallet(AbstractWallet):
def __init__(self,
seedarg,
max_mix_depth=2,
gaplimit=6,
extend_mixdepth=False,
storepassword=False):
super(Wallet, self).__init__()
self.max_mix_depth = max_mix_depth
self.storepassword = storepassword
# key is address, value is (mixdepth, forchange, index); if mixdepth ==
# -1 it's an imported key and index refers to imported_privkeys
self.addr_cache = {}
self.unspent = {}
self.spent_utxos = []
self.imported_privkeys = {}
self.seed = self.read_wallet_file_data(seedarg)
if extend_mixdepth and len(self.index_cache) > max_mix_depth:
self.max_mix_depth = len(self.index_cache)
self.gaplimit = gaplimit
master = btc.bip32_master_key(self.seed, (btc.MAINNET_PRIVATE if
get_network() == 'mainnet' else btc.TESTNET_PRIVATE))
m_0 = btc.bip32_ckd(master, 0)
mixing_depth_keys = [btc.bip32_ckd(m_0, c)
for c in range(self.max_mix_depth)]
self.keys = [(btc.bip32_ckd(m, 0), btc.bip32_ckd(m, 1))
for m in mixing_depth_keys]
# self.index = [[0, 0]]*max_mix_depth
self.index = []
for i in range(self.max_mix_depth):
self.index.append([0, 0])
def read_wallet_file_data(self, filename, pwd=None):
self.path = None
self.index_cache = [[0, 0]] * self.max_mix_depth
path = os.path.join('wallets', filename)
if not os.path.isfile(path):
if get_network() == 'testnet':
log.debug('filename interpreted as seed, only available in '
'testnet because this probably has lower entropy')
return filename
else:
raise IOError('wallet file not found')
self.path = path
fd = open(path, 'r')
walletfile = fd.read()
fd.close()
walletdata = json.loads(walletfile)
if walletdata['network'] != get_network():
print ('wallet network(%s) does not match '
'joinmarket configured network(%s)' % (
walletdata['network'], get_network()))
sys.exit(0)
if 'index_cache' in walletdata:
self.index_cache = walletdata['index_cache']
decrypted = False
while not decrypted:
if pwd:
password = pwd
else:
password = getpass('Enter wallet decryption passphrase: ')
password_key = btc.bin_dbl_sha256(password)
encrypted_seed = walletdata['encrypted_seed']
try:
decrypted_seed = decryptData(
password_key,
encrypted_seed.decode('hex')).encode('hex')
# there is a small probability of getting a valid PKCS7
# padding by chance from a wrong password; sanity check the
# seed length
if len(decrypted_seed) == 32:
decrypted = True
else:
raise ValueError
except ValueError:
print('Incorrect password')
if pwd:
raise
decrypted = False
if self.storepassword:
self.password_key = password_key
self.walletdata = walletdata
if 'imported_keys' in walletdata:
for epk_m in walletdata['imported_keys']:
privkey = decryptData(
password_key,
epk_m['encrypted_privkey'].decode( 'hex')).encode('hex')
#Imported keys are stored as 32 byte keys (64 hex chars) only, so
#this length check is sufficient, really.
if len(privkey) != 64:
raise Exception(
"Unexpected privkey format; already compressed?:" + privkey)
privkey += "01"
if epk_m['mixdepth'] not in self.imported_privkeys:
self.imported_privkeys[epk_m['mixdepth']] = []
self.addr_cache[btc.privtoaddr(
privkey, magicbyte=get_p2pk_vbyte())] = (epk_m['mixdepth'], -1,
len(self.imported_privkeys[epk_m['mixdepth']]))
self.imported_privkeys[epk_m['mixdepth']].append(privkey)
return decrypted_seed
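#Sketch of the wallet-file JSON fields read above (field names as accessed
#in read_wallet_file_data; values hypothetical):
#{"network": "mainnet",
# "index_cache": [[0, 0], [0, 0]],
# "encrypted_seed": "<hex>",
# "imported_keys": [{"encrypted_privkey": "<hex>", "mixdepth": 0}]}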
def update_cache_index(self):
if not self.path:
return
if not os.path.isfile(self.path):
return
fd = open(self.path, 'r')
walletfile = fd.read()
fd.close()
walletdata = json.loads(walletfile)
walletdata['index_cache'] = self.index
walletfile = json.dumps(walletdata)
fd = open(self.path, 'w')
fd.write(walletfile)
fd.close()
def get_key(self, mixing_depth, forchange, i):
return btc.bip32_extract_key(btc.bip32_ckd(
self.keys[mixing_depth][forchange], i))
def get_addr(self, mixing_depth, forchange, i):
return btc.privtoaddr(
self.get_key(mixing_depth, forchange, i), magicbyte=get_p2pk_vbyte())
def get_new_addr(self, mixing_depth, forchange):
index = self.index[mixing_depth]
addr = self.get_addr(mixing_depth, forchange, index[forchange])
self.addr_cache[addr] = (mixing_depth, forchange, index[forchange])
index[forchange] += 1
# self.update_cache_index()
bc_interface = jm_single().bc_interface
if isinstance(bc_interface, BitcoinCoreInterface) or isinstance(
bc_interface, RegtestBitcoinCoreInterface):
# do not import in the middle of sync_wallet()
if bc_interface.wallet_synced:
if bc_interface.rpc('getaccount', [addr]) == '':
log.debug('importing address ' + addr + ' to bitcoin core')
bc_interface.rpc(
'importaddress',
[addr, bc_interface.get_wallet_name(self), False])
return addr
def get_external_addr(self, mixing_depth):
return self.get_new_addr(mixing_depth, 0)
def get_internal_addr(self, mixing_depth):
return self.get_new_addr(mixing_depth, 1)
def get_key_from_addr(self, addr):
if addr not in self.addr_cache:
return None
ac = self.addr_cache[addr]
if ac[1] >= 0:
return self.get_key(*ac)
else:
return self.imported_privkeys[ac[0]][ac[2]]
def remove_old_utxos(self, tx):
removed_utxos = {}
for ins in tx['ins']:
utxo = ins['outpoint']['hash'] + ':' + str(ins['outpoint']['index'])
if utxo not in self.unspent:
continue
removed_utxos[utxo] = self.unspent[utxo]
del self.unspent[utxo]
log.debug('removed utxos, wallet now is \n' + pprint.pformat(
self.get_utxos_by_mixdepth()))
self.spent_utxos += removed_utxos.keys()
return removed_utxos
def add_new_utxos(self, tx, txid):
added_utxos = {}
for index, outs in enumerate(tx['outs']):
addr = btc.script_to_address(outs['script'], get_p2pk_vbyte())
if addr not in self.addr_cache:
continue
addrdict = {'address': addr, 'value': outs['value']}
utxo = txid + ':' + str(index)
added_utxos[utxo] = addrdict
self.unspent[utxo] = addrdict
log.debug('added utxos, wallet now is \n' + pprint.pformat(
self.get_utxos_by_mixdepth()))
return added_utxos
def get_utxos_by_mixdepth(self):
"""
returns a list of utxos sorted by different mix levels
"""
mix_utxo_list = {}
for m in range(self.max_mix_depth):
mix_utxo_list[m] = {}
for utxo, addrvalue in self.unspent.iteritems():
mixdepth = self.addr_cache[addrvalue['address']][0]
if mixdepth not in mix_utxo_list:
mix_utxo_list[mixdepth] = {}
mix_utxo_list[mixdepth][utxo] = addrvalue
log.debug('get_utxos_by_mixdepth = \n' + pprint.pformat(mix_utxo_list))
return mix_utxo_list
class BitcoinCoreWallet(AbstractWallet):
def __init__(self, fromaccount):
super(BitcoinCoreWallet, self).__init__()
if not isinstance(jm_single().bc_interface,
BitcoinCoreInterface):
raise RuntimeError('Bitcoin Core wallet can only be used when '
'blockchain interface is BitcoinCoreInterface')
self.fromaccount = fromaccount
self.max_mix_depth = 1
def get_key_from_addr(self, addr):
self.ensure_wallet_unlocked()
wifkey = jm_single().bc_interface.rpc('dumpprivkey', [addr])
return btc.from_wif_privkey(wifkey, vbyte=get_p2pk_vbyte())
def get_utxos_by_mixdepth(self):
unspent_list = jm_single().bc_interface.rpc('listunspent', [])
result = {0: {}}
for u in unspent_list:
if not u['spendable']:
continue
if self.fromaccount and (
('account' not in u) or u['account'] !=
self.fromaccount):
continue
result[0][u['txid'] + ':' + str(u['vout'])] = {
'address': u['address'],
'value': int(Decimal(str(u['amount'])) * Decimal('1e8'))}
return result
def get_internal_addr(self, mixing_depth):
return jm_single().bc_interface.rpc('getrawchangeaddress', [])
@staticmethod
def ensure_wallet_unlocked():
wallet_info = jm_single().bc_interface.rpc('getwalletinfo', [])
if 'unlocked_until' in wallet_info and wallet_info[
'unlocked_until'] <= 0:
while True:
password = getpass(
'Enter passphrase to unlock wallet: ')
if password == '':
raise RuntimeError('Aborting wallet unlock')
try:
# TODO cleanly unlock wallet after use, not with arbitrary timeout
jm_single().bc_interface.rpc(
'walletpassphrase', [password, 10])
break
except jm_single().JsonRpcError as exc:
if exc.code != -14:
raise exc
# Wrong passphrase, try again.

22
daemon/__init__.py

@ -0,0 +1,22 @@
from __future__ import print_function
import logging
from protocol import *
from .enc_wrapper import as_init_encryption, decode_decrypt, \
encrypt_encode, init_keypair, init_pubkey, get_pubkey, NaclError
from .irc import IRCMessageChannel, B_PER_SEC
from .support import get_log
from .message_channel import MessageChannel, MessageChannelCollection
from .orderbookwatch import OrderbookWatch
import commands
# Set default logging handler to avoid "No handler found" warnings.
try:
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
logging.getLogger(__name__).addHandler(NullHandler())

99
daemon/enc_wrapper.py

@ -0,0 +1,99 @@
from __future__ import absolute_import, print_function
# A wrapper for public-key authenticated encryption,
# using Diffie-Hellman key exchange to set up
# symmetric encryption.
import binascii
import base64
import string
import random
from libnacl import public
class NaclError(Exception):
pass
def init_keypair(fname=None):
"""Create a new encryption
keypair; stored in file fname
if provided. The keypair object
is returned.
"""
kp = public.SecretKey()
if fname:
# Note: handles correct file permissions
kp.save(fname)
return kp
# the next two functions are useful
# for exchanging pubkeys with a counterparty
def get_pubkey(kp, as_hex=False):
"""Given a keypair object,
return its public key,
optionally in hex."""
if not isinstance(kp, public.SecretKey):
raise NaclError("Object is not a nacl keypair")
return kp.hex_pk() if as_hex else kp.pk
def init_pubkey(hexpk, fname=None):
"""Create a pubkey object from a
hex formatted string.
Save to file fname if specified.
"""
try:
bin_pk = binascii.unhexlify(hexpk)
except TypeError:
raise NaclError("Invalid hex")
if not len(bin_pk) == 32:
raise NaclError("Public key must be 32 bytes")
pk = public.PublicKey(binascii.unhexlify(hexpk))
if fname:
pk.save(fname)
return pk
def as_init_encryption(kp, c_pk):
"""Given an initialised
keypair kp and a counterparty
pubkey c_pk, create a Box
ready for encryption/decryption.
"""
if not isinstance(c_pk, public.PublicKey):
raise NaclError("Object is not a public key")
if not isinstance(kp, public.SecretKey):
raise NaclError("Object is not a nacl keypair")
return public.Box(kp.sk, c_pk)
'''
After initialisation, it's possible
to use the box object returned from
as_init_encryption to directly change
from plaintext to ciphertext:
ciphertext = box.encrypt(plaintext)
plaintext = box.decrypt(ciphertext)
Notes:
1. use binary format for ctext/ptext
2. Nonce is handled at the implementation layer.
'''
# TODO: Sign, verify. At the moment we are using
# bitcoin signatures so it isn't necessary.
# encoding for passing over the wire
def encrypt_encode(msg, box):
encrypted = box.encrypt(msg)
return base64.b64encode(encrypted)
def decode_decrypt(msg, box):
decoded = base64.b64decode(msg)
return box.decrypt(decoded)
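#Minimal usage sketch of the wrapper functions above; it uses only the
#functions defined in this module, with throwaway keypairs.
def _example_encrypt_roundtrip():
    alice = init_keypair()
    bob = init_keypair()
    #each side builds a box from its own keypair and the other's pubkey
    alice_box = as_init_encryption(alice, init_pubkey(get_pubkey(bob, True)))
    bob_box = as_init_encryption(bob, init_pubkey(get_pubkey(alice, True)))
    wire_msg = encrypt_encode(b"hello joinmarket", alice_box)
    #bob decrypts what alice encrypted
    return decode_decrypt(wire_msg, bob_box)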

472
daemon/irc.py

@ -0,0 +1,472 @@
from __future__ import absolute_import, print_function
import base64
import random
import socket
import ssl
import threading
import time
import Queue
from joinmarketdaemon.message_channel import MessageChannel
from joinmarketdaemon.support import get_log, chunks
from joinmarketdaemon.socks import socksocket, setdefaultproxy, PROXY_TYPE_SOCKS5
from joinmarketdaemon.protocol import *
MAX_PRIVMSG_LEN = 450
PING_INTERVAL = 300
PING_TIMEOUT = 60
#Throttling parameters; data from
#tests by @chris-belcher:
##worked (B_PER_SEC / B_PER_SEC_INTERVAL / counterparties / MAX_PRIVMSG_LEN)
#300/4 / 6 / 400
#600/4 / 6 / 400
#450/4 / 10 / 400
#450/4 / 10 / 450
#525/4 / 10 / 450
##didnt work
#600/4 / 10 / 450
#600/4 / 10 / 400
#2000/2 / 10 / 400
#450/4 / 10 / 475
MSG_INTERVAL = 0.001
B_PER_SEC = 450
B_PER_SEC_INTERVAL = 4.0
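#With the values above, the throttle in ThrottleThread.run allows at most
#B_PER_SEC * B_PER_SEC_INTERVAL = 450 * 4 = 1800 bytes of non-ping traffic
#in any rolling 4 second window, and at most one line per MSG_INTERVAL.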
def get_config_irc_channel(chan_name, btcnet):
channel = "#" + chan_name
if btcnet == "testnet":
channel += "-test"
return channel
log = get_log()
def get_irc_text(line):
return line[line[1:].find(':') + 2:]
def get_irc_nick(source):
full_nick = source[1:source.find('!')]
return full_nick[:NICK_MAX_ENCODED+2]
class ThrottleThread(threading.Thread):
def __init__(self, irc):
threading.Thread.__init__(self, name='ThrottleThread')
self.daemon = True
self.irc = irc
self.msg_buffer = []
def run(self):
log.debug("starting throttle thread")
last_msg_time = 0
print_throttle_msg = True
while not self.irc.give_up:
self.irc.lockthrottle.acquire()
while not (self.irc.throttleQ.empty() and self.irc.obQ.empty()
and self.irc.pingQ.empty()):
time.sleep(0.0001) #need to avoid cpu spinning if throttled
try:
pingmsg = self.irc.pingQ.get(block=False)
#ping messages are not counted to throttling totals,
#so send immediately
self.irc.sock.sendall(pingmsg + '\r\n')
continue
except Queue.Empty:
pass
except:
log.warn("failed to send ping message on socket")
break
#First throttling mechanism: no more than 1 line
#per MSG_INTERVAL seconds.
x = time.time() - last_msg_time
if x < MSG_INTERVAL:
continue
#Second throttling mechanism: limited kB/s rate
#over the most recent period.
q = time.time() - B_PER_SEC_INTERVAL
#clean out old messages
self.msg_buffer = [_ for _ in self.msg_buffer if _[1] > q]
bytes_recent = sum(len(i[0]) for i in self.msg_buffer)
if bytes_recent > B_PER_SEC * B_PER_SEC_INTERVAL:
if print_throttle_msg:
log.debug("Throttling triggered, with: "+str(
bytes_recent)+ " bytes in the last "+str(
B_PER_SEC_INTERVAL)+" seconds.")
print_throttle_msg = False
continue
print_throttle_msg = True
try:
throttled_msg = self.irc.throttleQ.get(block=False)
except Queue.Empty:
try:
throttled_msg = self.irc.obQ.get(block=False)
except Queue.Empty:
#this code *should* be unreachable.
continue
try:
self.irc.sock.sendall(throttled_msg+'\r\n')
last_msg_time = time.time()
self.msg_buffer.append((throttled_msg, last_msg_time))
except:
log.error("failed to send on socket")
try:
self.irc.fd.close()
except: pass
break
self.irc.lockthrottle.wait()
self.irc.lockthrottle.release()
log.debug("Ended throttling thread.")
class PingThread(threading.Thread):
def __init__(self, irc):
threading.Thread.__init__(self, name='PingThread')
self.daemon = True
self.irc = irc
def run(self):
log.debug('starting ping thread')
while not self.irc.give_up:
time.sleep(PING_INTERVAL)
try:
self.irc.ping_reply = False
# maybe use this to calculate the lag one day
self.irc.lockcond.acquire()
self.irc.send_raw('PING LAG' + str(int(time.time() * 1000)))
self.irc.lockcond.wait(PING_TIMEOUT)
self.irc.lockcond.release()
if not self.irc.ping_reply:
log.warn('irc ping timed out')
try:
self.irc.close()
except:
pass
try:
self.irc.fd.close()
except:
pass
try:
self.irc.sock.shutdown(socket.SHUT_RDWR)
self.irc.sock.close()
except:
pass
except IOError as e:
log.debug('ping thread: ' + repr(e))
log.debug('ended ping thread')
# handle one channel at a time
class IRCMessageChannel(MessageChannel):
# close implies it will attempt to reconnect
def close(self):
try:
self.sock.sendall("QUIT\r\n")
except IOError as e:
log.info('errored while trying to quit: ' + repr(e))
def shutdown(self):
self.close()
self.give_up = True
# Maker callbacks
def _announce_orders(self, orderlist):
"""This publishes orders to the pit and to
counterparties. Note that it does *not* use chunking.
So, it tries to optimise space usage thusly:
As many complete orderlines are fit onto one line
as possible, and overflow goes onto another line.
Each list entry in orderlist must have format:
!ordername <parameters>
Then, what is published is lines of form:
!ordername <parameters>!ordername <parameters>..
fitting as many list entries as possible onto one line,
up to the limit of the IRC parameters (see MAX_PRIVMSG_LEN).
Order announce in private is handled by privmsg/_privmsg
using chunking, no longer using this function.
"""
header = 'PRIVMSG ' + self.channel + ' :'
orderlines = []
for i, order in enumerate(orderlist):
orderlines.append(order)
line = header + ''.join(orderlines) + ' ~'
if len(line) > MAX_PRIVMSG_LEN or i == len(orderlist) - 1:
if i < len(orderlist) - 1:
line = header + ''.join(orderlines[:-1]) + ' ~'
self.send_raw(line)
orderlines = [orderlines[-1]]
def _pubmsg(self, message):
line = "PRIVMSG " + self.channel + " :" + message
assert len(line) <= MAX_PRIVMSG_LEN
ob = False
if any([x in line for x in offername_list]):
ob = True
self.send_raw(line, ob)
def _privmsg(self, nick, cmd, message):
"""Send a privmsg to an irc counterparty,
using chunking as appropriate for long messages.
"""
ob = True if cmd in offername_list else False
header = "PRIVMSG " + nick + " :"
max_chunk_len = MAX_PRIVMSG_LEN - len(header) - len(cmd) - 4
# 1 for command prefix 1 for space 2 for trailer
if len(message) > max_chunk_len:
message_chunks = chunks(message, max_chunk_len)
else:
message_chunks = [message]
for m in message_chunks:
trailer = ' ~' if m == message_chunks[-1] else ' ;'
if m == message_chunks[0]:
m = COMMAND_PREFIX + cmd + ' ' + m
self.send_raw(header + m + trailer, ob)
def change_nick(self, new_nick):
self.nick = new_nick
self.send_raw('NICK ' + self.nick)
def send_raw(self, line, ob=False):
# Messages are queued and prioritised.
# This addresses github issue #300.
if line.startswith("PING") or line.startswith("PONG"):
self.pingQ.put(line)
elif ob:
self.obQ.put(line)
else:
self.throttleQ.put(line)
self.lockthrottle.acquire()
self.lockthrottle.notify()
self.lockthrottle.release()
def __handle_privmsg(self, source, target, message):
nick = get_irc_nick(source)
#ensure return value 'parsed' is length > 2
if len(message) < 4:
return
if target == self.nick:
if message[0] == '\x01':
endindex = message[1:].find('\x01')
if endindex == -1:
return
ctcp = message[1:endindex + 1]
if ctcp.upper() == 'VERSION':
self.send_raw('PRIVMSG ' + nick +
' :\x01VERSION xchat 2.8.8 Ubuntu\x01')
return
if nick not in self.built_privmsg:
self.built_privmsg[nick] = message[:-2]
else:
self.built_privmsg[nick] += message[:-2]
if message[-1] == '~':
parsed = self.built_privmsg[nick]
# wipe the message buffer waiting for the next one
del self.built_privmsg[nick]
log.debug("<<privmsg on %s: " %
(self.hostid) + "nick=%s message=%s" % (nick, parsed))
self.on_privmsg(nick, parsed)
elif message[-1] != ';':
# drop the bad nick
del self.built_privmsg[nick]
elif target == self.channel:
log.info("<<pubmsg on %s: " %
(self.hostid) + "nick=%s message=%s" %
(nick, message))
self.on_pubmsg(nick, message)
else:
log.debug("what is this? privmsg on %s: " %
(self.hostid) + "src=%s target=%s message=%s;" %
(source, target, message))
def __handle_line(self, line):
line = line.rstrip()
# log.debug('<< ' + line)
if line.startswith('PING '):
self.send_raw(line.replace('PING', 'PONG'))
return
_chunks = line.split(' ')
if _chunks[1] == 'QUIT':
nick = get_irc_nick(_chunks[0])
if nick == self.nick:
raise IOError('we quit')
else:
if self.on_nick_leave:
self.on_nick_leave(nick, self)
elif _chunks[1] == '433': # nick in use
# appending '_' helps keep the identity recognisable;
#request a new nick on *all* channels via the callback
if self.on_nick_change:
self.on_nick_change(self.nick + '_')
if self.password:
if _chunks[1] == 'CAP':
if _chunks[3] != 'ACK':
log.warn("server %s " %
(self.hostid) + "does not support SASL, quitting")
self.shutdown()
self.send_raw('AUTHENTICATE PLAIN')
elif _chunks[0] == 'AUTHENTICATE':
self.send_raw('AUTHENTICATE ' + base64.b64encode(
self.nick + '\x00' + self.nick + '\x00' + self.password))
elif _chunks[1] == '903':
log.info("Successfully authenticated on %s" %
(self.hostid))
self.password = None
self.send_raw('CAP END')
elif _chunks[1] == '904':
log.warn("Failed authentication %s " %
(self.hostid) + ", wrong password")
self.shutdown()
return
if _chunks[1] == 'PRIVMSG':
self.__handle_privmsg(_chunks[0], _chunks[2], get_irc_text(line))
if _chunks[1] == 'PONG':
self.ping_reply = True
self.lockcond.acquire()
self.lockcond.notify()
self.lockcond.release()
elif _chunks[1] == '376': # end of motd
self.built_privmsg = {}
if self.on_connect:
self.on_connect(self)
if self.hostid == 'agora-irc':
self.send_raw('PART #AGORA')
self.send_raw('JOIN ' + self.channel)
self.send_raw(
'MODE ' + self.nick + ' +B') # marks as bots on unreal
self.send_raw(
'MODE ' + self.nick + ' -R') # allows unreg'd private messages
elif _chunks[1] == '366': # end of names list
log.info("Connected to IRC and joined channel on %s " %
(self.hostid))
if self.on_welcome:
self.on_welcome(self) #informs mc-collection that we are ready for use
elif _chunks[1] == '332' or _chunks[1] == 'TOPIC': # channel topic
topic = get_irc_text(line)
self.on_set_topic(topic)
elif _chunks[1] == 'KICK':
target = _chunks[3]
if target == self.nick:
self.give_up = True
fmt = '{} has kicked us from the irc channel! Reason= {}'.format
raise IOError(fmt(get_irc_nick(_chunks[0]), get_irc_text(line)))
else:
if self.on_nick_leave:
self.on_nick_leave(target, self)
elif _chunks[1] == 'PART':
nick = get_irc_nick(_chunks[0])
if self.on_nick_leave:
self.on_nick_leave(nick, self)
elif _chunks[1] == '005':
'''
:port80b.se.quakenet.org 005 J5BzJGGfyw5GaPc MAXNICKLEN=15
TOPICLEN=250 AWAYLEN=160 KICKLEN=250 CHANNELLEN=200
MAXCHANNELLEN=200 CHANTYPES=#& PREFIX=(ov)@+ STATUSMSG=@+
CHANMODES=b,k,l,imnpstrDducCNMT CASEMAPPING=rfc1459
NETWORK=QuakeNet :are supported by this server
'''
for chu in _chunks[3:]:
if chu[0] == ':':
break
if chu.lower().startswith('network='):
self.hostid = chu[8:]
log.debug('found network name: ' + self.hostid + ';')
def __init__(self,
configdata,
username='username',
realname='realname',
password=None,
daemon=None):
MessageChannel.__init__(self, daemon=daemon)
self.give_up = True
self.serverport = (configdata['host'], configdata['port'])
#default hostid for use with miniircd which doesn't send NETWORK
self.hostid = configdata['host'] + str(configdata['port'])
self.socks5 = configdata["socks5"]
self.usessl = configdata["usessl"]
self.socks5_host = configdata["socks5_host"]
self.socks5_port = int(configdata["socks5_port"])
self.channel = get_config_irc_channel(configdata["channel"],
configdata["btcnet"])
self.userrealname = (username, realname)
if password is not None and len(password) == 0:
password = None
self.given_password = password
self.pingQ = Queue.Queue()
self.throttleQ = Queue.Queue()
self.obQ = Queue.Queue()
def run(self):
self.give_up = False
self.ping_reply = True
self.lockcond = threading.Condition()
self.lockthrottle = threading.Condition()
PingThread(self).start()
ThrottleThread(self).start()
while not self.give_up:
try:
log.info("connecting to host %s" %
(self.hostid))
if self.socks5.lower() == 'true':
log.debug("Using socks5 proxy %s:%d" %
(self.socks5_host, self.socks5_port))
setdefaultproxy(PROXY_TYPE_SOCKS5,
self.socks5_host, self.socks5_port,
True)
self.sock = socksocket()
else:
self.sock = socket.socket(socket.AF_INET,
socket.SOCK_STREAM)
self.sock.connect(self.serverport)
if self.usessl.lower() == 'true':
self.sock = ssl.wrap_socket(self.sock)
self.fd = self.sock.makefile()
self.password = None
if self.given_password:
self.password = self.given_password
self.send_raw('CAP REQ :sasl')
self.send_raw('USER %s b c :%s' % self.userrealname)
self.nick = self.given_nick
self.send_raw('NICK ' + self.nick)
while 1:
try:
line = self.fd.readline()
except AttributeError as e:
raise IOError(repr(e))
if line is None:
log.debug("line returned null from %s" %
(self.hostid))
break
if len(line) == 0:
log.debug("line was zero length from %s" %
(self.hostid))
break
self.__handle_line(line)
except IOError as e:
import traceback
log.debug("logging traceback from %s: \n" %
(self.hostid) + traceback.format_exc())
finally:
try:
self.fd.close()
self.sock.close()
except Exception as e:
pass
if self.on_disconnect:
self.on_disconnect(self)
log.info("disconnected from irc host %s" %
(self.hostid))
if not self.give_up:
time.sleep(30)
log.info('ending irc')
self.give_up = True

1026
daemon/message_channel.py

File diff suppressed because it is too large

132
daemon/orderbookwatch.py

@ -0,0 +1,132 @@
#! /usr/bin/env python
from __future__ import absolute_import, print_function
import base64
import pprint
import random
import sqlite3
import sys
import time
import threading
import json
from decimal import InvalidOperation, Decimal
from joinmarketdaemon.protocol import JM_VERSION
from joinmarketdaemon.support import get_log, joinmarket_alert, DUST_THRESHOLD
from joinmarketdaemon.irc import B_PER_SEC
log = get_log()
class JMTakerError(Exception):
pass
class OrderbookWatch(object):
def set_msgchan(self, msgchan):
self.msgchan = msgchan
self.msgchan.register_orderbookwatch_callbacks(self.on_order_seen,
self.on_order_cancel)
self.msgchan.register_channel_callbacks(
self.on_welcome, self.on_set_topic, None, self.on_disconnect,
self.on_nick_leave, None)
self.dblock = threading.Lock()
con = sqlite3.connect(":memory:", check_same_thread=False)
con.row_factory = sqlite3.Row
self.db = con.cursor()
self.db.execute("CREATE TABLE orderbook(counterparty TEXT, "
"oid INTEGER, ordertype TEXT, minsize INTEGER, "
"maxsize INTEGER, txfee INTEGER, cjfee TEXT);")
@staticmethod
def on_set_topic(newtopic):
chunks = newtopic.split('|')
for msg in chunks[1:]:
try:
msg = msg.strip()
params = msg.split(' ')
min_version = int(params[0])
max_version = int(params[1])
alert = msg[msg.index(params[1]) + len(params[1]):].strip()
except (ValueError, IndexError):
continue
if min_version < JM_VERSION < max_version:
print('=' * 60)
print('JOINMARKET ALERT')
print(alert)
print('=' * 60)
joinmarket_alert[0] = alert
def on_order_seen(self, counterparty, oid, ordertype, minsize, maxsize,
txfee, cjfee):
try:
self.dblock.acquire(True)
if int(oid) < 0 or int(oid) > sys.maxint:
log.debug("Got invalid order ID: " + oid + " from " +
counterparty)
return (False, [])
# delete orders eagerly, so in case a buggy maker sends an
# invalid offer, we won't accidentally !fill based on the ghost
# of its previous message.
self.db.execute(
("DELETE FROM orderbook WHERE counterparty=? "
"AND oid=?;"), (counterparty, oid))
# now validate the remaining fields
if int(minsize) < 0 or int(minsize) > 21 * 10**14:
log.debug("Got invalid minsize: {} from {}".format(
minsize, counterparty))
return (False, [])
if int(minsize) < DUST_THRESHOLD:
minsize = DUST_THRESHOLD
log.debug("{} has dusty minsize, capping at {}".format(
counterparty, minsize))
# do not return; do not drop this otherwise fine offer
if int(maxsize) < 0 or int(maxsize) > 21 * 10**14:
log.debug("Got invalid maxsize: " + maxsize + " from " +
counterparty)
return (False, [])
if int(txfee) < 0:
log.debug("Got invalid txfee: {} from {}".format(txfee,
counterparty))
return (False, [])
if int(minsize) > int(maxsize):
fmt = ("Got minsize bigger than maxsize: {} - {} "
"from {}").format
log.debug(fmt(minsize, maxsize, counterparty))
return (False, [])
if ordertype == 'absoffer' and not isinstance(cjfee, int):
try:
cjfee = int(cjfee)
except ValueError:
log.debug("Got non integer coinjoin fee: " + str(cjfee) +
" for an absoffer from " + counterparty)
return (False, [])
self.db.execute(
'INSERT INTO orderbook VALUES(?, ?, ?, ?, ?, ?, ?);',
(counterparty, oid, ordertype, minsize, maxsize, txfee,
str(Decimal(cjfee)))) # any parseable Decimal is a valid cjfee
except InvalidOperation:
log.debug("Got invalid cjfee: " + cjfee + " from " + counterparty)
except Exception as e:
log.debug("Error parsing order " + oid + " from " + counterparty)
log.debug("Exception was: " + repr(e))
finally:
self.dblock.release()
return (True, [])
def on_order_cancel(self, counterparty, oid):
with self.dblock:
self.db.execute(
("DELETE FROM orderbook WHERE "
"counterparty=? AND oid=?;"), (counterparty, oid))
def on_nick_leave(self, nick):
with self.dblock:
self.db.execute('DELETE FROM orderbook WHERE counterparty=?;',
(nick,))
def on_disconnect(self):
with self.dblock:
self.db.execute('DELETE FROM orderbook;')

31
daemon/protocol.py

@ -0,0 +1,31 @@
#Protocol version
JM_VERSION = 5
#Username on all messagechannels; will be set in MessageChannelCollection
nickname = None
separator = " "
offertypes = {"reloffer": [(int, "oid"), (int, "minsize"), (int, "maxsize"),
(int, "txfee"), (float, "cjfee")],
"absoffer": [(int, "oid"), (int, "minsize"), (int, "maxsize"),
(int, "txfee"), (int, "cjfee")]}
offername_list = offertypes.keys()
ORDER_KEYS = ['counterparty', 'oid', 'ordertype', 'minsize', 'maxsize', 'txfee',
'cjfee']
COMMAND_PREFIX = '!'
JOINMARKET_NICK_HEADER = 'J'
NICK_HASH_LENGTH = 10
NICK_MAX_ENCODED = 14 #comes from base58 expansion; recalculate if above changes
#Lists of valid commands
encrypted_commands = ["auth", "ioauth", "tx", "sig"]
plaintext_commands = ["fill", "error", "pubkey", "orderbook", "push"]
commitment_broadcast_list = ["hp2"]
plaintext_commands += offername_list
plaintext_commands += commitment_broadcast_list
public_commands = commitment_broadcast_list + ["orderbook", "cancel"
] + offername_list
private_commands = encrypted_commands + plaintext_commands

410
daemon/socks.py

@ -0,0 +1,410 @@
"""SocksiPy - Python SOCKS module.
Version 1.00
Copyright 2006 Dan-Haim. All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of Dan Haim nor the names of his contributors may be used
to endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY DAN HAIM "AS IS" AND ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
EVENT SHALL DAN HAIM OR HIS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA
OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
This module provides a standard socket-like interface for Python
for tunneling connections through SOCKS proxies.
"""
import socket
import struct
import random
PROXY_TYPE_SOCKS4 = 1
PROXY_TYPE_SOCKS5 = 2
PROXY_TYPE_HTTP = 3
_defaultproxy = None
_orgsocket = socket.socket
class ProxyError(IOError):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class GeneralProxyError(ProxyError):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class Socks5AuthError(ProxyError):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class Socks5Error(ProxyError):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class Socks4Error(ProxyError):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class HTTPError(ProxyError):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
_generalerrors = ("success", "invalid data", "not connected", "not available",
"bad proxy type", "bad input")
_socks5errors = ("succeeded", "general SOCKS server failure",
"connection not allowed by ruleset", "Network unreachable",
"Host unreachable", "Connection refused", "TTL expired",
"Command not supported", "Address type not supported",
"Unknown error")
_socks5autherrors = ("succeeded", "authentication is required",
"all offered authentication methods were rejected",
"unknown username or invalid password", "unknown error")
_socks4errors = (
"request granted", "request rejected or failed",
"request rejected because SOCKS server cannot connect to identd on the client",
"request rejected because the client program and identd report different user-ids",
"unknown error")
def setdefaultproxy(proxytype=None,
addr=None,
port=None,
rdns=True,
username=str(random.randrange(10000000, 99999999)),
password=str(random.randrange(10000000, 99999999))):
"""setdefaultproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
Sets a default proxy which all further socksocket objects will use,
unless explicitly changed.
"""
global _defaultproxy
_defaultproxy = (proxytype, addr, port, rdns, username, password)
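# Minimal usage sketch (hypothetical values; assumes this module is imported
# as "socks" and a SOCKS5 proxy such as Tor is listening locally):
#   import socks
#   socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050, True)
#   s = socks.socksocket()
#   s.connect(("example.com", 80))  # tunnelled through the default proxy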
class socksocket(socket.socket):
"""socksocket([family[, type[, proto]]]) -> socket object
Open a SOCKS enabled socket. The parameters are the same as
those of the standard socket init. In order for SOCKS to work,
you must specify family=AF_INET, type=SOCK_STREAM and proto=0.
"""
def __init__(self,
family=socket.AF_INET,
type=socket.SOCK_STREAM,
proto=0,
_sock=None):
_orgsocket.__init__(self, family, type, proto, _sock)
if _defaultproxy is not None:
self.__proxy = _defaultproxy
else:
self.__proxy = (None, None, None, None, None, None)
self.__proxysockname = None
self.__proxypeername = None
def __recvall(self, bytes):
"""__recvall(bytes) -> data
Receive EXACTLY the number of bytes requested from the socket.
Blocks until the required number of bytes have been received.
"""
data = ""
while len(data) < bytes:
data = data + self.recv(bytes - len(data))
return data
def setproxy(self,
proxytype=None,
addr=None,
port=None,
rdns=True,
username=None,
password=None):
"""setproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
Sets the proxy to be used.
proxytype - The type of the proxy to be used. Three types
are supported: PROXY_TYPE_SOCKS4 (including socks4a),
PROXY_TYPE_SOCKS5 and PROXY_TYPE_HTTP
addr - The address of the server (IP or DNS).
port - The port of the server. Defaults to 1080 for SOCKS
servers and 8080 for HTTP proxy servers.
rdns - Should DNS queries be performed on the remote side
(rather than the local side). The default is True.
Note: This has no effect with SOCKS4 servers.
username - Username to authenticate with to the server.
The default is no authentication.
password - Password to authenticate with to the server.
Only relevant when username is also provided.
"""
self.__proxy = (proxytype, addr, port, rdns, username, password)
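# Per-socket sketch (hypothetical values): unlike setdefaultproxy above,
# setproxy affects only this instance, e.g.
#   s = socksocket()
#   s.setproxy(PROXY_TYPE_SOCKS5, "127.0.0.1", 9050,
#              username="u1", password="p1")
#   s.connect(("irc.example.org", 6667))
# With Tor, distinct username/password pairs typically map to separate
# circuits (stream isolation), which appears to be why setdefaultproxy
# defaults them to random values.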
def __negotiatesocks5(self, destaddr, destport):
"""__negotiatesocks5(self,destaddr,destport)
Negotiates a connection through a SOCKS5 server.
"""
# First we'll send the authentication packages we support.
if (self.__proxy[4] is not None) and (self.__proxy[5] is not None):
# The username/password details were supplied to the
# setproxy method so we support the USERNAME/PASSWORD
# authentication (in addition to the standard none).
self.sendall("\x05\x02\x00\x02")
else:
# No username/password were entered, therefore we
# only support connections with no authentication.
self.sendall("\x05\x01\x00")
# We'll receive the server's response to determine which
# method was selected
chosenauth = self.__recvall(2)
if chosenauth[0] != "\x05":
self.close()
raise GeneralProxyError((1, _generalerrors[1]))
# Check the chosen authentication method
if chosenauth[1] == "\x00":
# No authentication is required
pass
elif chosenauth[1] == "\x02":
# Okay, we need to perform a basic username/password
# authentication.
self.sendall("\x01" + chr(len(self.__proxy[4])) + self.__proxy[4] +
chr(len(self.__proxy[5])) + self.__proxy[5])
authstat = self.__recvall(2)
if authstat[0] != "\x01":
# Bad response
self.close()
raise GeneralProxyError((1, _generalerrors[1]))
if authstat[1] != "\x00":
# Authentication failed
self.close()
raise Socks5AuthError((3, _socks5autherrors[3]))
# Authentication succeeded
else:
# Reaching here is always bad
self.close()
if chosenauth[1] == "\xFF":
raise Socks5AuthError((2, _socks5autherrors[2]))
else:
raise GeneralProxyError((1, _generalerrors[1]))
# Now we can request the actual connection
req = "\x05\x01\x00"
# If the given destination address is an IP address, we'll
# use the IPv4 address request even if remote resolving was specified.
try:
ipaddr = socket.inet_aton(destaddr)
req = req + "\x01" + ipaddr
except socket.error:
# Well it's not an IP number, so it's probably a DNS name.
if self.__proxy[3]:
# Resolve remotely
ipaddr = None
req = req + "\x03" + chr(len(destaddr)) + destaddr
else:
# Resolve locally
ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
req = req + "\x01" + ipaddr
req += struct.pack(">H", destport)
self.sendall(req)
# Get the response
resp = self.__recvall(4)
if resp[0] != "\x05":
self.close()
raise GeneralProxyError((1, _generalerrors[1]))
elif resp[1] != "\x00":
# Connection failed
self.close()
raise Socks5Error(_socks5errors[min(9, ord(resp[1]))])
# Get the bound address/port
elif resp[3] == "\x01":
boundaddr = self.__recvall(4)
elif resp[3] == "\x03":
resp = resp + self.recv(1)
boundaddr = self.__recvall(ord(resp[4]))
else:
self.close()
raise GeneralProxyError((1, _generalerrors[1]))
boundport = struct.unpack(">H", self.__recvall(2))[0]
self.__proxysockname = (boundaddr, boundport)
if ipaddr is not None:
self.__proxypeername = (socket.inet_ntoa(ipaddr), destport)
else:
self.__proxypeername = (destaddr, destport)
def getproxysockname(self):
"""getsockname() -> address info
Returns the bound IP address and port number at the proxy.
"""
return self.__proxysockname
def getproxypeername(self):
"""getproxypeername() -> address info
Returns the IP and port number of the proxy.
"""
return _orgsocket.getpeername(self)
def getpeername(self):
"""getpeername() -> address info
Returns the IP address and port number of the destination
machine (note: getproxypeername returns the proxy)
"""
return self.__proxypeername
def __negotiatesocks4(self, destaddr, destport):
"""__negotiatesocks4(self,destaddr,destport)
Negotiates a connection through a SOCKS4 server.
"""
# Check if the destination address provided is an IP address
rmtrslv = False
try:
ipaddr = socket.inet_aton(destaddr)
except socket.error:
# It's a DNS name. Check where it should be resolved.
if self.__proxy[3]:
ipaddr = "\x00\x00\x00\x01"
rmtrslv = True
else:
ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
# Construct the request packet
req = "\x04\x01" + struct.pack(">H", destport) + ipaddr
# The username parameter is considered userid for SOCKS4
if self.__proxy[4] is not None:
req = req + self.__proxy[4]
req += "\x00"
# DNS name if remote resolving is required
# NOTE: This is actually an extension to the SOCKS4 protocol
# called SOCKS4A and may not be supported in all cases.
if rmtrslv:
req = req + destaddr + "\x00"
self.sendall(req)
# Get the response from the server
resp = self.__recvall(8)
if resp[0] != "\x00":
# Bad data
self.close()
raise GeneralProxyError((1, _generalerrors[1]))
if resp[1] != "\x5A":
# Server returned an error
self.close()
if ord(resp[1]) in (91, 92, 93):
self.close()
raise Socks4Error((ord(resp[1]), _socks4errors[ord(resp[1]) -
90]))
else:
raise Socks4Error((94, _socks4errors[4]))
# Get the bound address/port
self.__proxysockname = (socket.inet_ntoa(resp[4:]), struct.unpack(
">H", resp[2:4])[0])
# the real peer IP is only known when we resolved locally
if not rmtrslv:
self.__proxypeername = (socket.inet_ntoa(ipaddr), destport)
else:
self.__proxypeername = (destaddr, destport)
def __negotiatehttp(self, destaddr, destport):
"""__negotiatehttp(self,destaddr,destport)
Negotiates a connection through an HTTP server.
"""
# If we need to resolve locally, we do this now
if not self.__proxy[3]:
addr = socket.gethostbyname(destaddr)
else:
addr = destaddr
self.sendall("CONNECT " + addr + ":" + str(destport) + " HTTP/1.1\r\n" +
"Host: " + destaddr + "\r\n\r\n")
# We read the response until we get the string "\r\n\r\n"
resp = self.recv(1)
while resp.find("\r\n\r\n") == -1:
resp = resp + self.recv(1)
# We just need the first line to check if the connection
# was successful
statusline = resp.splitlines()[0].split(" ", 2)
if statusline[0] not in ("HTTP/1.0", "HTTP/1.1"):
self.close()
raise GeneralProxyError((1, _generalerrors[1]))
try:
statuscode = int(statusline[1])
except ValueError:
self.close()
raise GeneralProxyError((1, _generalerrors[1]))
if statuscode != 200:
self.close()
raise HTTPError((statuscode, statusline[2]))
self.__proxysockname = ("0.0.0.0", 0)
self.__proxypeername = (addr, destport)
def connect(self, destpair):
"""connect(self,despair)
Connects to the specified destination through a proxy.
destpar - A tuple of the IP/DNS address and the port number.
(identical to socket's connect).
To select the proxy server use setproxy().
"""
# Do a minimal input check first
if (not isinstance(destpair, (list, tuple))) or (len(destpair) < 2) or (
not isinstance(destpair[0], str)) or (not isinstance(destpair[1], int)):
raise GeneralProxyError((5, _generalerrors[5]))
if self.__proxy[0] == PROXY_TYPE_SOCKS5:
if self.__proxy[2] is not None:
portnum = self.__proxy[2]
else:
portnum = 1080
_orgsocket.connect(self, (self.__proxy[1], portnum))
self.__negotiatesocks5(destpair[0], destpair[1])
elif self.__proxy[0] == PROXY_TYPE_SOCKS4:
if self.__proxy[2] is not None:
portnum = self.__proxy[2]
else:
portnum = 1080
_orgsocket.connect(self, (self.__proxy[1], portnum))
self.__negotiatesocks4(destpair[0], destpair[1])
elif self.__proxy[0] == PROXY_TYPE_HTTP:
if self.__proxy[2] is not None:
portnum = self.__proxy[2]
else:
portnum = 8080
_orgsocket.connect(self, (self.__proxy[1], portnum))
self.__negotiatehttp(destpair[0], destpair[1])
elif self.__proxy[0] is None:
_orgsocket.connect(self, (destpair[0], destpair[1]))
else:
raise GeneralProxyError((4, _generalerrors[4]))

318
joinmarketd.py

@ -0,0 +1,318 @@
#! /usr/bin/env python
from __future__ import print_function
import sys
from joinmarketdaemon import (IRCMessageChannel, MessageChannelCollection,
OrderbookWatch, as_init_encryption, init_pubkey,
NaclError, init_keypair, COMMAND_PREFIX, ORDER_KEYS,
NICK_HASH_LENGTH, NICK_MAX_ENCODED, JM_VERSION,
JOINMARKET_NICK_HEADER)
from joinmarketdaemon.commands import *
from twisted.protocols import amp
from twisted.internet import reactor
from twisted.internet.protocol import ServerFactory
from twisted.python.log import startLogging, err
from twisted.python import log
import json
import time
import threading
"""Joinmarket application protocol control flow.
For documentation on protocol (formats, message sequence) see
https://github.com/JoinMarket-Org/JoinMarket-Docs/blob/master/
Joinmarket-messaging-protocol.md
"""
"""
***
API
***
The client-daemon two-way communication is documented in commands.py
"""
class MCThread(threading.Thread):
def __init__(self, mc):
threading.Thread.__init__(self, name='MCThread')
self.mc = mc
self.daemon = True
def run(self):
self.mc.run()
class JMProtocolError(Exception):
pass
class JMDaemonServerProtocol(amp.AMP, OrderbookWatch):
def __init__(self, factory):
self.factory = factory
#Set of messages we can receive from a client:
self.supported_messages = ["JM_INIT", "JM_SETUP", "JM_FILL",
"JM_MAKE_TX", "JM_REQUEST_OFFERS",
"JM_MAKE_TX", "JM_MSGSIGNATURE",
"JM_MSGSIGNATURE_VERIFY", "JM_START_MC"]
self.jm_state = 0
def checkClientResponse(self, response):
"""A generic check of client acceptance; any failure
is considered critical.
"""
if 'accepted' not in response or not response['accepted']:
reactor.stop()
@JMInit.responder
def on_JM_INIT(self, bcsource, network, irc_configs, minmakers,
maker_timeout_sec):
self.maker_timeout_sec = int(maker_timeout_sec)
self.minmakers = int(minmakers)
irc_configs = json.loads(irc_configs)
mcs = [IRCMessageChannel(c,
daemon=self,
realname='btcint=' + bcsource)
for c in irc_configs]
#(bitcoin) network only referenced in channel name construction
self.network = network
self.mcc = MessageChannelCollection(mcs)
OrderbookWatch.set_msgchan(self, self.mcc)
#register taker-specific msgchan callbacks here
self.mcc.register_taker_callbacks(self.on_error, self.on_pubkey,
self.on_ioauth, self.on_sig)
self.mcc.set_daemon(self)
d = self.callRemote(JMInitProto,
nick_hash_length=NICK_HASH_LENGTH,
nick_max_encoded=NICK_MAX_ENCODED,
joinmarket_nick_header=JOINMARKET_NICK_HEADER,
joinmarket_version=JM_VERSION)
d.addCallback(self.checkClientResponse)
return {'accepted': True}
@JMStartMC.responder
def on_JM_START_MC(self, nick):
"""Starts message channel threads;
JM_UP will be called when the welcome messages are received.
"""
self.init_connections(nick)
return {'accepted': True}
def init_connections(self, nick):
self.jm_state = 0 #uninited
self.mcc.set_nick(nick)
MCThread(self.mcc).start()
def on_welcome(self):
"""Fired when channel indicated state readiness
"""
d = self.callRemote(JMUp)
d.addCallback(self.checkClientResponse)
@JMSetup.responder
def on_JM_SETUP(self, role, n_counterparties):
assert self.jm_state == 0
assert n_counterparties > 1
#TODO consider MAKER role implementation here
assert role == "TAKER"
self.requested_counterparties = n_counterparties
self.crypto_boxes = {}
self.kp = init_keypair()
print("Received setup command")
d = self.callRemote(JMSetupDone)
d.addCallback(self.checkClientResponse)
#Request orderbook here, on explicit setup request from client,
#assumes messagechannels are in "up" state. Orders are read
#in the callback on_order_seen in OrderbookWatch.
self.mcc.pubmsg(COMMAND_PREFIX + "orderbook")
self.jm_state = 1
return {'accepted': True}
@JMRequestOffers.responder
def on_JM_REQUEST_OFFERS(self):
"""Reports the current state of the orderbook.
This call is stateless."""
rows = self.db.execute('SELECT * FROM orderbook;').fetchall()
self.orderbook = [dict([(k, o[k]) for k in ORDER_KEYS]) for o in rows]
log.msg("About to send orderbook of size: " + str(len(self.orderbook)))
string_orderbook = json.dumps(self.orderbook)
d = self.callRemote(JMOffers,
orderbook=string_orderbook)
d.addCallback(self.checkClientResponse)
return {'accepted': True}
@JMFill.responder
def on_JM_FILL(self, amount, commitment, revelation, filled_offers):
if not (self.jm_state == 1 and isinstance(amount, int) and amount >= 0):
return {'accepted': False}
self.cjamount = amount
self.commitment = commitment
self.revelation = revelation
#Reset utxo data to null for this new transaction
self.ioauth_data = {}
self.active_orders = json.loads(filled_offers)
for nick, offer_dict in self.active_orders.iteritems():
offer_fill_msg = " ".join([str(offer_dict["oid"]), str(amount), str(
self.kp.hex_pk()), str(commitment)])
self.mcc.prepare_privmsg(nick, "fill", offer_fill_msg)
self.first_stage_timer = time.time()
self.jm_state = 2
return {'accepted': True}
def on_pubkey(self, nick, maker_pk):
"""This is handled locally in the daemon; set up e2e
encrypted messaging with this counterparty
"""
if nick not in self.active_orders.keys():
log.msg("Counterparty not part of this transaction. Ignoring")
return
try:
self.crypto_boxes[nick] = [maker_pk, as_init_encryption(
self.kp, init_pubkey(maker_pk))]
except NaclError as e:
print("Unable to setup crypto box with " + nick + ": " + repr(e))
self.mcc.send_error(nick, "invalid nacl pubkey: " + maker_pk)
return
self.mcc.prepare_privmsg(nick, "auth", str(self.revelation))
def on_ioauth(self, nick, utxo_list, auth_pub, cj_addr, change_addr,
btc_sig):
"""Passes through to Taker the information from counterparties once
they've all been received; note that we must also pass back the maker_pk
so it can be verified against the btc-sigs for anti-MITM
"""
def respond(accepted):
d = self.callRemote(JMFillResponse,
success=accepted,
ioauth_data = json.dumps(self.ioauth_data))
if not accepted:
#Client simply accepts failure TODO
d.addCallback(self.checkClientResponse)
else:
#Act differently if *we* provided utxos, but
#client does not accept for some reason
d.addCallback(self.checkUtxosAccepted)
if nick not in self.active_orders.keys():
print("Got an unexpected ioauth from nick: " + str(nick))
return
self.ioauth_data[nick] = [utxo_list, auth_pub, cj_addr, change_addr,
btc_sig, self.crypto_boxes[nick][0]]
# compare as sets; dict key ordering is not guaranteed to match
if set(self.ioauth_data.keys()) == set(self.active_orders.keys()):
respond(True)
else:
time_taken = time.time() - self.first_stage_timer
#if the timer has run out, either pass through if we have
#at least minmakers, else return a failure condition
if time_taken > self.maker_timeout_sec:
if len(self.ioauth_data.keys()) >= self.minmakers:
respond(True)
else:
respond(False)
def checkUtxosAccepted(self, accepted):
if not accepted:
log.msg("Taker rejected utxos provided; resetting.")
#TODO create re-set function to start again
@JMMakeTx.responder
def on_JM_MAKE_TX(self, nick_list, txhex):
if not self.jm_state == 2:
return {'accepted': False}
nick_list = json.loads(nick_list)
self.mcc.send_tx(nick_list, txhex)
return {'accepted': True}
def on_sig(self, nick, sig):
"""Pass signature through to Taker.
"""
d = self.callRemote(JMSigReceived,
nick=nick,
sig=sig)
d.addCallback(self.checkClientResponse)
"""The following functions handle requests and responses
from the client for message signing and verifying.
"""
def request_signed_message(self, nick, cmd, msg, msg_to_be_signed, hostid):
"""The daemon passes the nick and cmd fields
to the client so it can be echoed back to the privmsg
after return (with signature); note that the cmd is already
inside "msg" after having been parsed in MessageChannel; this
duplication is so that the client does not need to know the
message syntax.
"""
d = self.callRemote(JMRequestMsgSig,
nick=str(nick),
cmd=str(cmd),
msg=str(msg),
msg_to_be_signed=str(msg_to_be_signed),
hostid=str(hostid))
d.addCallback(self.checkClientResponse)
def request_signature_verify(self, msg, fullmsg, sig, pubkey, nick, hashlen,
max_encoded, hostid):
d = self.callRemote(JMRequestMsgSigVerify,
msg=msg,
fullmsg=fullmsg,
sig=sig,
pubkey=pubkey,
nick=nick,
hashlen=hashlen,
max_encoded=max_encoded,
hostid=hostid)
d.addCallback(self.checkClientResponse)
@JMMsgSignature.responder
def on_JM_MSGSIGNATURE(self, nick, cmd, msg_to_return, hostid):
self.mcc.privmsg(nick, cmd, msg_to_return, mc=hostid)
return {'accepted': True}
@JMMsgSignatureVerify.responder
def on_JM_MSGSIGNATURE_VERIFY(self, verif_result, nick, fullmsg, hostid):
if not verif_result:
log.msg("Verification failed for nick: " + str(nick))
else:
self.mcc.on_verified_privmsg(nick, fullmsg, hostid)
return {'accepted': True}
def get_crypto_box_from_nick(self, nick):
if nick in self.crypto_boxes and self.crypto_boxes[nick] is not None:
return self.crypto_boxes[nick][1] # libsodium encryption object
else:
log.msg('something wrong, no crypto object, nick=' + nick +
', message will be dropped')
return None
def on_error(self):
log.msg("Unimplemented on_error")
def mc_shutdown(self):
log.msg("Message channels shut down in proto")
self.mcc.shutdown()
class JMDaemonServerProtocolFactory(ServerFactory):
protocol = JMDaemonServerProtocol
def buildProtocol(self, addr):
return JMDaemonServerProtocol(self)
def startup_joinmarketd(port, finalizer=None, finalizer_args=None):
"""Start event loop for joinmarket daemon here.
Args:
port : port over which to serve the daemon
finalizer: a function which is called after the reactor has shut down.
finalizer_args : arguments to finalizer function.
"""
log.startLogging(sys.stdout)
factory = JMDaemonServerProtocolFactory()
reactor.listenTCP(port, factory)
if finalizer:
reactor.addSystemEventTrigger("after", "shutdown", finalizer,
finalizer_args)
reactor.run()
if __name__ == "__main__":
port = int(sys.argv[1])
startup_joinmarketd(port)

201
sendpayment.py

@ -0,0 +1,201 @@
#! /usr/bin/env python
from __future__ import absolute_import, print_function
"""
A sample implementation of a single coinjoin script,
adapted from `sendpayment.py` in Joinmarket-Org/joinmarket.
More complex applications can extend from Taker and add
more features, such as repeated joins. This will also allow
easier coding of non-CLI interfaces.
Other potential customisations of the Taker object instantiation
include:
external_addr=None implies joining to another mixdepth
in the same wallet.
order_chooser can be set to a different custom function that selects
counterparty offers according to different rules.
"""
import random
import sys
import threading
from optparse import OptionParser
import time
from joinmarketclient import (Taker, load_program_config,
JMTakerClientProtocolFactory, start_reactor,
validate_address, jm_single, get_log,
choose_orders, choose_sweep_orders, pick_order,
cheapest_order_choose, weighted_order_choose,
debug_dump_object, Wallet, BitcoinCoreWallet,
estimate_tx_fee)
log = get_log()
def check_high_fee(total_fee_pc):
WARNING_THRESHOLD = 0.02 # 2%
if total_fee_pc > WARNING_THRESHOLD:
print('\n'.join(['=' * 60] * 3))
print('WARNING ' * 6)
print('\n'.join(['=' * 60] * 1))
print('OFFERED COINJOIN FEE IS UNUSUALLY HIGH. DOUBLE/TRIPLE CHECK.')
print('\n'.join(['=' * 60] * 1))
print('WARNING ' * 6)
print('\n'.join(['=' * 60] * 3))
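# Usage sketch (hypothetical names): callers are expected to pass the fee as
# a fraction of the coinjoin amount, e.g.
#   check_high_fee(total_cj_fee / float(cj_amount))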
def main():
parser = OptionParser(
usage=
'usage: %prog [options] [wallet file / fromaccount] [amount] [destaddr]',
description='Sends a single payment from a given mixing depth of your '
+
'wallet to a given address using coinjoin and then switches off. Also sends from bitcoinqt. '
+
'Setting amount to zero will do a sweep, where the entire mix depth is emptied')
parser.add_option(
'-f',
'--txfee',
action='store',
type='int',
dest='txfee',
default=-1,
help=
'number of satoshis per participant to use as the initial estimate ' +
'for the total transaction fee, default=dynamically estimated, note that this is adjusted '
+
'based on the estimated fee calculated after tx construction, based on '
+ 'policy set in joinmarket.cfg.')
parser.add_option(
'-w',
'--wait-time',
action='store',
type='float',
dest='waittime',
help='wait time in seconds to allow orders to arrive, default=15',
default=15)
parser.add_option(
'-N',
'--makercount',
action='store',
type='int',
dest='makercount',
help='how many makers to coinjoin with, default random from 4 to 6',
default=random.randint(4, 6))
parser.add_option('-p',
'--port',
type='int',
dest='daemonport',
help='port on which joinmarketd is running',
default=12345)
parser.add_option(
'-C',
'--choose-cheapest',
action='store_true',
dest='choosecheapest',
default=False,
help=
'override weighted offer picking and choose the cheapest. this might reduce anonymity.')
parser.add_option(
'-P',
'--pick-orders',
action='store_true',
dest='pickorders',
default=False,
help=
'manually pick which orders to take. doesn\'t work while sweeping.')
parser.add_option('-m',
'--mixdepth',
action='store',
type='int',
dest='mixdepth',
help='mixing depth to spend from, default=0',
default=0)
parser.add_option('-a',
'--amtmixdepths',
action='store',
type='int',
dest='amtmixdepths',
help='number of mixdepths in wallet, default 5',
default=5)
parser.add_option('-g',
'--gap-limit',
type="int",
action='store',
dest='gaplimit',
help='gap limit for wallet, default=6',
default=6)
parser.add_option('--yes',
action='store_true',
dest='answeryes',
default=False,
help='answer yes to everything')
parser.add_option(
'--rpcwallet',
action='store_true',
dest='userpcwallet',
default=False,
help=('Use the Bitcoin Core wallet through json rpc, instead '
'of the internal joinmarket wallet. Requires '
'blockchain_source=json-rpc'))
(options, args) = parser.parse_args()
if len(args) < 3:
parser.error('Needs a wallet, amount and destination address')
sys.exit(0)
wallet_name = args[0]
amount = int(args[1])
destaddr = args[2]
load_program_config()
jm_single().maker_timeout_sec = 5
addr_valid, errormsg = validate_address(destaddr)
if not addr_valid:
print('ERROR: Address invalid. ' + errormsg)
return
chooseOrdersFunc = None
if options.pickorders:
chooseOrdersFunc = pick_order
if amount == 0:
print('WARNING: You may have to pick offers multiple times')
print('WARNING: due to manual offer picking while sweeping')
elif options.choosecheapest:
chooseOrdersFunc = cheapest_order_choose
else: # choose randomly (weighted)
chooseOrdersFunc = weighted_order_choose
# Dynamically estimate a realistic fee if it currently is the default value.
# At this point we do not know even the number of our own inputs, so
# we guess conservatively with 2 inputs and 2 outputs each
if options.txfee == -1:
options.txfee = max(options.txfee, estimate_tx_fee(2, 2))
log.debug("Estimated miner/tx fee for each cj participant: " + str(
options.txfee))
assert (options.txfee >= 0)
log.debug('starting sendpayment')
if not options.userpcwallet:
wallet = Wallet(wallet_name, options.amtmixdepths, options.gaplimit)
else:
wallet = BitcoinCoreWallet(fromaccount=wallet_name)
jm_single().bc_interface.sync_wallet(wallet)
taker = Taker(wallet,
options.mixdepth,
amount,
options.makercount,
order_chooser=chooseOrdersFunc,
external_addr=destaddr)
clientfactory = JMTakerClientProtocolFactory(taker)
start_reactor("localhost", options.daemonport, clientfactory)
if __name__ == "__main__":
main()
print('done')