|
import hashlib
import os.path
from bisect import bisect_right, bisect_left

# BIP39 English wordlist, loaded one word per line from english.txt next to
# this module.  Entries keep their trailing newline; consumers call .strip().
# Fix: os.path.dirpath does not exist -- os.path.dirname is the correct API
# for "directory containing this module", so the original line raised
# AttributeError at import time.
wordlist_english = list(open(os.path.join(os.path.dirname(__file__), 'english.txt'), 'r'))
| 6 | + |
def entropy_cs(entbytes):
    """Return (checksum, checksum_bit_length) for BIP39 entropy bytes.

    Per BIP39, the checksum is the top ``len(entbytes) * 8 / 32`` bits of
    ``SHA256(entbytes)``.
    """
    bits = len(entbytes) * 8
    cs_bits = bits // 32
    digest = hashlib.sha256(entbytes).digest()
    # Keep only the most-significant cs_bits bits of the 256-bit digest.
    cs = int.from_bytes(digest, 'big') >> (256 - cs_bits)
    return cs, cs_bits
| 13 | + |
def entropy_to_words(entbytes, wordlist=wordlist_english):
    """Convert entropy bytes to a BIP39 mnemonic word sequence.

    entbytes : entropy whose bit length is a multiple of 32 (16-32 bytes
               for standard 12-24 word mnemonics).
    wordlist : sorted list of 2048 words; entries may carry trailing
               whitespace (stripped here).
    Returns an iterator over the mnemonic words, most-significant first.

    Fixes vs. the original: ``entint``, ``seedint`` and ``seedsize`` were
    undefined names (NameError), and ``mnemonic_size`` used true division,
    producing a float that ``range()`` rejects.
    """
    entropy_size = 8 * len(entbytes)
    csint, checksum_size = entropy_cs(entbytes)

    # Append the checksum bits after the entropy to form one big integer.
    entint = int.from_bytes(entbytes, 'big')
    mint = (entint << checksum_size) | csint
    # Each word encodes 11 bits; entropy_size + checksum_size is a
    # multiple of 11 for valid BIP39 entropy sizes.
    mnemonic_size = (entropy_size + checksum_size) // 11

    # Pull 11-bit groups from the least-significant end, then reverse so
    # the most-significant group maps to the first word.
    backwords = [wordlist[(mint >> (11 * x)) & 0x7FF].strip()
                 for x in range(mnemonic_size)]
    return reversed(backwords)
| 23 | + |
| 24 | + |
| 25 | +#def words_to_seed(words,wordlist=wordlist_english): |
| 26 | + |
| 27 | + |
def words_bisect(word, wordlist=wordlist_english):
    """Return the half-open range [lo, hi) of entries in sorted ``wordlist``
    that start with the prefix ``word``.

    ``hi - lo == 1`` therefore means the prefix is unique in the list.
    """
    lo = bisect_left(wordlist, word)
    hi = lo
    lw = len(word)
    # Bound the scan: the original unguarded loop raised IndexError when the
    # prefix matched entries running up to the end of the list.
    while hi < len(wordlist) and wordlist[hi][:lw] == word:
        hi += 1

    return lo, hi
| 36 | + |
def words_split(wordstr,wordlist=wordlist_english):
    """Split a concatenated mnemonic string into individual words.

    Repeatedly peels the next word off the front of ``wordstr`` by growing a
    prefix one character at a time until it is unique in ``wordlist``.
    NOTE(review): the returned "word" is the shortest unique prefix, which may
    be shorter than the full dictionary word -- presumably relying on BIP39's
    unique-4-letter-prefix property; confirm callers expect prefixes.
    Raises a generic Exception when no prefix of up to 8 characters is unique.
    """
    def popword(wordstr,wordlist):
        # Try prefixes of length 1..8 (BIP39 words are at most 8 chars).
        for fwl in range(1,9):
            w=wordstr[:fwl]
            lo,hi=words_bisect(w,wordlist)
            if(hi-lo == 1):
                # Unique match: return the prefix and the unconsumed tail.
                return w,wordstr[fwl:]
            # Narrow the candidate list to entries sharing this prefix, so the
            # next (longer) prefix only searches within them.
            wordlist=wordlist[lo:hi]
        raise Exception("Wordstr %s not found in list" %(w))

    words=[]
    tail=wordstr
    # Consume the string front-to-back until nothing remains.
    while(len(tail)):
        head,tail=popword(tail,wordlist)
        words.append(head)
    return words
| 53 | + |
def words_verify(words):
    """Placeholder for mnemonic checksum verification -- not yet implemented.

    TODO: if ``words`` is a single string, split it into words first, then
    recompute the checksum and compare.  Currently always returns None.
    """
    return None
| 57 | + |
0 commit comments