# Create a Shakespeare AI
#
# Inspired by Andrej Karpathy http://karpathy.github.io/2015/05/21/rnn-effectiveness/
# and https://github.com/karpathy/char-rnn
# This can learn anything text-based, including code and LaTeX papers ;).
# Note: training is quite slow on CPU, 30 min for my i5-5257U (2.7GHz dual-core Broadwell from 2015)
#
# Also, parallelizing via OpenMP will slow down computation, so don't use it;
# there is probably false sharing in the GRU layer, reshape layer or flatten_idx from Embedding.
#
# Remember that the network
#   - must learn not to use !?;. everywhere
#   - must learn how to use spaces and new lines
#   - must learn capital letters
#   - must learn that characters form words
#
# TODO: save/reload trained weights
import
  streams, os, random, times, strformat, algorithm, sequtils, tables,
  ../src/arraymancer

# ################################################################
#
# Environment constants
#
# ################################################################
# Printable chars - from Python: import string; string.printable
const IxToChar = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ!\"#$%&\'()*+,-./:;<=>?@[\\]^_`{|}~ \t\n\r\x0b\x0c"
const UnkCharIx = IxToChar.len # Unknown characters will be replaced by this
type PrintableIdx = uint8

func genCharToIx(): Table[char, PrintableIdx] =
  result = initTable[char, PrintableIdx]()
  for idx, ch in IxToChar:
    result[ch] = PrintableIdx idx

const
  CharToIx = genCharToIx()
  VocabSize = IxToChar.len + 1 # Cardinality of the set of PrintableChars, plus an extra 1 for the "UnknownChar"
  BatchSize = 100
  Epochs = 2000       # This takes a long, long time; I'm not even sure it converges
  Layers = 2
  HiddenSize = 100
  LearningRate = 0.01'f32
  EmbedSize = 100
  SeqLen = 200        # Character sequences will be split in chunks of 200
  StatusReport = 200  # Report training status every x batches
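
# A quick sanity sketch of the vocabulary mapping above (a hypothetical demo,
# not used by training; compile with `-d:demoVocab` to enable):
when defined(demoVocab):
  doAssert IxToChar.len == 100     # same length as Python's string.printable
  doAssert CharToIx['a'] == 10'u8  # digits come first, then lowercase letters
  doAssert UnkCharIx == 100        # one extra index reserved for unknown characters
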
# ################################################################
#
# Helpers
#
# ################################################################

func strToTensor(str: string|TaintedString): Tensor[PrintableIdx] =
  result = newTensor[PrintableIdx](str.len)

  # For each x in result, map the corresponding char index
  for i, val in result.menumerate:
    if str[i] in CharToIx:
      val = CharToIx[str[i]]
    else:
      # Unknown characters are mapped to the padding index
      val = UnkCharIx

# Weighted random sampling / multinomial sampling
#   Note: during text generation we only work with
#         a batch size of 1, so for simplicity we use
#         seq and openarrays instead of tensors

func cumsum[T](x: openarray[T]): seq[T] =
  ## Cumulative sum of a 1D array/seq
  #
  # Note: this will have a proper and faster implementation for tensors in the future
  result = newSeq[T](x.len)
  result[0] = x[0]
  for i in 1 ..< x.len:
    result[i] = x[i] + result[i-1]
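
# A minimal usage sketch for `cumsum` (hypothetical demo,
# compile with `-d:demoHelpers` to enable):
when defined(demoHelpers):
  doAssert cumsum([1, 2, 3, 4]) == @[1, 3, 6, 10]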

proc searchsorted[T](x: openarray[T], value: T, leftSide: static bool = true): int =
  ## Returns the index at which the input value would be inserted.
  ## Input must be a sorted 1D seq/array.
  ## In case of an exact match, leftSide indicates if we put the value
  ## on the left or the right of the exact match.
  ##
  ## This is equivalent to Numpy and Tensorflow searchsorted.
  ## Examples:
  ##   [0, 3, 9, 9, 10] with value 4 will return 2
  ##   [1, 2, 3, 4, 5]  with value 2 will return 1 if left side, 2 otherwise
  #
  # Note: this will have a proper and faster implementation for tensors in the future
  when leftSide:
    result = x.lowerBound(value)
  else:
    result = x.upperBound(value)
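
# The docstring examples above, as runnable checks (hypothetical demo,
# compile with `-d:demoHelpers` to enable):
when defined(demoHelpers):
  doAssert searchsorted([0, 3, 9, 9, 10], 4) == 2
  doAssert searchsorted([1, 2, 3, 4, 5], 2) == 1
  doAssert searchsorted([1, 2, 3, 4, 5], 2, leftSide = false) == 2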

proc sample[T](probs: Tensor[T]): int =
  ## Returns a weighted random sample (multinomial sampling)
  ## from a 1D Tensor of probabilities.
  ## Probabilities must sum to 1 (normalised).
  ## For example:
  ##   - a Tensor of [0.1, 0.4, 0.2, 0.3]
  ##     will return 0 in 10% of cases
  ##                 1 in 40% of cases
  ##                 2 in 20% of cases
  ##                 3 in 30% of cases
  assert probs.rank == 1
  assert probs.is_C_contiguous
  assert probs.sum - 1.T < T(1e-5)

  # We use a separate RNG for our sampling
  var rng {.global.} = initRand(0xDEADBEEF)

  # We pass our 1D Tensor as an openarray to avoid copies
  let p = cast[ptr UncheckedArray[T]](probs.get_data_ptr)

  # Get a sample from a uniform distribution
  let u = T(rng.rand(1.0))

  # Get the Cumulative Distribution Function of our probabilities
  let cdf = cumsum p.toOpenArray(0, probs.shape[0] - 1)
  result = cdf.searchsorted(u, leftSide = false)
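
# A small sketch of the sampler's behaviour (hypothetical demo,
# compile with `-d:demoSampling` to enable). Counts should come out
# roughly proportional to the probabilities, e.g. ~[1000, 4000, 2000, 3000]:
when defined(demoSampling):
  let demoProbs = [0.1'f32, 0.4, 0.2, 0.3].toTensor
  var demoCounts: array[4, int]
  for _ in 0 ..< 10_000:
    inc demoCounts[demoProbs.sample()]
  echo demoCounts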

# ################################################################
#
# Neural network model
#
# ################################################################

# Create our model and the weights to train
#
#     For the sake of convenience, the neural net declaration mini-language
#     used in examples 2 to 5
#     only accepts Variable[Tensor[float32]] (for a Tensor[float32] context),
#     but we also need a Tensor[char] input for embedding.
#     So much for trying to be too clever ¯\_(ツ)_/¯.
#
#     Furthermore, you don't have flexibility in the return variables,
#     while we also need to return the hidden state of our text generation model.
#
#     So we need to do everything manually...

# We use a classic Encoder-Decoder architecture, with text encoded into an internal representation
# and then decoded back into text.
# So we need to train the encoder, the internal representation and the decoder.

type
  ## The following is normally unnecessary when using the NN mini-lang
  LinearLayer[TT] = object
    weight: Variable[TT]
    bias: Variable[TT]

  GRULayer[TT] = object
    W3s0, W3sN: Variable[TT]
    U3s: Variable[TT]
    bW3s, bU3s: Variable[TT]

  EmbeddingLayer[TT] = object
    weight: Variable[TT]

  ShakespeareNet[TT] = object
    # Embedding weight = Encoder
    encoder: EmbeddingLayer[TT]
    # GRU RNN = Internal representation
    gru: GRULayer[TT]
    # Linear layer weight and bias = Decoder
    decoder: LinearLayer[TT]

template weightInit(shape: varargs[int], init_kind: untyped): Variable =
  ## Even though we need to do the initialisation manually,
  ## let's not repeat ourselves too much.
  ctx.variable(
    init_kind(shape, float32),
    requires_grad = true
  )

proc newShakespeareNet[TT](ctx: Context[TT]): ShakespeareNet[TT] =
  ## Initialise a model with random weights.
  ## Normally this is done for you with the `network` macro.

  # Embedding layer
  #   Input: [SeqLen, BatchSize, VocabSize]
  #   Output: [SeqLen, BatchSize, EmbedSize]
  result.encoder.weight = ctx.variable(
    # initialisation bench https://arxiv.org/pdf/1711.09160.pdf
    # Convergence is **VERY** sensitive, I can't reproduce the paper.
    # Best in our case is mean = 0, std = 1.
    randomNormalTensor(VocabSize, EmbedSize, 0'f32, 1'f32),
    requires_grad = true
  )
  # Fill the padding/unknown-character mapping with 0
  result.encoder.weight.value[UnkCharIx, _] = 0

  # GRU layer
  #   Input:   [SeqLen, BatchSize, EmbedSize]
  #   Hidden0: [Layers, BatchSize, HiddenSize]
  #
  #   Output:  [SeqLen, BatchSize, HiddenSize]
  #   HiddenN: [Layers, BatchSize, HiddenSize]

  # A GRU has 5 weights/biases that can be trained.
  # This initialisation is normally hidden from you.
  result.gru.W3s0 = weightInit(            3 * HiddenSize,  EmbedSize, xavier_uniform)
  result.gru.W3sN = weightInit(Layers - 1, 3 * HiddenSize, HiddenSize, xavier_uniform)
  result.gru.U3s  = weightInit(    Layers, 3 * HiddenSize, HiddenSize, yann_normal)
  result.gru.bW3s = ctx.variable(zeros[float32](Layers, 1, 3 * HiddenSize), requires_grad = true)
  result.gru.bU3s = ctx.variable(zeros[float32](Layers, 1, 3 * HiddenSize), requires_grad = true)

  # Linear layer
  #   Input: [BatchSize, HiddenSize]
  #   Output: [BatchSize, VocabSize]
  result.decoder.weight = weightInit(VocabSize, HiddenSize, kaiming_normal)
  result.decoder.bias   = ctx.variable(zeros[float32](1, VocabSize), requires_grad = true)

# Some wrappers to pass the layer weights
proc encode[TT](model: ShakespeareNet[TT], x: Tensor[PrintableIdx]): Variable[TT] =
  embedding(x, model.encoder.weight, padding_idx = UnkCharIx)

proc gru_forward(model: ShakespeareNet, x, hidden0: Variable): tuple[output, hiddenN: Variable] =
  gru(
    x, hidden0,
    model.gru.W3s0, model.gru.W3sN,
    model.gru.U3s,
    model.gru.bW3s, model.gru.bU3s
  )

proc decode(model: ShakespeareNet, x: Variable): Variable =
  linear(x, model.decoder.weight, model.decoder.bias)

proc forward[TT](
        model: ShakespeareNet[TT],
        input: Tensor[PrintableIdx],
        hidden0: Variable[TT]
      ): tuple[output, hidden: Variable[TT]] =

  let encoded = model.encode(input)
  let (output, hiddenN) = model.gru_forward(encoded, hidden0)

  # `output` is of shape [Sequence, BatchSize, HiddenSize]
  # In our case the sequence is 1 so we can simply flatten
  let flattened = output.reshape(output.value.shape[1], HiddenSize)

  result.output = model.decode(flattened)
  result.hidden = hiddenN
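
# A shape sanity sketch for one forward step (hypothetical demo,
# compile with `-d:demoShapes` to enable; builds a throwaway model):
when defined(demoShapes):
  let demoCtx = newContext Tensor[float32]
  let demoModel = demoCtx.newShakespeareNet()
  let demoHidden = demoCtx.variable zeros[float32](Layers, 1, HiddenSize)
  let demoInput = "W".strToTensor().unsqueeze(1)  # Shape [1, 1]: 1 char, batch of 1
  let (demoOut, demoHiddenN) = demoModel.forward(demoInput, demoHidden)
  doAssert demoOut.value.shape[0] == 1 and demoOut.value.shape[1] == VocabSize
  doAssert demoHiddenN.value.shape[0] == Layers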

# ################################################################
#
# Training
#
# ################################################################

proc gen_training_set(
        data: Tensor[PrintableIdx],
        seq_len, batch_size: int,
        rng: var Rand
      ): tuple[input, target: Tensor[PrintableIdx]] =
  ## Generate a set of input sequences of length `seq_len`
  ## and the immediately following `seq_len` characters to predict.
  ## Sequences are extracted randomly from the whole text.
  ## i.e. if we have ABCDEF as input data
  ##      we can have ABC as input
  ##      and BCD as target
  result.input = newTensor[PrintableIdx](seq_len, batch_size)
  result.target = newTensor[PrintableIdx](seq_len, batch_size)

  let length = data.shape[0]
  for batch_id in 0 ..< batch_size:
    let start_idx = rng.rand(0 ..< (length - seq_len))
    let end_idx = start_idx + seq_len + 1
    result.input[_, batch_id] = data[start_idx ..< end_idx - 1]
    result.target[_, batch_id] = data[start_idx + 1 ..< end_idx]
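
# A sketch checking the one-character shift between input and target
# (hypothetical demo, compile with `-d:demoTrainingSet` to enable):
when defined(demoTrainingSet):
  var demoRng = initRand(123)
  let demoData = "ABCDEF".strToTensor().unsqueeze(1)
  let (demoIn, demoTgt) = gen_training_set(demoData, 3, 2, demoRng)
  for b in 0 ..< 2:
    doAssert demoTgt[0, b] == demoIn[1, b]
    doAssert demoTgt[1, b] == demoIn[2, b]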

proc train[TT](
        ctx: Context[TT],
        model: ShakespeareNet[TT],
        optimiser: var Optimizer[TT],
        input, target: Tensor[PrintableIdx]): float32 =
  ## Train a model with an input and the corresponding characters to predict.
  ## Return the loss after the training session.
  let seq_len = input.shape[0]
  var hidden = ctx.variable zeros[float32](Layers, BatchSize, HiddenSize)

  # We accumulate the loss over the whole sequence before backpropagating at once.
  var seq_loss = ctx.variable(zeros[float32](1), requires_grad = true)

  for char_pos in 0 ..< seq_len:
    var output: Variable[TT]
    (output, hidden) = model.forward(input[char_pos, _], hidden)
    let batch_loss = output.sparse_softmax_cross_entropy(target[char_pos, _].squeeze(0))

    seq_loss = seq_loss + batch_loss

  seq_loss.backprop()
  optimiser.update()

  result = seq_loss.value[0] / seq_len.float32

# ################################################################
#
# Text generator
#
# ################################################################

proc gen_text[TT](
        ctx: Context[TT],
        model: ShakespeareNet[TT],
        seed_chars = "Wh", # Why, What, Who ...
        seq_len = SeqLen,
        temperature = 0.8'f32
      ): string =
  ## Inputs:
  ##   - model: the trained model
  ##   - seed_chars: a string to initialise the generator state and get it running
  ##   - seq_len: text is generated in chunks of `seq_len` length
  ##   - temperature: the conservative <--> diversity scale of the generator.
  ##                  Value between 0 and 1: near 0 it will be conservative,
  ##                  near 1 it will take liberties but make more mistakes.
  ctx.no_grad_mode:
    var hidden = ctx.variable zeros[float32](Layers, 1, HiddenSize) # batch_size is now 1
    let primer = seed_chars.strToTensor().unsqueeze(1) # Shape [seq_len, 1]

    var output: Variable[TT]

    # Create a consistent hidden state by feeding the seed chars
    for char_pos in 0 ..< primer.shape[0] - 1:
      (output, hidden) = model.forward(primer[char_pos, _], hidden)

    result = seed_chars

    # And start from the last char!
    var input = primer[^1, _]

    for _ in 0 ..< seq_len:
      (output, hidden) = model.forward(input, hidden)
      # output is of shape [BatchSize, VocabSize] with BatchSize = 1

      # Go back to the tensor domain
      var preds = output.value

      # We scale by the temperature first
      preds /.= temperature

      # Get a probability distribution
      let probs = preds.softmax().squeeze(0)

      # Sample and append to the generated chars
      let ch_ix = probs.sample().PrintableIdx
      result &= IxToChar[ch_ix]

      # Next char
      input = newTensor[PrintableIdx](1, 1)
      input[0, 0] = ch_ix
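
# A sketch of how temperature reshapes the distribution before sampling
# (hypothetical demo, compile with `-d:demoTemperature` to enable).
# Dividing the logits by a temperature below 1 sharpens the softmax output,
# which makes the generator more conservative:
when defined(demoTemperature):
  let demoHot = [[1.0'f32, 2.0, 3.0]].toTensor
  var demoCold = demoHot.clone()
  demoCold /.= 0.5'f32
  echo demoHot.softmax()   # ~[0.09, 0.24, 0.67]
  echo demoCold.softmax()  # sharper: ~[0.02, 0.12, 0.87]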

# ################################################################
#
# User interaction
#
# ################################################################

proc main() =
  # Parse the input file
  let filePath = paramStr(1).string
  let txt_raw = readFile(filePath)

  echo "Checking the first hundred characters of your file"
  echo txt_raw[0 .. 100]
  echo "\n####\nStarting training\n"

  # For our needs in gen_training_set, we reshape it from [nb_chars] to [nb_chars, 1]
  let txt = txt_raw.strToTensor.unsqueeze(1)

  # Make the results reproducible
  randomize(0xDEADBEEF) # Changing that will change the weight initialisation

  # Create our autograd context that will track deep learning operations applied to tensors.
  let ctx = newContext Tensor[float32]

  # Build our model and initialise its weights
  let model = ctx.newShakespeareNet()

  # Optimizer
  # let optim = model.optimizerSGD(learning_rate = LearningRate)
  var optim = model.optimizerAdam(learning_rate = LearningRate)

  # We use a different RNG for the sequence splits
  var split_rng = initRand(42)

  # Start our time counter
  let start = epochTime()

  for epoch in 0 ..< Epochs:
    let (input, target) = gen_training_set(txt, SeqLen, BatchSize, split_rng)
    let loss = ctx.train(model, optim, input, target)

    if epoch mod StatusReport == 0:
      let elapsed = epochTime() - start
      echo &"\n####\nTime: {elapsed:>4.4f} s, Epoch: {epoch}/{Epochs}, Loss: {loss:>2.4f}"
      echo "Sample: "
      echo ctx.gen_text(model, seq_len = 100)

  echo "\n##########\nTraining end. Generating 4000 characters Shakespeare masterpiece in 3. 2. 1...\n\n"
  echo ctx.gen_text(model, seq_len = 4000)

main()

# ###########################################################################################
#
# Text generation - Shakespeare
#
# ###########################################################################################
# $ ./build/ex06 examples/ex06_shakespeare_input.txt
# Checking the first hundred characters of your file
# First Citizen:
# Before we proceed any further, hear me speak.
#
# All:
# Speak, speak.
#
# First Citizen:
# You
# #################
# Starting training
# ####
# Time: 0.9528 s, Epoch: 0/2000, Loss: 4.6075
# Sample:
# Whxpq[<],@
# \;d@JmO.JQg-DN!e7tUXO{D(PftMkX7
# Hhz;
# dN<w
# {9Z<&}Sw/Y\\6vE/>ISOUF
# URfPy=>&>z`
# ####
# Time: 162.7714 s, Epoch: 200/2000, Loss: 1.5452
# Sample:
# Whe coman:
# You will occse is not
# this bright in the deny,
# Doth
# What, he do not on the malding wouth th
# ####
# Time: 318.5498 s, Epoch: 400/2000, Loss: 1.4075
# Sample:
# Whis fortunable face,
# And specians for the thought to beeF the air are to your jat,
# To must be in that
# ####
# Time: 483.7722 s, Epoch: 600/2000, Loss: 1.4117
# Sample:
# Whfor as to the soons
# Of wish'd us o' this banes.
#
# KING HENRY VI:
# So places you do not sweet heart, bi
# ####
# Time: 644.9014 s, Epoch: 800/2000, Loss: 1.3998
# Sample:
# Whee, hence is bastard repose where and
# grie for likelical o'er his stating.
#
# ANGELO:
# O comfort world:
# ####
# Time: 812.3859 s, Epoch: 1000/2000, Loss: 1.3498
# Sample:
# Whall foectiove of Lord:
# Why Hastand Boisely.
# First Citizen:
# Good-shappets and all the secares Homedi
# ####
# Time: 969.4953 s, Epoch: 1200/2000, Loss: 1.3605
# Sample:
# Whou judgty injurity sorrow's quarrel conmioner?
#
# BAPTISTA:
# No fetcom up, I say with one more of time
# ####
# Time: 1138.4985 s, Epoch: 1400/2000, Loss: 1.3145
# Sample:
# Whe sweet Citying
# A bloody though yourson to the Duke of Hereford are:
# My life in Dost to be so on? He
# ####
# Time: 1303.5912 s, Epoch: 1600/2000, Loss: 1.3774
# Sample:
# Whemselves,
# And hates in a accides whilst my state,
# She dival wrough not unto this, see to your lander
# ####
# Time: 1470.3374 s, Epoch: 1800/2000, Loss: 1.3561
# Sample:
# Wh your banished, after but the only ignorland.
# O, must it close out lies
# To courtious are quiet upon,
# ##########
# Training end. Generating 4000 characters Shakespeare masterpiece in 3. 2. 1...
# Whter!
# Take's servant seal'd, making uponweed but rascally guess-boot,
# Bare them be that been all ingal to me;
# Your play to the see's wife the wrong-pars
# With child of queer wretchless dreadful cold
# Cursters will how your part? I prince!
# This is time not in a without a tands:
# You are but foul to this.
# I talk and fellows break my revenges, so, and of the hisod
# As you lords them or trues salt of the poort.
#
# ROMEO:
# Thou hast facted to keep thee, and am speak
# Of them; she's murder'd of your galla?
#
# ANTES:
# Nay, I hear i' the day, bie in half exorcheqous again.
# Cockin Tinved: I is wont? Who be youth friends
# In our beauty of one raised me in all me;
# This will recour castle appelied is:
# I thank you, lords.
# Who, I have not offer, the shipp'd, shalt it is Isabels
# We will be with my keepons of your witfers.
# I was as you have perfited to give car.
#
# SICINE:
# In a sisterexber his record to my turn
# Made you dishonour's, if they have so yean
# Reportistiful viel offs, which we will prayed
# By merry the nightly to find them:
# The fiery to: and she double last speak it,
# For I will resian, he, mark for the air:
# O did thy mustable lodge! Nen't, my mosts!
# I greet before,--hath age-tinent or breath?
# I would your firms it be new-was 'scape. Is he shall choice,
# Were our husband, in what here twenties and forly,
# Althess to bries are time and senses, and dead-hear themselves
# Having, and this brother is they had'd is; I have a captive:
# My grains! a scarl doing of true forth, some trutis
# As Paduition, by this till us, as you teever
# Whething those baintious plague honour of gentleman,
# Through God lies,
# conunsel, to dishanging can for that men will well were my rasped me
# As well'd as the way off than her wairs with Lancaster show.
# Ah, will you forgot, and good lies of woman
# With a
# feshie:
# Good my Lord.
#
# AUTOLYCUS:
# Whit!
# Grave ta'en my lord, I'ld their names. The are mored of sorrow hath those
# soon weep'st his eyes. My horrcowns, bone, I kindness:
# How idle were which mean nothing cannot weep
# To rescockingly that hasting the sorrow,
# A good to grow'd of our hate how--
# Hear thee your tempest provided: I never confirm,
# Let's a brackful wife calms; they are instyef,
# Shall make thee, but my love.
#
# LADY ANNE:
# Methinks to him:
# But O, have it become ingly stand; think,
# And told the sringer'd againny, Pito:
# Ay, sir; answer'd awe! methink-'Ge is good hour!
# I pray you casquen not hear my form.
# Your unmanding them friends and barth halber,
# More words should not; and to a daughter'd and poor strop'd
# By one as we prove a cursed would not now:
# For thus in a flate death the heaven'd:
# And lies before I hapk or were.
#
# Nurse:
# Fearlwellare, confiarly Marciusbson,
# Were I how stop poiring to no more,
# To worser body to me and die clots, and out
# Their correction defimbry's truth.
#
# BRUTUS:
# Prother to be deadly of gold to be yet,
# Witholesfair than your complished, thus
# wearing triumph that live thyse toes a noble queen:
# I will yet, let him friends to given: take all
# Clease them a slain: our hours and saw Richmes,
# 'Foren thou straight whet it for your treis.
# First is, for you to cousosa thus I'll make weed.
#
# QUEEN:
# I thrive, and how all thy comes?
#
# PRINCE EDWARD:
# Why, the day of all spoil'd nor unsure?
# Come, but never my love is mine,
# To she he himself prevone one it eag.
# Holdis true, bid got I am will not to titteat?
#
# SICINIUS:
# Consign nows this,
# My turns and dead before they-that was me to thy deat?
#
# CORIOLANUS:
# Even earth,
# Your churchister of Romeo, and grace is honest
# and mine envyou.
#
# DUCHESS OF YORK:
# Stand doth ceasians of Edward is time
# Of those would hence I have stopp'd;
# That is this parlest for all time and that eyes
# -adey is remain twine, that can yield
# Have I cursed and were they shouldst fire; I
# privile to thy fair Richard quietlious.
#
# LADY CAPULEL:
# No, but some bebarduched fight the so?
# If I may shake one will't not find him be souls
# They have you inkfender in death to give:
# Soft! hast here and sister of yourmer shuts
# Yet be it strike deabe; thy sures the while.
#
# WARWICK:
# ###########################################################################################
#
# Text generation - Pride and Prejudice, Jane Austen
#
# ###########################################################################################
# $ ./build/ex06 build/pride_and_prejudice.txt
# Checking the first hundred characters of your file
# PRIDE AND PREJUDICE
#
# By Jane Austen
#
#
#
# Chapter 1
#
#
# It is a truth universally acknowledged, that a sin
# ####
# Starting training
# ####
# Time: 0.8692 s, Epoch: 0/2000, Loss: 4.6137
# Sample:
# A8T+ ^Cyvd&Ep<"e8tVXO{C(PftLlY7
# ^=d[KmP.IQg,DN
# {9!=&~Sw/Y]]6uD/?HSOUEVSgPy=>&?y{ Hiy;
# ####
# Time: 153.5895 s, Epoch: 200/2000, Loss: 1.3105
# Sample:
# Whereince you trood object. She Elizabeth; she gratise her Lyday's like no;
# in the manies.
#
#
#
# Ve
# ####
# Time: 301.1851 s, Epoch: 400/2000, Loss: 1.2050
# Sample:
# Whe had evident heard an
# apporing? Do her long luditure, he alsoment on it?
#
# STreeapen
# had get
# ####
# Time: 458.2439 s, Epoch: 600/2000, Loss: 1.1570
# Sample:
# Wher was to recommend on where his own; she are explain her for; and that he
# town to her
# feative her l
# ####
# Time: 609.7807 s, Epoch: 800/2000, Loss: 1.1460
# Sample:
# Wheir false of advice with which which was complimented. But
# as he had once towards the belief of
# spir
# ####
# Time: 768.7109 s, Epoch: 1000/2000, Loss: 1.1374
# Sample:
# Whall
# following
# oof
# those
# is
# very, it was disgrace, was soon engaged as his affected the common women
# ####
# Time: 915.6025 s, Epoch: 1200/2000, Loss: 1.1079
# Sample:
# Whis
# indiFforth, she added Darcy too, it may longed I suppose, over the
# you, there is
# in do?
#
#
# ####
# Time: 1063.1721 s, Epoch: 1400/2000, Loss: 1.1095
# Sample:
# Wher with which him at least the visitors to see Mr. Collins. We me all in friend who mistaken to temp
# ####
# Time: 1209.6270 s, Epoch: 1600/2000, Loss: 1.1141
# Sample:
# Whement reason! and at him all be view to him to give mine
# and
# Lydia, he was not understoom would hard
# ####
# Time: 1354.8098 s, Epoch: 1800/2000, Loss: 1.1236
# Sample:
# Wh them again, ageable.
# Even she knowsifience, entering to find the sense
# of course of assures how lon
# ##########
# Training end. Generating 4000 characters Shakespeare masterpiece in 3. 2. 1...
# Which you all temper, that I had Mr.
# Longbourn, Lizzy! that Miss de Jane! she cannot be anxious fair propose
# that it
# was she thought he was true, not you are often county
# tto disagreeing. They are you are willing to the best, the idea of her own neither eam since never and Mr. Bennet, he has love a cprovember for Hunsford, except of imaginable new more wanted there said before them, observation to have still every kingded, and received against as possition for the evensing--hortness toward too, and saying, which
# Mlay
# else has daugement to her sister, one again
# he
# ever deson its restance of her tone of protessed with his always congratulation.
#
# Ortrutes without entering; and her sister did nothing of a little beneach of attention. But so much twenting you perverseness were stairs.
#
# Lydy!
# Uflect as for good immediately to clother to you, as she thoughth indole young ladyship will be more feelings, that she did not
# and sister's good to do ill! Where were time before he has night; but
# without considered him to says, indeed, were a great heavance of Jane, she feeling at Roshe thing out colour Mr. Darcy is. Her favourously certain proved in acquaintance of listened.
#
# But who live forward more equacity, suitated, for Jane had satisfied there was early outs chance yourself, more buined
# their reason which she beg there imaginary gone with animation was seeing their being here; and air ran the weated with a still encomined to tell me to indifferent, however in Maria much compliments to see him. The
# supportation; belonged looking in this
# which gate of the housematy.
#
#
#
# Chance now. Elizabeth took denoring was not upon the room. Not at preserving eqiadly leaving that her family, and everything in love all the morning. They may joy iming any fact remil in ought to set you could
# not be--His won conviction to time was, then indeed, where considables for Jane; and if he evielver, and hoped soon face than she had alacrity from another, who had a very speech: 'Oh being in tear of both directly or bour into three more. Why must have both.
#
# I must turn was not out of declared, has been begged a comfort openly. A good repaired a Sir William. He
# was assured the eviven to her sisters' without warmity and of her, satisfied with a great child. The autuit and paid, and Mr. Darcy too happiness at his little chance? Oh, Lointhing entrowed
# heart, however, he has disanyonded, he is now.
#
# In the way, acconduch surprised himself, but them all, and once sound, with such an her sister daughter between them to be look except him freedom there worth home.
#
# There is attentive anybody.
#
# Mr. Bingley's discomposure of Maria, was no loss which her sorry to know
# to Colonel Fitzwilliam; especially, and supportable of the two hour, and to discover the sooner shared. After schemnession because had affected to
# kind disturbed; but
# she continued; nor informed, an inconcludled to take to assure the
# office he with some agoyced are
# so asks to have here; though, was toler more. Have heard with their humbore she would stay; but it is, she rivery
# twoo was such a daughter. Of her situation would always
# feeling to
# mention to be more than to come to say, whom Elizabeth added himself. Tto
# feelings with the whole into a Mr. Collins, and was truth, he amges
# Pemberley, it would
# table to Mr. Collins; but I assured out I did, and be delivery and gave a plan, said Jine, strange, which had no
# success of during determined him engaged.
#
# All aning out of the time of unknown to the whole, and
# her minulsficent, she
# keeps as without interrupted to encourable in such a good friendropy; yet. The
# design
# in the still out
# of her agreeable as
# saw more
# could. In his sister were placed a young man with they house. He is now
# trees. But his account attentions--and attended out? Upon my up. I shall insteems, replied Meryton of Kitty settling might pleasantly with her eyesoughter, said Elizabeth, have me
# this!
#
# Pride of danger was till you have no
# longer we wr