@@ -1,14 +1,14 @@
 __author__ = 'SherlockLiao'
 
 import torch
+import torch.nn.functional as F
 from torch import nn, optim
 from torch.autograd import Variable
-import torch.nn.functional as F
 
-training_data = [
-    ("The dog ate the apple".split(), ["DET", "NN", "V", "DET", "NN"]),
-    ("Everybody read that book".split(), ["NN", "V", "DET", "NN"])
-]
+training_data = [("The dog ate the apple".split(),
+                  ["DET", "NN", "V", "DET", "NN"]),
+                 ("Everybody read that book".split(), ["NN", "V", "DET",
+                                                       "NN"])]
 
 word_to_idx = {}
 tag_to_idx = {}
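
The next hunk's header jumps to line 38, so the omitted lines presumably fill
these index dictionaries (including the character_to_idx used further down)
and define the CharLSTM module. A hedged sketch of the usual pattern for
building such vocabularies, not taken from this commit:

    # Hypothetical reconstruction of the omitted vocabulary-building step:
    # give every unseen lowercased word, character, and tag a fresh index.
    character_to_idx = {}
    for words, tags in training_data:
        for word in words:
            if word.lower() not in word_to_idx:
                word_to_idx[word.lower()] = len(word_to_idx)
            for ch in word.lower():
                if ch not in character_to_idx:
                    character_to_idx[ch] = len(character_to_idx)
        for tag in tags:
            if tag not in tag_to_idx:
                tag_to_idx[tag] = len(tag_to_idx)
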
@@ -38,12 +38,12 @@ def forward(self, x):
 
 
 class LSTMTagger(nn.Module):
-    def __init__(self, n_word, n_char, char_dim, n_dim, char_hidden,
-                 n_hidden, n_tag):
+    def __init__(self, n_word, n_char, char_dim, n_dim, char_hidden, n_hidden,
+                 n_tag):
         super(LSTMTagger, self).__init__()
         self.word_embedding = nn.Embedding(n_word, n_dim)
         self.char_lstm = CharLSTM(n_char, char_dim, char_hidden)
-        self.lstm = nn.LSTM(n_dim + char_hidden, n_hidden, batch_first=True)
+        self.lstm = nn.LSTM(n_dim + char_hidden, n_hidden, batch_first=True)
         self.linear1 = nn.Linear(n_hidden, n_tag)
 
     def forward(self, x, word):
@@ -54,11 +54,16 @@ def forward(self, x, word):
                 char_list.append(character_to_idx[letter.lower()])
             char_list = torch.LongTensor(char_list)
             char_list = char_list.unsqueeze(0)
-            tempchar = self.char_lstm(Variable(char_list).cuda())
+            if torch.cuda.is_available():
+                tempchar = self.char_lstm(Variable(char_list).cuda())
+            else:
+                tempchar = self.char_lstm(Variable(char_list))
             tempchar = tempchar.squeeze(0)
             char = torch.cat((char, tempchar.cpu().data), 0)
         char = char.squeeze(1)
-        char = Variable(char).cuda()
+        if torch.cuda.is_available():
+            char = char.cuda()
+        char = Variable(char)
         x = self.word_embedding(x)
         x = torch.cat((x, char), 1)
         x = x.unsqueeze(0)
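
This hunk guards each .cuda() call with torch.cuda.is_available() so the
script also runs on CPU-only machines. On PyTorch 0.4+ the same effect is
usually achieved once with torch.device, and the Variable wrapper is no
longer needed; a minimal sketch under that assumption, not part of this
commit:

    import torch

    # Choose the device once; .to(device) is also safe on CPU-only machines.
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    char_list = torch.zeros(1, 5, dtype=torch.long).to(device)
    # On 0.4+ plain tensors track gradients, so Variable(...) can be dropped.
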
@@ -69,8 +74,8 @@ def forward(self, x, word):
         return y
 
 
-model = LSTMTagger(len(word_to_idx), len(character_to_idx), 10,
-                   100, 50, 128, len(tag_to_idx))
+model = LSTMTagger(
+    len(word_to_idx), len(character_to_idx), 10, 100, 50, 128, len(tag_to_idx))
 if torch.cuda.is_available():
     model = model.cuda()
 criterion = nn.CrossEntropyLoss()
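
One thing the reformat leaves untouched: the file imports
torch.nn.functional as F, which hints that forward ends with F.log_softmax,
yet the criterion is nn.CrossEntropyLoss, which applies log-softmax
internally. If forward really returns log-probabilities, nn.NLLLoss is the
matching criterion. A self-contained sketch of the equivalence, assuming a
recent PyTorch:

    import torch
    from torch import nn
    import torch.nn.functional as F

    logits = torch.randn(5, 4)               # 5 tokens, 4 candidate tags
    targets = torch.tensor([0, 1, 2, 3, 0])  # one tag index per token

    # CrossEntropyLoss is log_softmax followed by NLLLoss:
    loss_a = nn.CrossEntropyLoss()(logits, targets)
    loss_b = nn.NLLLoss()(F.log_softmax(logits, dim=1), targets)
    assert torch.allclose(loss_a, loss_b)
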
@@ -84,8 +89,8 @@ def make_sequence(x, dic):
 
 
 for epoch in range(300):
-    print('*' * 10)
-    print('epoch {}'.format(epoch + 1))
+    print('*' * 10)
+    print('epoch {}'.format(epoch + 1))
     running_loss = 0
     for data in training_data:
         word, tag = data
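
The diff ends at the top of the per-sentence loop. A hypothetical sketch of
how such a step typically continues, using the make_sequence helper named in
the hunk header and assuming an optimizer (e.g. optim.SGD over
model.parameters()) was created alongside criterion; none of this is shown
in the commit:

    # Hypothetical continuation of the loop body (indented under the for):
    word_list = make_sequence(word, word_to_idx)
    tag_list = make_sequence(tag, tag_to_idx)
    if torch.cuda.is_available():
        word_list = word_list.cuda()
        tag_list = tag_list.cuda()
    out = model(word_list, word)        # forward takes indices + raw tokens
    loss = criterion(out, tag_list)
    running_loss += loss.item()
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()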