HypernymScore.lua
require 'nn'
require 'dpnn'

local HypernymScore, parent = torch.class('nn.HypernymScore', 'nn.Sequential')

function HypernymScore:__init(params, num_entities)
    parent.__init(self)

    -- Two embedding branches that share a single lookup table, one per input word.
    local lookup = nn.LookupTable(num_entities, params.D_embedding)
    local embedding = nn.Sequential():add(lookup)
    local embedding2 = embedding:sharedClone()

    -- self: takes two input words, outputs a probability that the first is a hypernym of the second
    self:add(nn.ParallelTable():add(embedding):add(embedding2))

    if params.symmetric then
        -- Symmetric baseline: cosine similarity rescaled from [-1, 1] to [0, 1].
        self:add(nn.CosineDistance())
        self:add(nn.AddConstant(1))
        self:add(nn.MulConstant(0.5))
    else
        -- Asymmetric order-violation penalty: positive part of (first - second + eps),
        -- reduced over the embedding dimension with the chosen norm.
        self:add(nn.CSubTable())
        self:add(nn.AddConstant(params.eps))
        self:add(nn.ReLU())
        if params.norm > 1000 then
            self:add(nn.Max(2))       -- infinity norm
        elseif params.norm == 2 then
            self:add(nn.Power(2))     -- squared L2 norm
            self:add(nn.Sum(2))
        elseif params.norm == 1 then
            self:add(nn.Mean(2))      -- (scaled) L1 norm
        end
    end

    if USE_CUDA then
        self:cuda()
        -- Re-share parameters: type conversion can break sharing between the clones.
        embedding:share(embedding2, 'weight', 'bias', 'gradWeight', 'gradBias')
    end

    self.lookupModule = lookup
end
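
A minimal usage sketch follows. The parameter values, the vocabulary size, and the assumption that HypernymScore.lua is on the Lua package path are illustrative, not taken from the repository; only the field names match what __init reads.

require 'nn'
require 'dpnn'
require 'HypernymScore'

-- Hypothetical hyperparameters (assumed values).
local params = {
    D_embedding = 50,    -- embedding dimensionality (assumed)
    symmetric   = false, -- use the asymmetric order-violation branch
    eps         = 0,     -- constant added before the ReLU (assumed)
    norm        = 2,     -- squared L2 norm of the positive violations
}
local numEntities = 1000 -- vocabulary size (assumed)

local net = nn.HypernymScore(params, numEntities)

-- Score a batch of (first word, second word) index pairs.
local first  = torch.LongTensor{1, 2, 3}
local second = torch.LongTensor{4, 5, 6}
local scores = net:forward{first, second}
print(scores) -- one value per input pair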