@@ -1,8 +1,4 @@
-<<<<<<< HEAD
-using NNlib: conv, ∇conv_data, depthwiseconv, crossconv
-=======
-using NNlib: conv, depthwiseconv, crosscor
->>>>>>> some final changes
+using NNlib: conv, ∇conv_data, depthwiseconv, crosscor

@generated sub2(::Val{N}) where N = :(Val($(N-2)))

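(Aside, not part of the diff: `sub2` maps the rank of a weight array to its number of spatial dimensions, which is what the `stride`, `pad` and `dilation` keywords are later expanded to. A quick REPL sketch of its behaviour:)

    julia> @generated sub2(::Val{N}) where N = :(Val($(N-2)))
    sub2 (generic function with 1 method)

    julia> sub2(Val(4))   # 4-D WHCN weights -> 2 spatial dimensions
    Val{2}()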
@@ -73,8 +69,6 @@
"""
    ConvTranspose(size, in=>out)
    ConvTranspose(size, in=>out, relu)
-    CrossCor(size, in=>out)
-    CrossCor(size, in=>out, relu)

Standard convolutional transpose layer. `size` should be a tuple like `(2, 2)`.
`in` and `out` specify the number of input and output channels respectively.
@@ -83,7 +77,6 @@ be a `100×100×3` array, and a batch of 50 would be a `100×100×3×50` array.
Takes the keyword arguments `pad`, `stride` and `dilation`.
"""
struct ConvTranspose{N,F,A,V}
-struct CrossCor{N,F,A,V}
  σ::F
  weight::A
  bias::V
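To make the `ConvTranspose` docstring above concrete, a minimal usage sketch (sizes and arguments are illustrative, not taken from the diff):

    using Flux

    layer = ConvTranspose((2, 2), 3 => 16, relu)  # 2×2 kernel, 3 input channels, 16 output channels
    x = rand(100, 100, 3, 50)                     # a WHCN batch, as described in the docstring
    y = layer(x)                                  # 16-channel output; with the default stride and pad,
                                                  # each spatial dimension grows by kernel size - 1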
@@ -173,8 +166,8 @@ function Base.show(io::IO, l::DepthwiseConv)
end

"""
-    CrossConv(size, in=>out)
-    CrossConv(size, in=>out, relu)
+    CrossCor(size, in=>out)
+    CrossCor(size, in=>out, relu)

Standard cross convolutional layer. `size` should be a tuple like `(2, 2)`.
`in` and `out` specify the number of input and output channels respectively.
@@ -197,8 +190,8 @@ CrossCor(w::AbstractArray{T,N}, b::AbstractVector{T}, σ = identity;
         stride = 1, pad = 0, dilation = 1) where {T,N} =
  CrossCor(σ, w, b, expand.(sub2(Val(N)), (stride, pad, dilation))...)

-CrossCor(k::NTuple{N,Integer}, ch::Pair{<:Integer,<:Integer}, σ = identity; init = initn,
-         stride = 1, pad = 0, dilation = 1) where N =
+CrossCor(k::NTuple{N,Integer}, ch::Pair{<:Integer,<:Integer}, σ = identity;
+         init = glorot_uniform, stride = 1, pad = 0, dilation = 1) where N =
  CrossCor(param(init(k..., ch...)), param(zeros(ch[2])), σ,
          stride = stride, pad = pad, dilation = dilation)

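A minimal sketch of the keyword constructor this hunk changes (values are illustrative; `glorot_uniform` is simply the new default for `init`):

    using Flux

    layer = CrossCor((2, 2), 1 => 16, relu)       # init = glorot_uniform is now the default
    layer = CrossCor((2, 2), 1 => 16, relu,       # equivalent, with every keyword spelled out
                     init = glorot_uniform, stride = 1, pad = 0, dilation = 1)
    x = rand(28, 28, 1, 8)                        # WHCN batch of eight 28×28 single-channel images
    size(layer(x))                                # (27, 27, 16, 8) with a 2×2 kernel, stride 1, pad 0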
@@ -218,6 +211,12 @@ function Base.show(io::IO, l::CrossCor)
  print(io, ")")
end

+(a::CrossCor{<:Any,<:Any,W})(x::AbstractArray{T}) where {T <: Union{Float32,Float64}, W <: AbstractArray{T}} =
+  invoke(a, Tuple{AbstractArray}, x)
+
+(a::CrossCor{<:Any,<:Any,W})(x::AbstractArray{<:Real}) where {T <: Union{Float32,Float64}, W <: AbstractArray{T}} =
+  a(T.(x))
+
"""
    MaxPool(k)

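The two methods added above route inputs by element type: when the weights are `Float32`/`Float64` arrays, an input with the same element type is sent straight to the generic `AbstractArray` method via `invoke`, while any other `Real` input is first converted with `T.(x)`, so the forward pass keeps the weights' precision. A rough sketch of the effect (the concrete weights are illustrative, built with the same `param` wrapper the constructors above use):

    using Flux

    w = param(rand(Float32, 2, 2, 1, 4))          # Float32 weights, so T == Float32 below
    layer = CrossCor(w, param(zeros(Float32, 4)))
    layer(rand(0:255, 28, 28, 1, 1))              # Int input: hits the `Real` method, converted via Float32.(x)
    layer(rand(Float32, 28, 28, 1, 1))            # matching eltype: forwarded unchanged through invoke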
@@ -260,48 +259,4 @@ MeanPool(k::NTuple{N,Integer}; pad = 0, stride = k) where N =

function Base.show(io::IO, m::MeanPool)
  print(io, "MeanPool(", m.k, ", pad = ", m.pad, ", stride = ", m.stride, ")")
-end
-<<<<<<< HEAD
-=======
-
-"""
-    CrossCor(size, in=>out)
-    CrossCor(size, in=>out, relu)
-Standard cross convolutional layer. `size` should be a tuple like `(2, 2)`.
-`in` and `out` specify the number of input and output channels respectively.
-Data should be stored in WHCN order. In other words, a 100×100 RGB image would
-be a `100×100×3` array, and a batch of 50 would be a `100×100×3×50` array.
-Takes the keyword arguments `pad`, `stride` and `dilation`.
-"""
-struct CrossCor{N,F,A,V}
-  σ::F
-  weight::A
-  bias::V
-  stride::NTuple{N,Int}
-  pad::NTuple{N,Int}
-  dilation::NTuple{N,Int}
-end
-CrossCor(w::AbstractArray{T,N}, b::AbstractVector{T}, σ = identity;
-         stride = 1, pad = 0, dilation = 1) where {T,N} =
-  CrossCor(σ, w, b, expand.(sub2(Val(N)), (stride, pad, dilation))...)
-
-CrossCor(k::NTuple{N,Integer}, ch::Pair{<:Integer,<:Integer}, σ = identity;
-         init = glorot_uniform, stride = 1, pad = 0, dilation = 1) where N =
-  CrossCor(param(init(k..., ch...)), param(zeros(ch[2])), σ,
-          stride = stride, pad = pad, dilation = dilation)
-
-@treelike CrossCor
-
-function (c::CrossCor)(x)
-  # TODO: breaks gpu broadcast :(
-  # ndims(x) == ndims(c.weight)-1 && return squeezebatch(c(reshape(x, size(x)..., 1)))
-  σ, b = c.σ, reshape(c.bias, map(_->1, c.stride)..., :, 1)
-  σ.(crosscor(x, c.weight, stride = c.stride, pad = c.pad, dilation = c.dilation) .+ b)
-end
-function Base.show(io::IO, l::CrossCor)
-  print(io, "CrossCor(", size(l.weight)[1:ndims(l.weight)-2])
-  print(io, ", ", size(l.weight, ndims(l.weight)-1), "=>", size(l.weight, ndims(l.weight)))
-  l.σ == identity || print(io, ", ", l.σ)
-  print(io, ")")
-end
->>>>>>> some final changes
+end