Skip to content

Commit

Permalink
chmod files 644 -> 755
Browse files Browse the repository at this point in the history
  • Loading branch information
SpicyYeol committed Jul 23, 2021
1 parent 784c0c0 commit 7c096ee
Show file tree
Hide file tree
Showing 27 changed files with 71 additions and 27 deletions.
Empty file modified dataset/DeltaDataset.py
100644 → 100755
Empty file.
Binary file added dataset/__pycache__/DeltaDataset.cpython-39.pyc
Binary file not shown.
Binary file added dataset/__pycache__/dataset_loader.cpython-39.pyc
Binary file not shown.
Empty file modified dataset/dataset_loader.py
100644 → 100755
Empty file.
Empty file modified loss.py
100644 → 100755
Empty file.
Empty file modified nets/AppearanceModel.py
100644 → 100755
Empty file.
33 changes: 33 additions & 0 deletions nets/LinearModel.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
import torch
from layers.complexLayers import ComplexDropout, ComplexLinear
from funcs.complexFunctions import complex_tanh


class LinearModel(torch.nn.Module):
def __init__(self):
super().__init__()
self.f_drop1 = torch.nn.Dropout(0.25)
self.f_linear1 = torch.nn.Linear(5184, 256, bias=True)
self.f_linear2 = torch.nn.Linear(256, 1, bias=True)

def forward(self, input):
f1 = torch.flatten(input, start_dim=1)
f2 = self.f_drop1(f1)
f3 = torch.tanh(self.f_linear1(f2))
f4 = self.f_linear2(f3)
return f4


class ComplexLinearModel(torch.nn.Module):
    """Complex-valued counterpart of LinearModel.

    Reshapes the input to (batch, 64*6*6) complex features, applies
    complex dropout, a complex_tanh-activated 2304 -> 256 projection,
    and a final 256 -> 1 complex linear layer.
    """

    def __init__(self):
        super(ComplexLinearModel, self).__init__()
        # Attribute names mirror LinearModel and are fixed by the
        # state_dict layout.
        self.f_drop1 = ComplexDropout(0.25)
        self.f_linear1 = ComplexLinear(64 * 6 * 6, 256)
        self.f_linear2 = ComplexLinear(256, 1)

    def forward(self, input):
        """Collapse input to (batch, 2304) and regress to one complex value."""
        features = input.view(-1, 64 * 6 * 6)
        activated = complex_tanh(self.f_linear1(self.f_drop1(features)))
        return self.f_linear2(activated)
Empty file removed nets/Model.py
Empty file.
54 changes: 33 additions & 21 deletions nets/Models.py
100644 → 100755
Original file line number Diff line number Diff line change
@@ -1,36 +1,51 @@
import torch

from AppearanceModel import AppearanceModel_2D
from MotionModel import motion_model

from AppearanceModel import AppearanceModel_2D, AppearanceModel_DA
from MotionModel import MotionModel
from LinearModel import LinearModel

class Deepphys(torch.nn.Module):
def __init__(self):
super().__init__()
self.model = model
self.in_channels = 3
self.out_channels = 32
self.kernel_size = 3

self.appearance_model = AppearanceModel_2D(in_channels=self.in_channels, out_channels=self.out_channels * 4,
kernel_size=self.kernel_size)
self.motion_model = MotionModel(in_channels=self.in_channels, out_channels=self.out_channels,
kernel_size=self.kernel_size)

self.linear_model = LinearModel()

# mot: c-b-c-b-d-a-c-b-c-b-d-a
# app: c-b-c-b-d-at-a-c-b-c-b-d-at
def forward(self, appearance_input, motion_input):
"""
:param appearance_input:
:param motion_input:
:return:
original 2d model
"""
attention_mask1, attention_mask2 = self.appearance_model(appearance_input)
motion_output = self.motion_model(motion_input, attention_mask1, attention_mask2)
out = self.linear_model(motion_output)

# """
# self.appearance_model = DA_appearance_model(in_channels=self.in_channels, out_channels=self.out_channels,
# kernel_size=self.kernel_size)
self.motion_model = motion_model(in_channels=self.in_channels, out_channels=self.out_channels,
kernel_size=self.kernel_size, model=model)
return out, attention_mask1, attention_mask2

self.fully = fc()
# """
# self.appearance_model = complexAttention(in_channels=self.in_channels, out_channels=self.out_channels,
# kernel_size=self.kernel_size)
# self.fully = Compelxfc()

# def forward(self, appearance_input, motion_input):
class Deepphys_DA(torch.nn.Module):
def __init__(self):
super().__init__()
self.in_channels = 3
self.out_channels = 32
self.kernel_size = 3

self.appearance_model = AppearanceModel_DA(in_channels=self.in_channels, out_channels=self.out_channels,
kernel_size=self.kernel_size)
self.motion_model = MotionModel(in_channels=self.in_channels, out_channels=self.out_channels,
kernel_size=self.kernel_size)

self.fully = LinearModel()

def forward(self, appearance_input, motion_input):
"""
:param appearance_input:
Expand All @@ -42,8 +57,5 @@ def forward(self, appearance_input, motion_input):
motion_output = self.motion_model(motion_input, attention_mask1, attention_mask2)
out = self.fully(motion_output)

# X = torch.Tensor(X).cuda()
# attention_mask1, attention_mask2,out = self.appearance_model(X)
# out = self.fully(out)

return out, attention_mask1, attention_mask2

4 changes: 2 additions & 2 deletions nets/MotionModel.py
100644 → 100755
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import torch
import torch.nn as nn
class motion_model(torch.nn.Module):
def __init__(self, in_channels, out_channels, kernel_size, model):
class MotionModel(torch.nn.Module):
def __init__(self, in_channels, out_channels, kernel_size):
super().__init__()
# Motion model
self.m_conv1 = torch.nn.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size,
Expand Down
Empty file removed nets/attentionModel.py
Empty file.
Empty file modified nets/blocks/AttentionBlocks.py
100644 → 100755
Empty file.
Empty file removed nets/blocks/attenBlocks.py
Empty file.
7 changes: 3 additions & 4 deletions nets/blocks/attentionBlocks.py
100644 → 100755
Original file line number Diff line number Diff line change
@@ -1,9 +1,10 @@
import torch
import torch.nn as nn
from ..

from ..modules.modules import DAModule

class attention_block(nn.Module):

class AttentionBlock(nn.Module):
def __init__(self, in_channels):
super().__init__()
self.attention = DAModule(in_channels)
Expand All @@ -18,5 +19,3 @@ def forward(self, input):
mask = torch.div(mask * H * W, norm)
mask = self.conv1x1(mask)
return mask


Empty file modified nets/blocks/attentionModel.py
100644 → 100755
Empty file.
Empty file modified nets/blocks/blocks.py
100644 → 100755
Empty file.
Empty file modified nets/blocks/complexLayers.py
100644 → 100755
Empty file.
Empty file modified nets/blocks/motionBlocks.py
100644 → 100755
Empty file.
Empty file modified nets/funcs/complexFunctions.py
100644 → 100755
Empty file.
Empty file modified nets/layers/complexLayers.py
100644 → 100755
Empty file.
Empty file modified nets/modules/modules
100644 → 100755
Empty file.
Empty file modified nets/modules/modules.py
100644 → 100755
Empty file.
Empty file modified optim.py
100644 → 100755
Empty file.
Empty file modified unused/Facedetect.py
100644 → 100755
Empty file.
Empty file modified unused/bvpdataset.py
100644 → 100755
Empty file.
Empty file modified unused/face_detection_front.tflite
100644 → 100755
Empty file.
Empty file modified unused/sample.py
100644 → 100755
Empty file.

0 comments on commit 7c096ee

Please sign in to comment.