Skip to content

Commit

Permalink
Merge pull request #35 from ruiqixu37/main
Browse files Browse the repository at this point in the history
improve reproducibility
  • Loading branch information
factoryofthesun authored May 19, 2024
2 parents 5578d47 + 62f1de2 commit e86150e
Show file tree
Hide file tree
Showing 2 changed files with 5 additions and 2 deletions.
4 changes: 3 additions & 1 deletion neural_style_field.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,9 @@
class ProgressiveEncoding(nn.Module):
def __init__(self, mapping_size, T, d=3, apply=True):
super(ProgressiveEncoding, self).__init__()
-        self._t = 0
+        self._t = nn.Parameter(
+            torch.tensor(0, dtype=torch.float32, device=device), requires_grad=False
+        )
self.n = mapping_size
self.T = T
self.d = d
Expand Down
3 changes: 2 additions & 1 deletion utils.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
 import torch
+import torch.nn as nn
import kaolin as kal
import clip
import numpy as np
Expand Down Expand Up @@ -394,7 +395,7 @@ def __init__(self, num_input_channels, mapping_size=256, scale=10, exclude=0):
self.exclude = exclude
B = torch.randn((num_input_channels, mapping_size)) * scale
B_sort = sorted(B, key=lambda x: torch.norm(x, p=2))
-        self._B = torch.stack(B_sort)  # for sape
+        self._B = nn.Parameter(torch.stack(B_sort), requires_grad=False)  # for sape

def forward(self, x):
# assert x.dim() == 4, 'Expected 4D input (got {}D input)'.format(x.dim())
Expand Down

0 comments on commit e86150e

Please sign in to comment.