migrate to cluster
chris-warner-II committed Mar 22, 2024
1 parent c235596 commit 9018e25
Showing 5 changed files with 17 additions and 7 deletions.
autoencoding.py (5 additions, 4 deletions)

@@ -16,19 +16,20 @@ def parse_args():
 
 def main():
 
-    args = parse_args()
-
+    args = parse_args()
+    print(args)
 
     # # (1). Directory and device
     dir_pre = 'store/models/diffae/'
     dir_figs = 'store/output/diffae/autoencoding/'
     os.makedirs(dir_figs,exist_ok=True)
 
     device = 'mps' if torch.backends.mps.is_available() else 'cuda' if torch.cuda.is_available() else 'cpu'
-    device = 'cpu'
+    #device = 'cpu'
     print(f'Using device: {device}')
 
-
+    if device=='cuda':
+        os.system('nvidia_smi')
 
     # # (2). Setup and load in model
     conf = ffhq256_autoenc()
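
The same device change is applied across the four entry scripts touched by this commit (autoencoding.py, interpolate.py, manipulate.py, sample.py): the hard-coded device = 'cpu' override is commented out so the MPS / CUDA / CPU fallback takes effect on the cluster, and GPU status is printed when CUDA is selected. Below is a minimal standalone sketch of that pattern; the select_device helper and the subprocess call are illustrative assumptions, not code from this commit. Note that the committed call os.system('nvidia_smi') uses an underscore while the NVIDIA CLI binary is spelled nvidia-smi, so the hyphenated name is assumed in the sketch.

# Sketch of the device-selection pattern this commit enables.
# The select_device helper and subprocess usage are illustrative only.
import subprocess

import torch

def select_device() -> str:
    # Prefer Apple MPS, then CUDA, then fall back to CPU.
    if torch.backends.mps.is_available():
        return 'mps'
    if torch.cuda.is_available():
        return 'cuda'
    return 'cpu'

if __name__ == '__main__':
    device = select_device()
    print(f'Using device: {device}')
    if device == 'cuda':
        # The committed scripts call os.system('nvidia_smi'); the CLI is
        # spelled 'nvidia-smi', so that name is assumed here.
        subprocess.run(['nvidia-smi'], check=False)

Commenting the override out rather than deleting it keeps a one-line switch back to forced CPU runs for local debugging.
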
imgs_manipulated/compare.png (binary file removed)
interpolate.py (4 additions, 1 deletion)

@@ -25,9 +25,12 @@ def main():
     os.makedirs(dir_figs,exist_ok=True)
 
     device = 'mps' if torch.backends.mps.is_available() else 'cuda' if torch.cuda.is_available() else 'cpu'
-    device = 'cpu'
+    #device = 'cpu'
     print(f'Using device: {device}')
 
+    if device=='cuda':
+        os.system('nvidia_smi')
+
 
     # # (2). Setup and load in model
     conf = ffhq256_autoenc()
manipulate.py (4 additions, 1 deletion)

@@ -27,9 +27,12 @@ def main():
     os.makedirs(dir_figs,exist_ok=True)
 
     device = 'mps' if torch.backends.mps.is_available() else 'cuda' if torch.cuda.is_available() else 'cpu'
-    device = 'cpu'
+    #device = 'cpu'
     print(f'Using device: {device}')
 
+    if device=='cuda':
+        os.system('nvidia_smi')
+
 
     # # (2). Setup and load in models
     conf = ffhq256_autoenc()
sample.py (4 additions, 1 deletion)

@@ -22,9 +22,12 @@ def main():
     os.makedirs(dir_figs,exist_ok=True)
 
     device = 'mps' if torch.backends.mps.is_available() else 'cuda' if torch.cuda.is_available() else 'cpu'
-    device = 'cpu'
+    #device = 'cpu'
     print(f'Using device: {device}')
 
+    if device=='cuda':
+        os.system('nvidia_smi')
+
 
     # # (2). Setup and load in model
    conf = ffhq256_autoenc_latent()
