Skip to content

Commit

Permalink
Fix multigpu_basics so it no longer hangs on CPU
Browse files Browse the repository at this point in the history
  • Loading branch information
Helw150 committed Sep 28, 2016
1 parent a4418e8 commit 101e0fc
Show file tree
Hide file tree
Showing 2 changed files with 10 additions and 94 deletions.
19 changes: 10 additions & 9 deletions examples/5_MultiGPU/multigpu_basics.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
from __future__ import print_function
'''
Basic Multi GPU computation example using TensorFlow library.
Expand All @@ -12,7 +13,7 @@
"/gpu:1": The second GPU of your machine
'''

from __future__ import print_function


import numpy as np
import tensorflow as tf
Expand All @@ -31,8 +32,8 @@
* Multi GPU computation time: 0:00:07.131701
'''
# Create random large matrix
A = np.random.rand(1e4, 1e4).astype('float32')
B = np.random.rand(1e4, 1e4).astype('float32')
A = np.random.rand(10000, 10000).astype('float32')
B = np.random.rand(10000, 10000).astype('float32')

# Create a graph to store results
c1 = []
Expand All @@ -48,8 +49,8 @@ def matpow(M, n):
Single GPU computing
'''
with tf.device('/gpu:0'):
a = tf.constant(A)
b = tf.constant(B)
a = tf.placeholder(tf.float32, [10000, 10000])
b = tf.placeholder(tf.float32, [10000, 10000])
# Compute A^n and B^n and store results in c1
c1.append(matpow(a, n))
c1.append(matpow(b, n))
Expand All @@ -60,7 +61,7 @@ def matpow(M, n):
t1_1 = datetime.datetime.now()
with tf.Session(config=tf.ConfigProto(log_device_placement=log_device_placement)) as sess:
# Run the op.
sess.run(sum)
sess.run(sum, {a:A, b:B})
t2_1 = datetime.datetime.now()


Expand All @@ -70,13 +71,13 @@ def matpow(M, n):
# GPU:0 computes A^n
with tf.device('/gpu:0'):
# Compute A^n and store result in c2
a = tf.constant(A)
a = tf.placeholder(tf.float32, [10000, 10000])
c2.append(matpow(a, n))

# GPU:1 computes B^n
with tf.device('/gpu:1'):
# Compute B^n and store result in c2
b = tf.constant(B)
b = tf.placeholder(tf.float32, [10000, 10000])
c2.append(matpow(b, n))

with tf.device('/cpu:0'):
Expand All @@ -85,7 +86,7 @@ def matpow(M, n):
t1_2 = datetime.datetime.now()
with tf.Session(config=tf.ConfigProto(log_device_placement=log_device_placement)) as sess:
# Run the op.
sess.run(sum)
sess.run(sum, {a:A, b:B})
t2_2 = datetime.datetime.now()


Expand Down
85 changes: 0 additions & 85 deletions multigpu_basics.py

This file was deleted.

0 comments on commit 101e0fc

Please sign in to comment.