
Commit 03e6c7b

docs updated and minor bugs fixed
1 parent 8b6f5c3 commit 03e6c7b

89 files changed: 5,061 additions, 422 deletions


.idea/workspace.xml

Lines changed: 136 additions & 69 deletions
Generated file; diff not rendered by default.

cleanup.py

Lines changed: 2 additions & 2 deletions
@@ -17,8 +17,8 @@ def main():
         print("Removing: {}".format(path))
         os.system('rm -rf {}'.format(path))
 
-    print("Removing pype.egg-info")
-    os.system('rm -rf {}'.format(os.path.join(dirpath,'pype.egg-info')))
+    print("Removing dKeras.egg-info")
+    os.system('rm -rf {}'.format(os.path.join(dirpath,'dKeras.egg-info')))
 
 
 if __name__ == "__main__":
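
For context, cleanup.py removes build artifacts by shelling out to rm -rf. Below is a minimal, hypothetical sketch of the same idea using shutil.rmtree instead; the remove_artifacts name and the artifact list are illustrative assumptions, not part of this commit. shutil.rmtree does the same job as rm -rf here but avoids spawning a shell and also works on Windows.

    import os
    import shutil

    def remove_artifacts(dirpath, names=('build', 'dist', 'dKeras.egg-info')):
        """Delete the listed build artifacts under dirpath, if they exist."""
        for name in names:
            path = os.path.join(dirpath, name)
            if os.path.exists(path):
                print("Removing: {}".format(path))
                shutil.rmtree(path, ignore_errors=True)

    if __name__ == "__main__":
        remove_artifacts(os.path.dirname(os.path.abspath(__file__)))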

dKeras.egg-info/PKG-INFO

Lines changed: 0 additions & 10 deletions
This file was deleted.

dKeras.egg-info/SOURCES.txt

Lines changed: 0 additions & 9 deletions
This file was deleted.

dKeras.egg-info/dependency_links.txt

Lines changed: 0 additions & 1 deletion
This file was deleted.

dKeras.egg-info/top_level.txt

Lines changed: 0 additions & 1 deletion
This file was deleted.

dkeras/benchmarks/model_benchmark.py

Lines changed: 31 additions & 25 deletions
@@ -85,39 +85,45 @@ def main():
         print("Time elapsed: {}\nFPS: {}".format(elapsed, n_data / elapsed))
 
     elif test_type == 1:
-        if use_search:
-            results = {}
-            best_time = np.inf
-            best_n_workers = -1
+        if model_name != 'all':
+            models = [model_names[model_name]]
+        else:
+            models = [model_names[m] for m in model_names.keys()]
 
-            for n in search_pool:
-                model = dKeras(model_names[model_name], wait_for_workers=True,
-                               n_workers=n)
-                print("Workers are ready")
+        for m in models:
+            if use_search:
+                results = {}
+                best_time = np.inf
+                best_n_workers = -1
 
-                start_time = time.time()
-                preds = model.predict(test_data)
-                elapsed = time.time() - start_time
+                for n in search_pool:
+                    model = dKeras(m, wait_for_workers=True,
+                                   n_workers=n)
+                    print("Workers are ready")
 
-                time.sleep(3)
+                    start_time = time.time()
+                    preds = model.predict(test_data)
+                    elapsed = time.time() - start_time
 
-                if elapsed < best_time:
-                    best_time = elapsed
-                    best_n_workers = n
+                    time.sleep(3)
 
-                results[str(n)] = elapsed
-                model.close()
+                    if elapsed < best_time:
+                        best_time = elapsed
+                        best_n_workers = n
 
-            print('{}\nN\tElapsed Time'.format('=' * 80))
+                    results[str(n)] = elapsed
+                    model.close()
 
-            for k in results.keys():
-                print("{}\t{}".format(k, results[k]))
+                print('{}\nN\tElapsed Time'.format('=' * 80))
 
-            print("{}\nTests completed:\n\tBest N workers: {}\t FPS: {}".format(
-                '=' * 80, best_n_workers, n_data / best_time))
-        else:
-            model = dKeras(model_names[model_name], wait_for_workers=True,
-                           n_workers=n_workers)
+                for k in results.keys():
+                    print("{}\t{}".format(k, results[k]))
+
+                print("{}\nTests completed:\n\tBest N workers: {}\t FPS: {}".format(
+                    '=' * 80, best_n_workers, n_data / best_time))
+            else:
+                model = dKeras(model_names[model_name], wait_for_workers=True,
+                               n_workers=n_workers)
 
         start_time = time.time()
         preds = model.predict(test_data)
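
The restructured benchmark above loops over one or all registered models and, when use_search is enabled, times model.predict for each candidate worker count and keeps the fastest. A self-contained, hypothetical sketch of that search loop is shown below; search_workers and make_model are illustrative names, not functions from the repository.

    import time

    def search_workers(make_model, test_data, search_pool, cooldown=3):
        """Time predict() for each worker count and return the fastest setting."""
        results = {}
        best_time, best_n_workers = float('inf'), -1
        for n in search_pool:
            model = make_model(n)      # e.g. dKeras(keras_model, n_workers=n, wait_for_workers=True)
            start_time = time.time()
            model.predict(test_data)
            elapsed = time.time() - start_time
            time.sleep(cooldown)       # let workers settle between runs
            if elapsed < best_time:
                best_time, best_n_workers = elapsed, n
            results[str(n)] = elapsed
            model.close()
        return results, best_n_workers, best_time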

dkeras/config/config.py

Lines changed: 1 addition & 1 deletion
@@ -9,7 +9,7 @@
 # Remote Worker Functions Configuration
 # -----------------------------------------------------------------------------
 DEFAULT_N_WORKERS = 4
-WORKER_WAIT_TIME = 1e-4
+WORKER_WAIT_TIME = 5e-3
 WORKER_INFERENCE_BATCH_SIZE = 250
 
 # -----------------------------------------------------------------------------
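
The only change here raises WORKER_WAIT_TIME from 1e-4 s to 5e-3 s. Assuming this constant is the interval a worker sleeps between polls of the data server (an assumption; this diff alone does not show how the constant is used), the trade-off is a slightly higher latency floor per batch in exchange for far fewer idle wake-ups, roughly as in the hypothetical loop below.

    import time

    WORKER_WAIT_TIME = 5e-3  # seconds; mirrors the new value in dkeras/config/config.py

    def poll_for_batch(get_batch, wait_time=WORKER_WAIT_TIME):
        """Hypothetical polling loop: spin until the data source yields a batch."""
        while True:
            batch = get_batch()
            if batch is not None:
                return batch
            time.sleep(wait_time)  # 5 ms between polls instead of 0.1 ms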

dkeras/dkeras.py

Lines changed: 65 additions & 13 deletions
@@ -18,26 +18,78 @@
 
 
 class dKeras(object):
+    """Distributed Keras Model Wrapper.
+
+    It will automatically set up remote
+    workers and data servers for data parallelism algorithms. Using
+    the same notation as a regular Keras model, it makes distributing a
+    Keras model simple.
+
+    .. code-block:: python
+
+        from tensorflow.keras.applications import ResNet50
+        from dkeras import dKeras
+
+        model = dKeras(ResNet50)
+        preds = model.predict(data)
+
+    Arguments:
+        model: Un-initialized Keras model
+        verbose: Verbose setting boolean variable. Default is False.
+        weights: Weights arg for prebuilt models, example: ResNet50(
+            weights='imagenet'). Default is None.
+        n_workers: Integer number of worker processes. If left None,
+            then it will automatically find an estimate of the optimal
+            number of workers. Default is None.
+        init_ray: Boolean arg for whether to initialize Ray within
+            the model initialization. Default is False.
+        rm_existing_ray: Boolean arg for whether to remove any
+            existing Ray clusters. Default is True.
+        rm_local_model: Boolean arg for whether to remove the local
+            copy of the Keras model for memory conservation. Default is
+            False.
+        wait_for_workers: Boolean arg for whether to wait for all of
+            the worker processes to initialize and connect to the data
+            server.
+        redis_address: In the case of initializing Ray inside of
+            model initialization, the redis address is required for
+            connecting to existing Ray clusters.
+        n_cpus_per_worker: The integer number of CPUs per worker
+            process. If left None, it will allocate automatically. The
+            default is None.
+        n_gpus_per_worker: The integer or float number of GPUs per
+            worker process. If left None, it will allocate
+            automatically. The default is None.
+        n_cpus_per_server: The integer number of CPUs per data
+            server. If left None, it will allocate automatically. The
+            default is None.
+    """
 
     def __init__(self,
                  model,
-                 weights=None,
-                 n_workers=None,
-                 init_ray=True,
-                 rm_existing_ray=False,
-                 rm_local_model=True,
-                 wait_for_workers=False,
-                 redis_address=None):
-        """
-
-        :param model:
-        :param n_workers:
-        """
+                 verbose: bool = True,
+                 weights: list = None,
+                 n_workers: int = None,
+                 init_ray: bool = True,
+                 rm_existing_ray: bool = False,
+                 rm_local_model: bool = True,
+                 wait_for_workers: bool = False,
+                 redis_address: str = None,
+                 n_cpus_per_worker: int = None,
+                 n_gpus_per_worker: int = None,
+                 n_cpus_per_server: int = None):
+
+        config.N_CPUS_PER_SERVER = n_cpus_per_server
+        config.N_CPUS_PER_WORKER = n_cpus_per_worker
+        config.N_CPUS_PER_SERVER = n_gpus_per_worker
+        self.verbose = verbose
         if init_ray:
             if ray.is_initialized():
                 if rm_existing_ray:
                     ray.shutdown()
                     ray.init()
+                else:
+                    ray.init(redis_address=redis_address)
             else:
                 ray.init()
 
@@ -91,7 +143,7 @@ def predict(self, data, distributed=True, stop_ray=False):
         n_data = len(data)
         if n_data % self.n_workers > 0:
             self.data_server.set_batch_size.remote(
-                int(n_data / self.n_workers)+1)
+                int(n_data / self.n_workers) + 1)
         else:
             self.data_server.set_batch_size.remote(
                 int(n_data / self.n_workers))
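
Putting the new constructor signature together with the example in the docstring above, a usage sketch might look like the following. The input data is a random placeholder, and the specific values for n_workers, wait_for_workers, and n_cpus_per_worker are arbitrary choices, not recommendations from this commit.

    import numpy as np
    from tensorflow.keras.applications import ResNet50
    from dkeras import dKeras

    # Placeholder input: 100 random 224x224 RGB images
    data = np.random.uniform(0, 255, (100, 224, 224, 3))

    model = dKeras(ResNet50,
                   n_workers=4,             # fixed worker count instead of auto-estimation
                   wait_for_workers=True,   # block until workers reach the data server
                   n_cpus_per_worker=1)     # one of the resource args added in this commit
    preds = model.predict(data)
    model.close()

Closing the model with model.close() follows the pattern used in the benchmark script above.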

docs/build/.DS_Store

0 Bytes
Binary file not shown.
