Merge pull request #21 from khalid-davis/develop
1. rename hard sample to hard example
2. clean pre_hook/post_hook of JointInference lib code
llhuii authored Jan 11, 2021
2 parents 6d4c02b + 4925635 commit 5bf29f7
Showing 3 changed files with 47 additions and 50 deletions.
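In short, the caller-facing API change looks roughly like this (a sketch assembled from the diffs below; model, post_hook and hard_example_mining_algorithm are the objects built in little_model.py):

# Before this commit: postprocessing was attached to JointInference as a hook.
inference_instance = neptune.joint_inference.JointInference(
    little_model=model,
    hard_example_mining_algorithm=hard_example_mining_algorithm,
    pre_hook=None,
    post_hook=post_hook,
)

# After: the little model owns its postprocess callback and the hooks are gone.
inference_instance = neptune.joint_inference.JointInference(
    little_model=model,
    hard_example_mining_algorithm=hard_example_mining_algorithm
)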
4 changes: 2 additions & 2 deletions examples/helmet_detection_inference/README.md
@@ -72,7 +72,7 @@ EOF
 Note the setting of the following parameters, which have to be the same as in the script [little_model.py](/examples/helmet_detection_inference/little_model/little_model.py):
 - hardExampleMining: set the hard example mining algorithm ({IBT, CrossEntropy}) used for inference on the edge side.
 - video_url: set the url of the video stream.
-- all_sample_inference_output: set your output path for the inference results, and note that the root path has to be /home/data.
+- all_examples_inference_output: set your output path for the inference results, and note that the root path has to be /home/data.
 - hard_example_edge_inference_output: set your output path for the results of inferring hard examples on the edge side.
 - hard_example_cloud_inference_output: set your output path for the results of inferring hard examples on the cloud side.

@@ -105,7 +105,7 @@ spec:
value: "416,736"
- key: "video_url"
value: "rtsp://localhost/video"
- key: "all_sample_inference_output"
- key: "all_examples_inference_output"
value: "/home/data/output"
- key: "hard_example_cloud_inference_output"
value: "/home/data/hard_example_cloud_inference_output"
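The README above requires every output path to sit under /home/data. A worker can assert that up front; a minimal hedged helper (not part of this repo, names are illustrative):

import os

REQUIRED_ROOT = "/home/data"


def check_output_path(path):
    # the README requires all inference output paths to live under /home/data
    if not os.path.abspath(path).startswith(REQUIRED_ROOT):
        raise ValueError(f"output path {path!r} must be under {REQUIRED_ROOT}")
    os.makedirs(path, exist_ok=True)


check_output_path("/home/data/output")  # e.g. all_examples_inference_output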
40 changes: 20 additions & 20 deletions examples/helmet_detection_inference/little_model/little_model.py
@@ -16,12 +16,14 @@
 colors = [(255, 0, 0), (0, 255, 0), (0, 0, 255), (255, 255, 0), (255, 0, 255),
           (0, 255, 255), (255, 255, 255)]
 class_names = ['person', 'helmet', 'helmet_on', 'helmet_off']
-all_output_path = neptune.context.get_parameters('all_sample_inference_output')
-hard_sample_edge_output_path = neptune.context.get_parameters(
-    'hard_sample_edge_inference_output'
+all_output_path = neptune.context.get_parameters(
+    'all_examples_inference_output'
+)
+hard_example_edge_output_path = neptune.context.get_parameters(
+    'hard_example_edge_inference_output'
 )
-hard_sample_cloud_output_path = neptune.context.get_parameters(
-    'hard_sample_cloud_inference_output'
+hard_example_cloud_output_path = neptune.context.get_parameters(
+    'hard_example_cloud_inference_output'
 )


@@ -80,7 +82,7 @@ def preprocess(image, input_shape):
     new_image.fill(128)
     bh, bw, _ = new_image.shape
     new_image[int((bh - nh) / 2):(nh + int((bh - nh) / 2)),
-    int((bw - nw) / 2):(nw + int((bw - nw) / 2)), :] = image
+              int((bw - nw) / 2):(nw + int((bw - nw) / 2)), :] = image
 
     new_image /= 255.
     new_image = np.expand_dims(new_image, 0)  # Add batch dimension.
@@ -110,7 +112,7 @@ def create_output_fetch(sess):
     return output_fetch
 
 
-def post_hook(model_output):
+def postprocess(model_output):
     all_classes, all_scores, all_bboxes = model_output
     bboxes = []
     for c, s, bbox in zip(all_classes, all_scores, all_bboxes):
@@ -131,20 +133,20 @@ def output_deal(inference_result: InferenceResult, nframe, img_rgb):

     cv2.imwrite(f"{all_output_path}/{nframe}.jpeg", collaboration_frame)
 
-    # save hard sample image to dir
-    if not inference_result.is_hard_sample:
+    # save hard example image to dir
+    if not inference_result.is_hard_example:
         return
 
-    if inference_result.hard_sample_cloud_result is not None:
-        cv2.imwrite(f"{hard_sample_cloud_output_path}/{nframe}.jpeg",
+    if inference_result.hard_example_cloud_result is not None:
+        cv2.imwrite(f"{hard_example_cloud_output_path}/{nframe}.jpeg",
                     collaboration_frame)
     edge_collaboration_frame = draw_boxes(
         img_rgb,
-        inference_result.hard_sample_edge_result,
+        inference_result.hard_example_edge_result,
         colors="green,blue,yellow,red",
         text_thickness=None,
         box_thickness=None)
-    cv2.imwrite(f"{hard_sample_edge_output_path}/{nframe}.jpeg",
+    cv2.imwrite(f"{hard_example_edge_output_path}/{nframe}.jpeg",
                 edge_collaboration_frame)


@@ -163,18 +165,18 @@ def run():
     camera_address = neptune.context.get_parameters('video_url')
 
     mkdir(all_output_path)
-    mkdir(hard_sample_edge_output_path)
-    mkdir(hard_sample_cloud_output_path)
+    mkdir(hard_example_edge_output_path)
+    mkdir(hard_example_cloud_output_path)
 
     # create little model object
     model = neptune.joint_inference.TSLittleModel(
         preprocess=preprocess,
-        postprocess=None,
+        postprocess=postprocess,
         input_shape=input_shape,
         create_input_feed=create_input_feed,
         create_output_fetch=create_output_fetch
     )
-    # create hard sample algorithm
+    # create hard example algorithm
     threshold_box = float(neptune.context.get_hem_parameters(
         "threshold_box", 0.5
     ))
@@ -186,9 +188,7 @@ def run():
     # create joint inference object
     inference_instance = neptune.joint_inference.JointInference(
         little_model=model,
-        hard_example_mining_algorithm=hard_example_mining_algorithm,
-        pre_hook=None,
-        post_hook=post_hook,
+        hard_example_mining_algorithm=hard_example_mining_algorithm
     )
 
     # use video streams for testing
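Taken together, the little_model.py changes move user postprocessing out of the joint-inference layer and into the model wrapper itself. A minimal sketch of the new wiring, with the user callbacks stubbed out (only the keyword arguments mirror the diff above; the stub bodies are illustrative):

import neptune


def preprocess(image, input_shape):
    # resize and pad the frame to input_shape; stubbed for brevity
    return image


def create_input_feed(sess, new_image, img_data):
    # build the TF session's input feed dict; stubbed for brevity
    return {}


def create_output_fetch(sess):
    # collect the output tensors to fetch; stubbed for brevity
    return []


def postprocess(model_output):
    # decode (classes, scores, bboxes) into a bbox list; stubbed for brevity
    return model_output


model = neptune.joint_inference.TSLittleModel(
    preprocess=preprocess,
    postprocess=postprocess,  # now applied inside TSLittleModel.inference
    input_shape=(416, 736),
    create_input_feed=create_input_feed,
    create_output_fetch=create_output_fetch
)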
53 changes: 25 additions & 28 deletions lib/neptune/joint_inference/joint_inference.py
@@ -203,7 +203,10 @@ def inference(self, img_data):
         input_feed = self.create_input_feed(self.session, new_image,
                                             img_data_np)
         output_fetch = self.create_output_fetch(self.session)
-        return self.session.run(output_fetch, input_feed)
+        output = self.session.run(output_fetch, input_feed)
+        if self.postprocess:
+            output = self.postprocess(output)
+        return output
 
 
 class LCReporter(threading.Thread):
@@ -253,7 +256,7 @@ def run(self):
             info.inferenceNumber = self.inference_number
             info.hardExampleNumber = self.hard_example_number
             info.uploadCloudRatio = (
-                    self.hard_example_number / self.inference_number
+                self.hard_example_number / self.inference_number
             )
             message = {
                 "name": BaseConfig.worker_name,
@@ -271,35 +274,33 @@
 class InferenceResult:
     """The Result class for joint inference
-    :param is_hard_sample: `True` means a hard sample, `False` means not a hard
-        sample
+    :param is_hard_example: `True` means a hard example, `False` means not a
+        hard example
     :param final_result: the final inference result
-    :param hard_sample_edge_result: the edge little model inference result of
-        hard sample
-    :param hard_sample_cloud_result: the cloud big model inference result of
-        hard sample
+    :param hard_example_edge_result: the edge little model inference result of
+        hard example
+    :param hard_example_cloud_result: the cloud big model inference result of
+        hard example
     """
 
-    def __init__(self, is_hard_sample, final_result,
-                 hard_sample_edge_result, hard_sample_cloud_result):
-        self.is_hard_sample = is_hard_sample
+    def __init__(self, is_hard_example, final_result,
+                 hard_example_edge_result, hard_example_cloud_result):
+        self.is_hard_example = is_hard_example
         self.final_result = final_result
-        self.hard_sample_edge_result = hard_sample_edge_result
-        self.hard_sample_cloud_result = hard_sample_cloud_result
+        self.hard_example_edge_result = hard_example_edge_result
+        self.hard_example_cloud_result = hard_example_cloud_result
 
 
 class JointInference:
     """Class provided for external systems for model joint inference.
     :param little_model: the little model entity for edge inference
-    :param hard_example_mining_algorithm: the algorithm for judging hard sample
-    :param pre_hook: the pre function of edge inference
-    :param post_hook: the post function of edge inference
+    :param hard_example_mining_algorithm: the algorithm for judging hard
+        example
     """
 
     def __init__(self, little_model: BaseModel,
-                 hard_example_mining_algorithm=None,
-                 pre_hook=None, post_hook=None):
+                 hard_example_mining_algorithm=None):
         self.little_model = little_model
         self.big_model = BigModelClient()
         # TODO how to deal process use-defined cloud_offload_algorithm,
@@ -327,9 +328,7 @@ def __init__(self, little_model: BaseModel,
         else:
             hard_example_mining_algorithm = ThresholdFilter()
 
-        self.cloud_offload_algorithm = hard_example_mining_algorithm
-        self.pre_hook = pre_hook
-        self.post_hook = post_hook
+        self.hard_example_mining_algorithm = hard_example_mining_algorithm
 
         self.lc_reporter = LCReporter()
         self.lc_reporter.setDaemon(True)
@@ -338,14 +337,12 @@
     def inference(self, img_data) -> InferenceResult:
         """Image inference function."""
         img_data_pre = img_data
-        if self.pre_hook:
-            img_data_pre = self.pre_hook(img_data_pre)
         edge_result = self.little_model.inference(img_data_pre)
-        if self.post_hook:
-            edge_result = self.post_hook(edge_result)
-        is_hard_sample = self.cloud_offload_algorithm.hard_judge(edge_result)
-        if not is_hard_sample:
-            LOG.debug("not hard sample, use edge result directly")
+        is_hard_example = self.hard_example_mining_algorithm.hard_judge(
+            edge_result
+        )
+        if not is_hard_example:
+            LOG.debug("not hard example, use edge result directly")
             self.lc_reporter.update_for_edge_inference()
             return InferenceResult(False, edge_result, None, None)

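After this cleanup, inference depends on a single method of the mining algorithm, hard_judge(edge_result), so any object exposing it can be plugged in. For illustration, a hypothetical score-threshold filter in the spirit of the ThresholdFilter default (the library's actual implementation may differ):

class SimpleThresholdFilter:
    """Hypothetical hard example filter: a frame is a hard example when
    any detection score falls below the threshold."""

    def __init__(self, threshold=0.5):
        self.threshold = threshold

    def hard_judge(self, edge_result):
        # edge_result: bboxes shaped like [x1, y1, x2, y2, score, label]
        if not edge_result:
            return True  # no detections: offload to the cloud big model
        return any(bbox[4] < self.threshold for bbox in edge_result)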
