From 4d3680c81dad760e5d1ffe397ab2fc9169d9fb70 Mon Sep 17 00:00:00 2001
From: Glenn Jocher
Date: Tue, 13 Oct 2020 17:24:27 +0200
Subject: [PATCH] Minor import and spelling updates (#1133)

---
 detect.py        |  1 -
 train.py         |  2 +-
 utils/general.py | 12 ++++++------
 3 files changed, 7 insertions(+), 8 deletions(-)

diff --git a/detect.py b/detect.py
index 67e4248a9509..eee5f0208244 100644
--- a/detect.py
+++ b/detect.py
@@ -1,6 +1,5 @@
 import argparse
 import os
-import platform
 import shutil
 import time
 from pathlib import Path
diff --git a/train.py b/train.py
index 357c2c6434ad..80d9eec1e823 100644
--- a/train.py
+++ b/train.py
@@ -1,12 +1,12 @@
 import argparse
 import logging
-import math
 import os
 import random
 import shutil
 import time
 from pathlib import Path
 
+import math
 import numpy as np
 import torch.distributed as dist
 import torch.nn.functional as F
diff --git a/utils/general.py b/utils/general.py
index f58f7850be73..3513d65cb4c6 100755
--- a/utils/general.py
+++ b/utils/general.py
@@ -143,7 +143,7 @@ def check_dataset(dict):
     if val and len(val):
         val = [os.path.abspath(x) for x in (val if isinstance(val, list) else [val])]  # val path
         if not all(os.path.exists(x) for x in val):
-            print('\nWARNING: Dataset not found, nonexistant paths: %s' % [*val])
+            print('\nWARNING: Dataset not found, nonexistent paths: %s' % [*val])
             if s and len(s):  # download script
                 print('Downloading %s ...' % s)
                 if s.startswith('http') and s.endswith('.zip'):  # URL
@@ -158,7 +158,7 @@ def check_dataset(dict):
 
 
 def make_divisible(x, divisor):
-    # Returns x evenly divisble by divisor
+    # Returns x evenly divisible by divisor
     return math.ceil(x / divisor) * divisor
 
 
@@ -169,9 +169,9 @@ def labels_to_class_weights(labels, nc=80):
 
     labels = np.concatenate(labels, 0)  # labels.shape = (866643, 5) for COCO
     classes = labels[:, 0].astype(np.int)  # labels = [class xywh]
-    weights = np.bincount(classes, minlength=nc)  # occurences per class
+    weights = np.bincount(classes, minlength=nc)  # occurrences per class
 
-    # Prepend gridpoint count (for uCE trianing)
+    # Prepend gridpoint count (for uCE training)
     # gpi = ((320 / 32 * np.array([1, 2, 4])) ** 2 * 3).sum()  # gridpoints per image
     # weights = np.hstack([gpi * len(labels) - weights.sum() * 9, weights * 9]) ** 0.5  # prepend gridpoints to start
 
@@ -820,7 +820,7 @@ def print_results(k):
     k, dist = kmeans(wh / s, n, iter=30)  # points, mean distance
     k *= s
     wh = torch.tensor(wh, dtype=torch.float32)  # filtered
-    wh0 = torch.tensor(wh0, dtype=torch.float32)  # unflitered
+    wh0 = torch.tensor(wh0, dtype=torch.float32)  # unfiltered
     k = print_results(k)
 
     # Plot
@@ -1281,7 +1281,7 @@ def plot_results(start=0, stop=0, bucket='', id=(), labels=(), save_dir=''):
             for i in range(10):
                 y = results[i, x]
                 if i in [0, 1, 2, 5, 6, 7]:
-                    y[y == 0] = np.nan  # dont show zero loss values
+                    y[y == 0] = np.nan  # don't show zero loss values
                     # y /= y[0]  # normalize
                 label = labels[fi] if len(labels) else Path(f).stem
                 ax[i].plot(x, y, marker='.', label=label, linewidth=1, markersize=6)
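
For context, the make_divisible() helper whose comment is corrected above rounds a value up to the nearest multiple of divisor (YOLOv5 uses it during model scaling, e.g. to keep channel counts a multiple of 8). Below is a minimal standalone sketch of that behavior, using only the function body shown in the hunk; the example values are illustrative and not taken from the patch:

import math

def make_divisible(x, divisor):
    # Round x up to the nearest multiple of divisor
    return math.ceil(x / divisor) * divisor

print(make_divisible(51, 8))   # 56 (51 / 8 = 6.375, ceil -> 7, 7 * 8 = 56)
print(make_divisible(64, 8))   # 64 (already divisible, unchanged)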