Skip to content

Commit 8ac0236

Browse files
author
Raminder Singh
committed
Removed unnecessary comparisons against bool literals
See this clippy lint for details: https://rust-lang.github.io/rust-clippy/master/index.html#bool_comparison
1 parent c55b1f3 commit 8ac0236

File tree

13 files changed

+70
-75
lines changed

13 files changed

+70
-75
lines changed

src/channel/command.rs

+10-10
Original file line numberDiff line numberDiff line change
@@ -149,7 +149,7 @@ impl ChannelCommandBase {
149149
))])
150150
}
151151
(Some(manual_key), next_part) => {
152-
if next_part.is_none() == true {
152+
if next_part.is_none() {
153153
if let Some(manual_data) = manuals.get(manual_key) {
154154
Ok(vec![ChannelCommandResponse::Result(format!(
155155
"{}({})",
@@ -171,7 +171,7 @@ impl ChannelCommandBase {
171171
let mut text_raw = String::new();
172172

173173
while let Some(text_part) = parts.next() {
174-
if text_raw.is_empty() == false {
174+
if !text_raw.is_empty() {
175175
text_raw.push_str(" ");
176176
}
177177

@@ -209,7 +209,7 @@ impl ChannelCommandBase {
209209
let text_bytes = text_raw.as_bytes();
210210
let text_bytes_len = text_bytes.len();
211211

212-
if text_raw.is_empty() == true
212+
if text_raw.is_empty()
213213
|| text_bytes_len < 2
214214
|| text_bytes[0] as char != TEXT_PART_BOUNDARY
215215
|| text_bytes[text_bytes_len - 1] as char != TEXT_PART_BOUNDARY
@@ -231,7 +231,7 @@ impl ChannelCommandBase {
231231
debug!("parsed text parts (post-processed): {}", text_inner_string);
232232

233233
// Text must not be empty
234-
if text_inner_string.is_empty() == false {
234+
if !text_inner_string.is_empty() {
235235
Some(text_inner_string)
236236
} else {
237237
None
@@ -254,7 +254,7 @@ impl ChannelCommandBase {
254254
) -> Option<MetaPartsResult<'a>> {
255255
if let Some(part) = parts.next() {
256256
// Parse meta (with format: 'KEY(VALUE)'; no '(' or ')' is allowed in KEY and VALUE)
257-
if part.is_empty() == false {
257+
if !part.is_empty() {
258258
if let Some(index_open) = part.find(META_PART_GROUP_OPEN) {
259259
let (key_bound_start, key_bound_end) = (0, index_open);
260260
let (value_bound_start, value_bound_end) = (index_open + 1, part.len() - 1);
@@ -266,10 +266,10 @@ impl ChannelCommandBase {
266266
);
267267

268268
// Ensure final key and value do not contain reserved syntax characters
269-
return if key.contains(META_PART_GROUP_OPEN) == false
270-
&& key.contains(META_PART_GROUP_CLOSE) == false
271-
&& value.contains(META_PART_GROUP_OPEN) == false
272-
&& value.contains(META_PART_GROUP_CLOSE) == false
269+
return if !key.contains(META_PART_GROUP_OPEN)
270+
&& !key.contains(META_PART_GROUP_CLOSE)
271+
&& !value.contains(META_PART_GROUP_OPEN)
272+
&& !value.contains(META_PART_GROUP_CLOSE)
273273
{
274274
debug!("parsed meta part as: {} = {}", key, value);
275275

@@ -781,7 +781,7 @@ impl ChannelCommandControl {
781781
CONTROL_TRIGGER_ACTIONS.join(", ")
782782
))]),
783783
(Some(action_key), next_part) => {
784-
if next_part.is_none() == true {
784+
if next_part.is_none() {
785785
let action_key_lower = action_key.to_lowercase();
786786

787787
match action_key_lower.as_str() {

src/channel/handle.rs

+2-2
Original file line numberDiff line numberDiff line change
@@ -98,7 +98,7 @@ impl ChannelHandle {
9898
}
9999

100100
fn configure_stream(stream: &TcpStream, is_established: bool) {
101-
let tcp_timeout = if is_established == true {
101+
let tcp_timeout = if is_established {
102102
APP_CONF.channel.tcp_timeout
103103
} else {
104104
TCP_TIMEOUT_NON_ESTABLISHED
@@ -170,7 +170,7 @@ impl ChannelHandle {
170170
}
171171

172172
// Incomplete line remaining? Put it back in buffer.
173-
if processed_line.is_empty() == false {
173+
if !processed_line.is_empty() {
174174
buffer.extend(processed_line);
175175
}
176176
}

src/channel/message.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -69,7 +69,7 @@ impl ChannelMessage {
6969

7070
// Serve response messages on socket
7171
for response_args in response_args_groups {
72-
if response_args.0.is_empty() == false {
72+
if !response_args.0.is_empty() {
7373
if let Some(ref values) = response_args.1 {
7474
let values_string = values.join(" ");
7575

src/executor/flushb.rs

+2-2
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ impl ExecutorFlushB {
2121
// Important: acquire bucket store write lock
2222
executor_kv_lock_write!(kv_store);
2323

24-
if kv_store.is_some() == true {
24+
if kv_store.is_some() {
2525
// Store exists, proceed erasure.
2626
debug!(
2727
"collection store exists, erasing: {} from {}",
@@ -35,7 +35,7 @@ impl ExecutorFlushB {
3535
// erasing a bucket requires a database lock, which would incur a dead-lock, \
3636
// thus we need to perform the erasure from there.
3737
if let Ok(erase_count) = kv_action.batch_erase_bucket() {
38-
if StoreFSTActionBuilder::erase(collection, Some(bucket)).is_ok() == true {
38+
if StoreFSTActionBuilder::erase(collection, Some(bucket)).is_ok() {
3939
debug!("done with bucket erasure");
4040

4141
return Ok(erase_count);

src/executor/pop.rs

+3-3
Original file line numberDiff line numberDiff line change
@@ -89,20 +89,20 @@ impl ExecutorPop {
8989
// Nuke IID in Term-to-IIDs list
9090
for (pop_term, pop_term_hashed) in &pop_terms {
9191
// Check that term is linked to IID (and should be removed)
92-
if iid_terms_hashed.contains(pop_term_hashed) == true {
92+
if iid_terms_hashed.contains(pop_term_hashed) {
9393
if let Ok(Some(mut pop_term_iids)) =
9494
kv_action.get_term_to_iids(*pop_term_hashed)
9595
{
9696
// Remove IID from list of IIDs to be popped
9797
pop_term_iids.retain(|cur_iid| cur_iid != &iid);
9898

99-
if pop_term_iids.is_empty() == true {
99+
if pop_term_iids.is_empty() {
100100
// IIDs list was empty, delete whole key
101101
executor_ensure_op!(kv_action
102102
.delete_term_to_iids(*pop_term_hashed));
103103

104104
// Pop from FST graph (does not exist anymore)
105-
if fst_action.pop_word(pop_term) == true {
105+
if fst_action.pop_word(pop_term) {
106106
debug!(
107107
"pop term hash nuked from graph: {}",
108108
pop_term_hashed

src/executor/push.rs

+4-5
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,6 @@ impl ExecutorPush {
5858
StoreMetaValue::IIDIncr(iid_incr),
5959
)
6060
.is_ok()
61-
== true
6261
{
6362
// Associate OID <> IID (bidirectional)
6463
executor_ensure_op!(kv_action.set_oid_to_iid(oid, iid_incr));
@@ -96,7 +95,7 @@ impl ExecutorPush {
9695

9796
while let Some((term, term_hashed)) = lexer.next() {
9897
// Check that term is not already linked to IID
99-
if iid_terms_hashed.contains(&term_hashed) == false {
98+
if !iid_terms_hashed.contains(&term_hashed) {
10099
if let Ok(term_iids) = kv_action.get_term_to_iids(term_hashed) {
101100
has_commits = true;
102101

@@ -105,7 +104,7 @@ impl ExecutorPush {
105104

106105
// Remove IID from list of IIDs to be popped before inserting in \
107106
// first position?
108-
if term_iids.contains(&iid) == true {
107+
if term_iids.contains(&iid) {
109108
term_iids.retain(|cur_iid| cur_iid != &iid);
110109
}
111110

@@ -141,13 +140,13 @@ impl ExecutorPush {
141140
}
142141

143142
// Push to FST graph? (this consumes the term; to avoid sub-clones)
144-
if fst_action.push_word(&term) == true {
143+
if fst_action.push_word(&term) {
145144
debug!("push term commited to graph: {}", term);
146145
}
147146
}
148147

149148
// Commit updated list of terms for IID? (if any commit made)
150-
if has_commits == true {
149+
if has_commits {
151150
let collected_iids: Vec<StoreTermHashed> =
152151
iid_terms_hashed.into_iter().collect();
153152

src/executor/search.rs

+4-4
Original file line numberDiff line numberDiff line change
@@ -101,7 +101,7 @@ impl ExecutorSearch {
101101
// Do not append the same IID twice (can happen a lot \
102102
// when completing from suggested results that point \
103103
// to the same end-OID)
104-
if iids.contains(&suggested_iid) == false {
104+
if !iids.contains(&suggested_iid) {
105105
iids.insert(suggested_iid);
106106

107107
iids_new_len += 1;
@@ -133,7 +133,7 @@ impl ExecutorSearch {
133133
debug!("got search executor iids: {:?} for term: {}", iids, term);
134134

135135
// Intersect found IIDs with previous batch
136-
if found_iids.is_empty() == true {
136+
if found_iids.is_empty() {
137137
found_iids = iids;
138138
} else {
139139
found_iids = found_iids.intersection(&iids).map(|value| *value).collect();
@@ -145,7 +145,7 @@ impl ExecutorSearch {
145145
);
146146

147147
// No IID found? (stop there)
148-
if found_iids.is_empty() == true {
148+
if found_iids.is_empty() {
149149
info!(
150150
"stop search executor as no iid was found in common for term: {}",
151151
term
@@ -177,7 +177,7 @@ impl ExecutorSearch {
177177

178178
info!("got search executor final oids: {:?}", result_oids);
179179

180-
return Ok(if result_oids.is_empty() == false {
180+
return Ok(if !result_oids.is_empty() {
181181
Some(result_oids)
182182
} else {
183183
None

src/lexer/stopwords.rs

+3-3
Original file line numberDiff line numberDiff line change
@@ -139,7 +139,7 @@ impl LexerStopWord {
139139
pub fn is(word: &str, locale: Option<Lang>) -> bool {
140140
if let Some(locale) = locale {
141141
// Word is a stopword (given locale)
142-
if Self::lang_stopwords(locale).contains(word) == true {
142+
if Self::lang_stopwords(locale).contains(word) {
143143
return true;
144144
}
145145
}
@@ -171,14 +171,14 @@ impl LexerStopWord {
171171
for script_lang in script_langs {
172172
let lang_stopwords = Self::lang_stopwords(*script_lang);
173173

174-
if lang_stopwords.is_empty() == false {
174+
if !lang_stopwords.is_empty() {
175175
let mut lang_count = 0;
176176

177177
// This is a simple split, that does not take into account uppercase letters and \
178178
// punctuation, as to prevent memory allocations and other heavy operations. \
179179
// Trade-offs are made as this is a best-effort last-resort check.
180180
for word in &text_split {
181-
if lang_stopwords.contains(word) == true {
181+
if lang_stopwords.contains(word) {
182182
lang_count += 1;
183183
}
184184
}

src/lexer/token.rs

+3-3
Original file line numberDiff line numberDiff line change
@@ -144,7 +144,7 @@ impl TokenLexerBuilder {
144144
// Confidence is low, try to detect locale from stop-words.
145145
// Notice: this is a fallback but should not be too reliable for short \
146146
// texts.
147-
if detector.is_reliable() == false {
147+
if !detector.is_reliable() {
148148
debug!("[slow lexer] trying to detect locale from stopwords instead");
149149

150150
// Better alternate locale found?
@@ -238,15 +238,15 @@ impl<'a> Iterator for TokenLexer<'a> {
238238

239239
// Check if normalized word is a stop-word? (if should normalize and cleanup)
240240
if self.mode == TokenLexerMode::NormalizeOnly
241-
|| LexerStopWord::is(&word, self.locale) == false
241+
|| !LexerStopWord::is(&word, self.locale)
242242
{
243243
// Hash the term (this is used by all iterator consumers, as well as internally \
244244
// in the iterator to keep track of already-yielded words in a space-optimized \
245245
// manner, ie. by using 32-bit unsigned integer hashes)
246246
let term_hash = StoreTermHash::from(&word);
247247

248248
// Check if word was not already yielded? (we return unique words)
249-
if self.yields.contains(&term_hash) == false {
249+
if !self.yields.contains(&term_hash) {
250250
debug!("lexer yielded word: {}", word);
251251

252252
self.yields.insert(term_hash);

0 commit comments

Comments (0)