Remove unused summary/conclusion logic (#1179)
* remove unused summary/conclusion logic

* Set timestamp when concluding answer

* Restore line number offset logic in transcoder, fix tests

---------

Co-authored-by: calyptobai <calyptobai@gmail.com>
ggordonhall and calyptobai authored Dec 15, 2023
1 parent 7f44f51 commit f84aab6
Showing 4 changed files with 46 additions and 315 deletions.
6 changes: 3 additions & 3 deletions server/bleep/src/agent.rs
@@ -91,7 +91,7 @@ impl Drop for Agent {
                     .with_payload("message", "request panicked"),
             );
         } else {
-            self.last_exchange_mut().apply_update(Update::Cancel);
+            self.last_exchange_mut().apply_update(Update::SetTimestamp);
 
             self.track_query(
                 EventData::output_stage("cancelled")
@@ -319,8 +319,8 @@ impl Agent {
 
         let answer = match e.answer() {
             // NB: We intentionally discard the summary as it is redundant.
-            Some((answer, _conclusion)) => {
-                let encoded = transcoder::encode_summarized(answer, None, "gpt-3.5-turbo")?;
+            Some(answer) => {
+                let encoded = transcoder::encode_summarized(answer, "gpt-3.5-turbo")?;
                 Some(llm_gateway::api::Message::function_return("none", &encoded))
             }
 
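
Where the agent previously unpacked `(answer, _conclusion)` from an exchange, it now takes the bare answer and passes it to the two-argument `encode_summarized`. A rough sketch of that pattern with hypothetical stand-in stubs (the real `transcoder::encode_summarized` and `llm_gateway::api::Message` are not reproduced here):

```rust
use anyhow::Result;

// Hypothetical stub standing in for transcoder::encode_summarized, which
// after this commit takes only the answer text and a model name.
fn encode_summarized(answer: &str, model: &str) -> Result<String> {
    let _ = model;
    Ok(answer.to_owned())
}

// Hypothetical stand-in for llm_gateway::api::Message::function_return.
#[derive(Debug)]
struct FunctionReturn {
    name: String,
    content: String,
}

// Mirrors the new match in agent.rs: `answer()` yields Option<&str>,
// so there is no conclusion left to discard.
fn history_message(answer: Option<&str>) -> Result<Option<FunctionReturn>> {
    Ok(match answer {
        Some(answer) => Some(FunctionReturn {
            name: "none".to_owned(),
            content: encode_summarized(answer, "gpt-3.5-turbo")?,
        }),
        None => None,
    })
}

fn main() -> Result<()> {
    println!("{:?}", history_message(Some("The indexer lives in the server crate."))?);
    Ok(())
}
```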
32 changes: 6 additions & 26 deletions server/bleep/src/agent/exchange.rs
@@ -2,7 +2,6 @@ use crate::query::parser::SemanticQuery;
 use std::fmt;
 
 use chrono::prelude::{DateTime, Utc};
-use rand::seq::SliceRandom;
 
 /// A continually updated conversation exchange.
 ///
@@ -59,25 +58,10 @@ impl Exchange {
             Update::Article(full_text) => {
                 *self.answer.get_or_insert_with(String::new) = full_text;
             }
-            Update::Conclude(conclusion) => {
-                self.response_timestamp = Some(Utc::now());
-                self.conclusion = Some(conclusion);
-            }
             Update::Focus(chunk) => {
                 self.focused_chunk = Some(chunk);
             }
-            Update::Cancel => {
-                let conclusion = [
-                    "The article wasn't completed. See what's available",
-                    "Your article stopped before completion. Check out the available content",
-                    "The content stopped generating early. Review the initial response",
-                ]
-                .choose(&mut rand::thread_rng())
-                .copied()
-                .unwrap()
-                .to_owned();
-
-                self.conclusion = Some(conclusion);
+            Update::SetTimestamp => {
                 self.response_timestamp = Some(Utc::now());
             }
         }
@@ -88,14 +72,11 @@ impl Exchange {
         self.query.target().map(|q| q.to_string())
     }
 
-    /// Get the answer and conclusion associated with this exchange, if a conclusion has been made.
+    /// Get the answer associated with this exchange.
     ///
-    /// This returns a tuple of `(full_text, conclusion)`.
-    pub fn answer(&self) -> Option<(&str, &str)> {
-        match (&self.answer, &self.conclusion) {
-            (Some(answer), Some(conclusion)) => Some((answer.as_str(), conclusion.as_str())),
-            _ => None,
-        }
+    /// This returns the full text of the answer, if one has been generated.
+    pub fn answer(&self) -> Option<&str> {
+        self.answer.as_deref()
     }
 
     /// Return a copy of this exchange, with all function call responses redacted.
@@ -203,7 +184,6 @@ pub enum Update {
     StartStep(SearchStep),
    ReplaceStep(SearchStep),
     Article(String),
-    Conclude(String),
     Focus(FocusedChunk),
-    Cancel,
+    SetTimestamp,
 }
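
Taken together, the exchange now looks roughly like the sketch below: `Update::SetTimestamp` only stamps `response_timestamp`, and `answer()` exposes the accumulated article directly. This is a simplified, self-contained mirror of the new shape (the real `Exchange` also carries the query, search steps, focused chunk, and more):

```rust
use chrono::{DateTime, Utc};

// Simplified stand-ins for the types in agent/exchange.rs after this commit.
#[derive(Default)]
struct Exchange {
    answer: Option<String>,
    response_timestamp: Option<DateTime<Utc>>,
}

enum Update {
    Article(String),
    SetTimestamp,
}

impl Exchange {
    fn apply_update(&mut self, update: Update) {
        match update {
            // Each streamed update replaces the full article text.
            Update::Article(full_text) => {
                *self.answer.get_or_insert_with(String::new) = full_text;
            }
            // Concluding or cancelling now only records when the answer ended;
            // no canned conclusion string is generated.
            Update::SetTimestamp => {
                self.response_timestamp = Some(Utc::now());
            }
        }
    }

    // The answer is returned on its own; the `(answer, conclusion)` tuple is gone.
    fn answer(&self) -> Option<&str> {
        self.answer.as_deref()
    }
}

fn main() {
    let mut exchange = Exchange::default();
    exchange.apply_update(Update::Article("Partial answer".to_owned()));
    exchange.apply_update(Update::SetTimestamp);
    assert_eq!(exchange.answer(), Some("Partial answer"));
    assert!(exchange.response_timestamp.is_some());
}
```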
34 changes: 7 additions & 27 deletions server/bleep/src/agent/tools/answer.rs
@@ -2,7 +2,6 @@ use std::{collections::HashMap, mem, ops::Range, pin::pin};
 
 use anyhow::{anyhow, Context, Result};
 use futures::StreamExt;
-use rand::{rngs::OsRng, seq::SliceRandom};
 use tracing::{debug, info, instrument, trace};
 
 use crate::{
@@ -77,32 +76,15 @@ impl Agent {
             let fragment = fragment?;
             response += &fragment;
 
-            let (article, summary) = transcoder::decode(&response);
+            let article = transcoder::decode(&response);
             self.update(Update::Article(article)).await?;
-
-            if let Some(summary) = summary {
-                self.update(Update::Conclude(summary)).await?;
-            }
         }
 
-        // We re-decode one final time to catch cases where `summary` is `None`, and to log the
-        // output as a trace.
-        let (article, summary) = transcoder::decode(&response);
-        let summary = summary.unwrap_or_else(|| {
-            [
-                "I hope that was useful, can I help with anything else?",
-                "Is there anything else I can help you with?",
-                "Can I help you with anything else?",
-            ]
-            .choose(&mut OsRng)
-            .copied()
-            .unwrap()
-            .to_owned()
-        });
-
-        trace!(%article, "generated answer");
+        if let Some(article) = self.last_exchange().answer() {
+            trace!(%article, "generated answer");
+        }
 
-        self.update(Update::Conclude(summary)).await?;
+        self.update(Update::SetTimestamp).await?;
 
         self.track_query(
             EventData::output_stage("answer_article")
@@ -223,10 +205,8 @@ impl Agent {
             content: q,
         });
 
-        let conclusion = e.answer().map(|(answer, conclusion)| {
-            let encoded =
-                transcoder::encode_summarized(answer, Some(conclusion), "gpt-4-0613")
-                    .unwrap();
+        let conclusion = e.answer().map(|answer| {
+            let encoded = transcoder::encode_summarized(answer, "gpt-4-0613").unwrap();
 
             llm_gateway::api::Message::PlainText {
                 role: "assistant".to_owned(),
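
The streaming path in answer.rs therefore reduces to: decode each partial response into an article, push it as `Update::Article`, and emit a single `Update::SetTimestamp` once the stream ends. A rough, synchronous sketch under those assumptions (`decode` is a stub for `transcoder::decode`, which now returns only the article, and the real loop is async over LLM fragments):

```rust
// Stub for transcoder::decode; after this commit it returns just the article.
fn decode(response: &str) -> String {
    response.to_owned()
}

#[derive(Default)]
struct Exchange {
    answer: Option<String>,
    concluded: bool, // stands in for the real `response_timestamp`
}

enum Update {
    Article(String),
    SetTimestamp,
}

fn apply(exchange: &mut Exchange, update: Update) {
    match update {
        Update::Article(text) => exchange.answer = Some(text),
        Update::SetTimestamp => exchange.concluded = true,
    }
}

fn main() {
    let fragments = ["The indexer ", "lives in ", "the server crate."];
    let mut exchange = Exchange::default();
    let mut response = String::new();

    for fragment in fragments {
        response += fragment;
        // Every partial response is re-decoded and pushed as the full article;
        // there is no summary to split off any more.
        apply(&mut exchange, Update::Article(decode(&response)));
    }

    // Log the final answer (mirrors the new `if let Some(article) = ...` guard)
    // and record that the answer has concluded.
    if let Some(article) = exchange.answer.as_deref() {
        println!("generated answer: {article}");
    }
    apply(&mut exchange, Update::SetTimestamp);
}
```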