Skip to content

Commit

Permalink
feat: add streaming API response
Browse files Browse the repository at this point in the history
- this commit adds three ways streaming can be used to get responses from an API
- all three are novel approaches and super cool
  • Loading branch information
raghav-rama committed Feb 26, 2024
1 parent e7004d1 commit 048f82c
Show file tree
Hide file tree
Showing 3 changed files with 217 additions and 0 deletions.
92 changes: 92 additions & 0 deletions src/bin/stream_api_buf_reader.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,92 @@
use dotenvy::dotenv;
use reqwest::{
blocking::Client,
header::{HeaderMap, HeaderValue, ACCEPT, AUTHORIZATION},
};
use serde::Deserialize;
use serde_json::json;
use std::io::{self, BufRead, BufReader, Write};
/// One server-sent-event payload from the chat-completions stream.
/// Only the `choices` array is deserialized; any other JSON fields in the
/// chunk are ignored by serde.
#[allow(dead_code)]
#[derive(Deserialize, Debug)]
struct ChatCompletionChunk {
    choices: Vec<Choices>,
}

/// A single streamed choice: its position in the response (`index`) plus
/// the incremental message fragment (`delta`) carried by this chunk.
#[allow(dead_code)]
#[derive(Debug, Deserialize)]
struct Choices {
    index: u64,
    delta: Delta,
}

#[allow(dead_code)]
#[derive(Debug, Deserialize)]
struct Delta {
content: String,
}

/// Streams a chat completion from the OpenAI API over blocking I/O and
/// prints the assistant's tokens to stdout as they arrive.
///
/// Reads `OPENAI_API_KEY` from the environment (loaded from `.env`) and
/// parses the `text/event-stream` body line by line: an SSE message is one
/// or more `data: ` lines terminated by a blank line.
///
/// Fixes over the original: the `data: [DONE]` end-of-stream sentinel is
/// handled explicitly, an empty `choices` array no longer panics
/// (`first()` instead of `[0]`), and unparsable chunks are reported on
/// stderr instead of being silently swallowed.
fn main() {
    dotenv().expect(".env file not found");
    let openai_api_key =
        std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY not found in .env file");
    let client = Client::new();
    let url = "https://api.openai.com/v1/chat/completions";

    let mut headers = HeaderMap::new();
    headers.insert(ACCEPT, HeaderValue::from_static("text/event-stream"));
    headers.insert(
        AUTHORIZATION,
        HeaderValue::from_str(&format!("Bearer {}", openai_api_key))
            .expect("API key contains characters that are invalid in a header"),
    );

    let request_body = json!(
        {
            "model": "gpt-4-turbo-preview",
            "messages": [
                {
                    "role": "system",
                    "content": "You are a helpful assistant, who generates output in JSON format only."
                },
                {"role": "user", "content": "Hi!"}
            ],
            "response_format": {"type": "json_object"},
            "stream": true
        }
    );

    let response = client
        .post(url)
        .headers(headers)
        .json(&request_body)
        .send()
        .expect("request to OpenAI failed");
    let reader = BufReader::new(response);

    let mut buffer = String::new();
    for line in reader.lines() {
        let line = line.expect("failed to read line from response stream");

        if let Some(payload) = line.strip_prefix("data: ") {
            buffer.push_str(payload);
        } else if line.is_empty() {
            // A blank line terminates one complete SSE message.
            if buffer == "[DONE]" {
                // Sentinel the API sends after the final content chunk.
                break;
            }
            match serde_json::from_str::<ChatCompletionChunk>(&buffer) {
                Ok(chunk) => {
                    // `first()` avoids an index panic if `choices` is empty.
                    if let Some(choice) = chunk.choices.first() {
                        print!("{}", choice.delta.content);
                        io::stdout().flush().expect("failed to flush stdout");
                    }
                }
                Err(e) => {
                    // Non-fatal: chunks that don't match the expected shape
                    // are reported and skipped rather than silently dropped.
                    eprintln!("skipping unparsable chunk: {}", e);
                }
            }
            buffer.clear();
        }
    }
    println!();
}
64 changes: 64 additions & 0 deletions src/bin/stream_api_response_byte_stream.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
use dotenvy::dotenv;
use error_chain::error_chain;
use futures_util::stream::StreamExt;
use reqwest::{
header::{HeaderMap, HeaderValue, ACCEPT, AUTHORIZATION},
Client,
};
use serde_json::json;
use std::env;

// Generates this binary's `Error` type and `Result<T>` alias, wrapping
// `std::io::Error` and `reqwest::Error` so `?` works on both in `main`.
error_chain! {
    foreign_links {
        Io(std::io::Error);
        HttpRequest(reqwest::Error);
    }
}

/// Streams a chat completion asynchronously and prints each raw byte chunk
/// of the response body as it arrives on the wire.
#[tokio::main]
async fn main() -> Result<()> {
    // Load OPENAI_API_KEY from the local .env file.
    dotenv().expect(".env file not found");
    let api_key = env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY not found in .env file");

    let endpoint = "https://api.openai.com/v1/chat/completions";
    let client = Client::new();

    // Ask for a server-sent-event stream and authenticate with a bearer token.
    let mut headers = HeaderMap::new();
    headers.insert(ACCEPT, HeaderValue::from_static("text/event-stream"));
    let bearer = format!("Bearer {}", api_key);
    headers.insert(AUTHORIZATION, HeaderValue::from_str(&bearer).unwrap());

    let body = json!(
        {
            "model": "gpt-4-turbo-preview",
            "messages": [
                {
                    "role": "system",
                    "content": "You are a helpful assistant, who generates output in JSON format only."
                },
                {"role": "user", "content": "What is the meaning of life?"}
            ],
            "response_format": {"type": "json_object"},
            "stream": true
        }
    );

    // `bytes_stream()` turns the response body into an async stream of
    // `Bytes` chunks, consumed below via `StreamExt::next`.
    let mut stream = client
        .post(endpoint)
        .headers(headers)
        .json(&body)
        .send()
        .await?
        .bytes_stream();

    while let Some(piece) = stream.next().await {
        println!("Chunk: {:?}", piece?);
    }

    Ok(())
}
61 changes: 61 additions & 0 deletions src/bin/stream_api_response_chunk.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
use dotenvy::dotenv;
use error_chain::error_chain;
use reqwest::{
header::{HeaderMap, HeaderValue, ACCEPT, AUTHORIZATION},
Client,
};
use serde_json::json;
use std::env;

// Generates this binary's `Error` type and `Result<T>` alias, wrapping
// `std::io::Error` and `reqwest::Error` so `?` works on both in `main`.
error_chain! {
    foreign_links {
        Io(std::io::Error);
        HttpRequest(reqwest::Error);
    }
}

/// Streams a chat completion asynchronously and prints each response chunk
/// as it arrives, pulling bytes with `Response::chunk()`.
///
/// Fix over the original: `headers` was cloned before being passed to the
/// request builder even though it is used exactly once — the clone was a
/// needless allocation (clippy `redundant_clone`), so the map is now moved.
#[tokio::main]
async fn main() -> Result<()> {
    dotenv().expect(".env file not found");
    let openai_api_key = env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY not found in .env file");
    let url = "https://api.openai.com/v1/chat/completions";
    let mut headers = HeaderMap::new();

    headers.insert(ACCEPT, HeaderValue::from_static("text/event-stream"));
    headers.insert(
        AUTHORIZATION,
        HeaderValue::from_str(&format!("Bearer {}", openai_api_key))
            .expect("API key contains characters that are invalid in a header"),
    );

    let request_body = json!(
        {
            "model": "gpt-4-turbo-preview",
            "messages": [
                {
                    "role": "system",
                    "content": "You are a helpful assistant, who generates output in JSON format only."
                },
                {"role": "user", "content": "What is the meaning of life?"}
            ],
            "response_format": {"type": "json_object"},
            "stream": true
        }
    );

    let client = Client::new();
    let mut response = client
        .post(url)
        .headers(headers) // moved, not cloned — used only once
        .json(&request_body)
        .send()
        .await?;

    // `chunk()` resolves to `Ok(None)` once the body is exhausted.
    while let Some(chunk) = response.chunk().await? {
        println!("Chunk: {:?}", chunk);
    }

    Ok(())
}

0 comments on commit 048f82c

Please sign in to comment.