12 versions
0.1.12 (Oct 10, 2022)
0.1.11 (Oct 10, 2022)
#317 in HTTP client
56 downloads per month
Used in linguee
17KB
156 lines
Send HTTP/HTTPS requests.
cargo add tomcat
tokio = { version = "1.21.2", features = ["full"] }
tomcat = "0.1.1"
Examples
Non-blocking GET
#[tokio::main]
async fn main() {
    use tomcat::*;
    if let Ok(res) = get("https://www.spacex.com").await {
        assert_eq!(200, res.status);
        // Headers from one sample run; actual values change between requests:
        // {"content-type": "text/html; charset=utf-8", "vary": "Accept-Encoding",
        //  "date": "Sun, 09 Oct 2022 18:49:44 GMT", "connection": "keep-alive",
        //  "keep-alive": "timeout=5", "transfer-encoding": "chunked"}
        println!("{:?}", res.headers);
        println!("{}", res.text);
        println!("{}", res.text_with_charset);
        println!("{}", res.url);
        println!("{}", res.remote_addr);
        println!("{:?}", res.version);
    }
}
pub fn look_ip(url: &str) {
    use tokio::runtime::Runtime;
    // Drive the async `get` from synchronous code by blocking on a runtime.
    let io_loop = Runtime::new().unwrap();
    let response = io_loop.block_on(tomcat::get(url)).unwrap();
    let addr = response.remote_addr.ip();
    println!("{:?}", addr);
}
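The `if let Ok(...)` pattern above silently drops any failure. A minimal sketch of handling both branches, assuming only that the error type returned by `get` implements `Debug` (not confirmed by the crate's documentation):

#[tokio::main]
async fn main() {
    // Handle the error branch instead of ignoring it; the Debug bound on the
    // error type is an assumption, not something the crate documents.
    match tomcat::get("https://www.spacex.com").await {
        Ok(res) => println!("{} -> {}", res.url, res.status),
        Err(e) => eprintln!("request failed: {:?}", e),
    }
}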
Blocking GET
fn main() {
    use tomcat::*;
    if let Ok(res) = get_blocking("https://www.spacex.com") {
        assert_eq!(200, res.status);
        // Headers from one sample run; actual values change between requests:
        // {"content-type": "text/html; charset=utf-8", "vary": "Accept-Encoding",
        //  "date": "Sun, 09 Oct 2022 18:49:44 GMT", "connection": "keep-alive",
        //  "keep-alive": "timeout=5", "transfer-encoding": "chunked"}
        println!("{:?}", res.headers);
        println!("{}", res.text);
        println!("{}", res.text_with_charset);
        println!("{}", res.url);
        println!("{}", res.remote_addr);
        println!("{:?}", res.version);
    }
}
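Because `get_blocking` needs no async runtime, it drops straight into ordinary control flow. A minimal sketch that probes a couple of URLs in a loop (the URLs are arbitrary placeholders, and the error type is again assumed to implement `Debug`):

fn main() {
    // Probe each site sequentially; the URLs are placeholders for illustration.
    for url in ["https://www.spacex.com", "https://www.rust-lang.org"] {
        match tomcat::get_blocking(url) {
            Ok(res) => println!("{} -> {}", res.url, res.status),
            Err(e) => eprintln!("{}: request failed: {:?}", url, e),
        }
    }
}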
Non-blocking POST
#[tokio::main]
async fn main() {
    openai().await.unwrap();
}

async fn openai() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    use std::env::{self, args};
    use reqwest::Body;
    use serde_derive::{Deserialize, Serialize};
    use http::header;

    #[derive(Serialize, Debug)]
    struct OpenAIRequest {
        model: String,
        prompt: String,
        max_tokens: u32,
        stop: String,
    }
    #[derive(Deserialize, Debug)]
    struct OpenAIChoices {
        text: String,
    }
    #[derive(Deserialize, Debug)]
    struct OpenAIResponse {
        choices: Vec<OpenAIChoices>,
    }

    let api_key = match env::var("OPENAI_KEY") {
        Ok(key) => key,
        Err(_) => {
            println!("Error: please create an environment variable OPENAI_KEY");
            std::process::exit(1);
        }
    };
    let uri = "https://api.openai.com/v1/completions";
    let model = String::from("text-davinci-002");
    let stop = String::from("Text");
    let default_prompt =
        "Given text, return 1 bash command. Text:list contents of a directory. Command:ls";

    // Collect everything after the program name as the user's prompt.
    let mut user_input = String::new();
    let mut arguments: Vec<String> = args().collect();
    arguments.remove(0);
    if arguments.is_empty() {
        println!("Welcome to Rusty! Enter an argument to get started.");
        std::process::exit(1);
    }
    for x in arguments {
        user_input.push(' ');
        user_input.push_str(&x);
    }

    let auth_header_val = format!("Bearer {}", api_key);
    let openai_request = OpenAIRequest {
        model,
        prompt: format!("{} Text:{}. Command:", default_prompt, user_input),
        max_tokens: 64,
        stop,
    };
    let body = Body::from(serde_json::to_vec(&openai_request)?);

    if let Ok(req) = tomcat::post(uri).await {
        let res = req
            .header(header::CONTENT_TYPE, "application/json")
            .header("Authorization", &auth_header_val)
            .body(body)
            .send()
            .await
            .unwrap();
        let text = res.text().await.unwrap();
        let json: OpenAIResponse = match serde_json::from_str(&text) {
            Ok(response) => response,
            Err(_) => {
                println!("Error calling OpenAI. Check environment variable OPENAI_KEY");
                std::process::exit(1);
            }
        };
        println!(
            "{}",
            json.choices[0]
                .text
                .split('\n')
                .map(|s| s.trim())
                .filter(|s| !s.is_empty())
                .collect::<Vec<_>>()
                .join("\n")
        );
    }
    Ok(())
}
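The OpenAI example carries a lot of argument and JSON handling around the actual request. A minimal sketch of the bare `post` flow, sending a small JSON body to httpbin.org (an arbitrary echo service used here for illustration, not something the crate depends on):

#[tokio::main]
async fn main() {
    use http::header;
    use reqwest::Body;
    // httpbin.org/post echoes the request back, which makes the round trip easy to inspect.
    if let Ok(req) = tomcat::post("https://httpbin.org/post").await {
        let body = Body::from(r#"{"hello":"world"}"#);
        let res = req
            .header(header::CONTENT_TYPE, "application/json")
            .body(body)
            .send()
            .await
            .unwrap();
        println!("{}", res.text().await.unwrap());
    }
}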
Blocking POST
fn main() {
    openai_blocking().unwrap();
}

fn openai_blocking() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    use std::env::{self, args};
    use reqwest::blocking::Body;
    use serde_derive::{Deserialize, Serialize};
    use http::header;

    #[derive(Serialize, Debug)]
    struct OpenAIRequest {
        model: String,
        prompt: String,
        max_tokens: u32,
        stop: String,
    }
    #[derive(Deserialize, Debug)]
    struct OpenAIChoices {
        text: String,
    }
    #[derive(Deserialize, Debug)]
    struct OpenAIResponse {
        choices: Vec<OpenAIChoices>,
    }

    let api_key = match env::var("OPENAI_KEY") {
        Ok(key) => key,
        Err(_) => {
            println!("Error: please create an environment variable OPENAI_KEY");
            std::process::exit(1);
        }
    };
    let uri = "https://api.openai.com/v1/completions";
    let model = String::from("text-davinci-002");
    let stop = String::from("Text");
    let default_prompt =
        "Given text, return 1 bash command. Text:list contents of a directory. Command:ls";

    // Collect everything after the program name as the user's prompt.
    let mut user_input = String::new();
    let mut arguments: Vec<String> = args().collect();
    arguments.remove(0);
    if arguments.is_empty() {
        println!("Welcome to Rusty! Enter an argument to get started.");
        std::process::exit(1);
    }
    for x in arguments {
        user_input.push(' ');
        user_input.push_str(&x);
    }

    let auth_header_val = format!("Bearer {}", api_key);
    let openai_request = OpenAIRequest {
        model,
        prompt: format!("{} Text:{}. Command:", default_prompt, user_input),
        max_tokens: 64,
        stop,
    };
    let body = Body::from(serde_json::to_vec(&openai_request)?);

    if let Ok(req) = tomcat::post_blocking(uri) {
        let res = req
            .header(header::CONTENT_TYPE, "application/json")
            .header("Authorization", &auth_header_val)
            .body(body)
            .send()
            .unwrap();
        let text = res.text().unwrap();
        let json: OpenAIResponse = match serde_json::from_str(&text) {
            Ok(response) => response,
            Err(_) => {
                println!("Error calling OpenAI. Check environment variable OPENAI_KEY");
                std::process::exit(1);
            }
        };
        println!(
            "{}",
            json.choices[0]
                .text
                .split('\n')
                .map(|s| s.trim())
                .filter(|s| !s.is_empty())
                .collect::<Vec<_>>()
                .join("\n")
        );
    }
    Ok(())
}
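The same request works without an async runtime through `post_blocking`. A minimal sketch, again using httpbin.org purely as a stand-in endpoint:

fn main() {
    use http::header;
    // Blocking variant of the POST flow; no Tokio runtime is needed here.
    if let Ok(req) = tomcat::post_blocking("https://httpbin.org/post") {
        let res = req
            .header(header::CONTENT_TYPE, "application/json")
            .body(r#"{"hello":"world"}"#)
            .send()
            .unwrap();
        println!("{}", res.text().unwrap());
    }
}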
Dependencies
~6–18MB
~268K SLoC