llm.rs

use blockless_sdk::llm::*;

/// This example demonstrates how to use the Blockless SDK to interact with two different LLM models.
///
/// It sets up two instances of the `BlocklessLlm` struct, one for a larger model and one for a smaller one.
/// Each instance is configured with a system message that tells the assistant to change its name
/// between requests. The example then sends chat requests to both models and prints their responses,
/// demonstrating how the same instance maintains conversation state between requests.
fn main() {
    // large model
    let mut llm = BlocklessLlm::new(Models::Mistral7BInstructV03(None)).unwrap();
    // small model
    let mut llm_small = BlocklessLlm::new(Models::Llama321BInstruct(None)).unwrap();
let prompt = r#"
You are a helpful assistant.
First time I ask, you name will be lucy.
Second time I ask, you name will be bob.
"#;
llm.set_options(LlmOptions::default().with_system_message(prompt.to_string()))
.unwrap();
let response = llm.chat_request("What is your name?").unwrap();
println!("llm Response: {}", response);
let prompt_smol = r#"
You are a helpful assistant.
First time I ask, you name will be daisy.
Second time I ask, you name will be hector.
"#;
llm_small
.set_options(LlmOptions::default().with_system_message(prompt_smol.to_string()))
.unwrap();
let response = llm_small.chat_request("What is your name?").unwrap();
println!("llm_small Response: {}", response);
    let response = llm_small.chat_request("What is your name?").unwrap();
    println!("llm_small Response: {}", response);

    // Ask the large model again to verify that the same instance (and its
    // conversation state) is reused by the host/runtime.
    let response = llm.chat_request("What is your name?").unwrap();
    println!("llm Response: {}", response);
}
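
// Note on building and running: the exact workflow depends on the Blockless tooling and is
// not shown in this file. As a rough sketch (an assumption, not taken from this example),
// Blockless apps typically compile to WebAssembly, so something along these lines may work,
// followed by executing the resulting .wasm module with the Blockless runtime:
//
//     cargo build --target wasm32-wasip1 --release
//
// Check the SDK's README for the actual commands and target.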