A local-first, private AI assistant for everyday use. Runs on-device models with encrypted P2P sync and supports sharing chats publicly on ATProto.
10
fork

Configure Feed

Select the types of activity you want to include in your feed.

feat: Renamed REPL commands + added models_used field to SharedSession

- models_used field in SharedSession is a list
- Each chat stores the name of the model it used, since a session
can ideally involve multiple models

+135 -23
+111 -4
tiles/src/core/chats.rs
··· 53 53 updated_at: u64, 54 54 row_counter: i64, 55 55 session_id: String, 56 + model_name: String, 56 57 } 57 58 58 59 #[derive(Debug, serde::Serialize, serde::Deserialize, Clone)] ··· 99 100 updated_at: get_unix_time_now(), 100 101 row_counter: row_counter + 1, 101 102 session_id: chat_resp.session_id, 103 + model_name: chat_resp.model_used, 102 104 }; 103 105 104 - conn.execute("insert into chats(id, user_id, content, resp_id, role, context_id, created_at, updated_at, row_counter, session_id) values (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10)", (&chat.id.to_string(), &chat.user_id, &chat.content, &chat.response_id, Into::<String>::into(chat.role), &chat.context_id, &chat.created_at.to_string(), &chat.updated_at.to_string(), &chat.row_counter, &chat.session_id))?; 106 + conn.execute("insert into chats(id, user_id, content, resp_id, role, context_id, created_at, updated_at, row_counter, session_id, model_name) values (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11)", (&chat.id.to_string(), &chat.user_id, &chat.content, &chat.response_id, Into::<String>::into(chat.role), &chat.context_id, &chat.created_at.to_string(), &chat.updated_at.to_string(), &chat.row_counter, &chat.session_id, &chat.model_name))?; 105 107 106 108 Ok(chat) 107 109 } ··· 138 140 139 141 /// Return a Delta of chats and sessions for the given `user_id` since `last_row_counter` 140 142 pub fn get_delta(conn: &Connection, user_id: &str, last_row_couter: i64) -> Result<DeltaChat> { 141 - let query = "select id, user_id, content, resp_id, role, context_id, created_at, updated_at , row_counter, session_id from chats where user_id = ?1 and row_counter > ?2 order by id"; 143 + let query = "select id, user_id, content, resp_id, role, context_id, created_at, updated_at , row_counter, session_id, model_name from chats where user_id = ?1 and row_counter > ?2 order by id"; 142 144 143 145 let lrc_str = last_row_couter.to_string(); 144 146 ··· 161 163 let updated_at: f64 = row.get(7)?; 162 164 let resp_id: 
Option<String> = row.get(3)?; 163 165 let ctx_id = row.get(5)?; 166 + let model_name_db: Option<String> = row.get(9)?; 167 + 168 + let model_name: String = model_name_db.unwrap_or("".to_owned()); 164 169 165 170 // This is to handle older versions which can have null session_id in DB 166 171 let session_id_db: Option<String> = row.get(9)?; ··· 189 194 updated_at: updated_at as u64, 190 195 row_counter: row.get(8)?, 191 196 session_id, 197 + model_name, 192 198 }) 193 199 })?; 194 200 ··· 397 403 Ok(sessions) 398 404 } 399 405 406 + pub fn fetch_models_used_by_session(conn: &Connection, session_id: &str) -> Result<Vec<String>> { 407 + let query = "select distinct model_name from chats where session_id = ?1"; 408 + 409 + let mut stmt = conn.prepare(query)?; 410 + let model_names_rows = stmt.query_map([session_id], |row| { 411 + let model_opt: Option<String> = row.get(0)?; 412 + Ok(model_opt.unwrap_or("".to_owned())) 413 + })?; 414 + 415 + let mut model_names: Vec<String> = vec![]; 416 + 417 + for model_name in model_names_rows { 418 + if let Ok(model) = model_name 419 + && !model.is_empty() 420 + { 421 + model_names.push(model); 422 + } 423 + } 424 + Ok(model_names) 425 + } 400 426 fn encode_delta_to_bytes(delta_chats: &DeltaChat) -> Vec<u8> { 401 427 postcard::to_stdvec(delta_chats).expect("Failed to convert to bytes with postcard") 402 428 } ··· 427 453 account::local::{ACCOUNT, User}, 428 454 chats::{ 429 455 apply_delta, create_session, decode_delta_from_bytes, encode_delta_to_bytes, 430 - get_delta, get_last_row_counter, save_chat, 456 + fetch_models_used_by_session, get_delta, get_last_row_counter, save_chat, 431 457 }, 432 458 }, 433 459 runtime::mlx::ChatResponse, ··· 448 474 prev_response_id: None, 449 475 parent_chat_id: None, 450 476 metrics: None, 477 + model_used: "gpt-oss".to_owned(), 451 478 }; 452 479 let chat = save_chat(&conn, &user, chat_response).expect("chat should be saved"); 453 480 ··· 477 504 prev_response_id: None, 478 505 parent_chat_id: 
Some(parent_chat_id.clone()), 479 506 metrics: None, 507 + model_used: "gpt-oss".to_owned(), 480 508 }; 481 509 let chat = save_chat(&conn, &user, chat_response.clone()).expect("chat should be saved"); 482 510 ··· 502 530 prev_response_id: Some(Uuid::now_v7().to_string()), 503 531 parent_chat_id: Some(Uuid::now_v7().to_string()), 504 532 metrics: None, 533 + model_used: "gpt-oss".to_owned(), 505 534 }; 506 535 507 536 let chat = save_chat(&conn, &user, chat_response.clone()).expect("chat should be saved"); ··· 524 553 prev_response_id: None, 525 554 parent_chat_id: None, 526 555 metrics: None, 556 + model_used: "gpt-oss".to_owned(), 527 557 }; 528 558 let chat = 529 559 save_chat(&conn, &user, chat_response).expect("empty content should still be saved"); ··· 545 575 prev_response_id: None, 546 576 parent_chat_id: None, 547 577 metrics: None, 578 + model_used: "gpt-oss".to_owned(), 548 579 }; 549 580 let result = save_chat(&conn, &user, chat_response); 550 581 ··· 563 594 prev_response_id: None, 564 595 parent_chat_id: None, 565 596 metrics: None, 597 + model_used: "gpt-oss".to_owned(), 566 598 }; 567 599 let chat = save_chat(&conn, &user, chat_response).expect("chat should be saved"); 568 600 ··· 595 627 prev_response_id: None, 596 628 parent_chat_id: None, 597 629 metrics: None, 630 + model_used: "gpt-oss".to_owned(), 598 631 }; 599 632 let chat_1 = save_chat(&conn, &user, chat_response.clone()).expect("chat should be saved"); 600 633 let _ = save_chat(&conn, &user, chat_response.clone()).expect("chat should be saved"); ··· 619 652 prev_response_id: None, 620 653 parent_chat_id: None, 621 654 metrics: None, 655 + model_used: "gpt-oss".to_owned(), 622 656 }; 623 657 624 658 conn.execute("insert into chats(id, user_id, content, resp_id, role, context_id, created_at, updated_at, row_counter, session_id) values (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10)", (Uuid::now_v7().to_string(), &user.user_id, &chat_response.input, None::<String>, 
Into::<String>::into(chat_response.role), &chat_response.parent_chat_id, get_unix_time_now().to_string(), get_unix_time_now().to_string(), 1, None::<String>)).unwrap(); ··· 641 675 prev_response_id: None, 642 676 parent_chat_id: None, 643 677 metrics: None, 678 + model_used: "gpt-oss".to_owned(), 644 679 }; 645 680 let _chat_1 = save_chat(&conn, &user, chat_response.clone()).expect("chat should be saved"); 646 681 let _ = save_chat(&conn, &user, chat_response.clone()).expect("chat should be saved"); ··· 664 699 prev_response_id: None, 665 700 parent_chat_id: None, 666 701 metrics: None, 702 + model_used: "gpt-oss".to_owned(), 667 703 }; 668 704 create_session(&conn, "session_abc", "sesh", &user.user_id).unwrap(); 669 705 let _chat_1 = save_chat(&conn, &user, chat_response.clone()).expect("chat should be saved"); ··· 689 725 prev_response_id: None, 690 726 parent_chat_id: None, 691 727 metrics: None, 728 + model_used: "gpt-oss".to_owned(), 692 729 }; 693 730 create_session(&conn, "session_abc", "sesh", &user.user_id).unwrap(); 694 731 let _chat_1 = save_chat(&conn, &user, chat_response.clone()).expect("chat should be saved"); ··· 706 743 prev_response_id: None, 707 744 parent_chat_id: None, 708 745 metrics: None, 746 + model_used: "gpt-oss".to_owned(), 709 747 }; 710 748 let _chat_1 = save_chat(&conn, &user, chat_response.clone()).expect("chat should be saved"); 711 749 let _ = save_chat(&conn, &user, chat_response.clone()).expect("chat should be saved"); ··· 727 765 prev_response_id: None, 728 766 parent_chat_id: None, 729 767 metrics: None, 768 + model_used: "gpt-oss".to_owned(), 730 769 }; 731 770 let _chat_1 = save_chat(&conn, &user, chat_response.clone()).expect("chat should be saved"); 732 771 let _ = save_chat(&conn, &user, chat_response.clone()).expect("chat should be saved"); ··· 751 790 prev_response_id: None, 752 791 parent_chat_id: None, 753 792 metrics: None, 793 + model_used: "gpt-oss".to_owned(), 754 794 }; 755 795 let _chat_1 = save_chat(&conn, 
&user, chat_response.clone()).expect("chat should be saved"); 756 796 let _ = save_chat(&conn, &user, chat_response.clone()).expect("chat should be saved"); ··· 778 818 prev_response_id: None, 779 819 parent_chat_id: None, 780 820 metrics: None, 821 + model_used: "gpt-oss".to_owned(), 781 822 }; 782 823 let _chat_1 = save_chat(&conn, &user, chat_response.clone()).expect("chat should be saved"); 783 824 let _ = save_chat(&conn, &user, chat_response.clone()).expect("chat should be saved"); ··· 807 848 prev_response_id: None, 808 849 parent_chat_id: None, 809 850 metrics: None, 851 + model_used: "gpt-oss".to_owned(), 810 852 }; 811 853 let _chat_1 = save_chat(&conn, &user, chat_response.clone()).expect("chat should be saved"); 812 854 let _ = save_chat(&conn, &user, chat_response.clone()).expect("chat should be saved"); ··· 836 878 prev_response_id: None, 837 879 parent_chat_id: None, 838 880 metrics: None, 881 + model_used: "gpt-oss".to_owned(), 839 882 }; 840 883 let chat_1 = save_chat(&conn, &user, chat_response.clone()).expect("chat should be saved"); 841 884 let _ = save_chat(&conn, &user, chat_response.clone()).expect("chat should be saved"); ··· 860 903 prev_response_id: None, 861 904 parent_chat_id: None, 862 905 metrics: None, 906 + model_used: "gpt-oss".to_owned(), 863 907 }; 864 908 let _chat_1 = save_chat(&conn, &user, chat_response.clone()).expect("chat should be saved"); 865 909 let _ = save_chat(&conn, &user, chat_response.clone()).expect("chat should be saved"); ··· 897 941 prev_response_id: None, 898 942 parent_chat_id: None, 899 943 metrics: None, 944 + model_used: "gpt-oss".to_owned(), 900 945 }; 901 946 let _chat_1 = 902 947 save_chat(&conn, &user_a, chat_response.clone()).expect("chat should be saved"); ··· 914 959 prev_response_id: None, 915 960 parent_chat_id: None, 916 961 metrics: None, 962 + model_used: "gpt-oss".to_owned(), 917 963 }; 918 964 let _chat_1 = 919 965 save_chat(&conn_2, &user_b, chat_response.clone()).expect("chat should be 
saved"); ··· 1018 1064 assert_eq!(user.user_id, session_2.creator_id); 1019 1065 } 1020 1066 1067 + #[test] 1068 + fn test_fetching_models_used_in_session() { 1069 + let conn = setup_db_schema(); 1070 + let user = create_user(); 1071 + let input = "2+2"; 1072 + let chat_response = ChatResponse { 1073 + input: input.to_owned(), 1074 + session_id: String::from("session_abc"), 1075 + role: Role::User, 1076 + code: None, 1077 + prev_response_id: None, 1078 + parent_chat_id: None, 1079 + metrics: None, 1080 + model_used: "gpt-oss".to_owned(), 1081 + }; 1082 + 1083 + let chat_response_2 = ChatResponse { 1084 + input: input.to_owned(), 1085 + session_id: String::from("session_abc"), 1086 + role: Role::User, 1087 + code: None, 1088 + prev_response_id: None, 1089 + parent_chat_id: None, 1090 + metrics: None, 1091 + model_used: "kimi".to_owned(), 1092 + }; 1093 + create_session(&conn, "session_abc", "sesh", &user.user_id).unwrap(); 1094 + let _chat_1 = save_chat(&conn, &user, chat_response.clone()).expect("chat should be saved"); 1095 + let _ = save_chat(&conn, &user, chat_response_2.clone()).expect("chat should be saved"); 1096 + let _ = save_chat(&conn, &user, chat_response.clone()).expect("chat should be saved"); 1097 + let _ = save_chat(&conn, &user, chat_response.clone()).expect("chat should be saved"); 1098 + 1099 + conn.execute("insert into chats(id, user_id, content, resp_id, role, context_id, created_at, updated_at, row_counter, session_id, model_name) values (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11)", (Uuid::now_v7().to_string(), &user.user_id, &chat_response.input, None::<String>, Into::<String>::into(chat_response.role), &chat_response.parent_chat_id, get_unix_time_now().to_string(), get_unix_time_now().to_string(), 1, "session_abc".to_owned(), None::<String>)).unwrap(); 1100 + 1101 + create_session(&conn, "session_def", "sesh-2", &user.user_id).unwrap(); 1102 + 1103 + let input = "4+4"; 1104 + let chat_response = ChatResponse { 1105 + input: 
input.to_owned(), 1106 + session_id: String::from("session_def"), 1107 + role: Role::User, 1108 + code: None, 1109 + prev_response_id: None, 1110 + parent_chat_id: None, 1111 + metrics: None, 1112 + model_used: "gpt-oss".to_owned(), 1113 + }; 1114 + let _chat_1 = save_chat(&conn, &user, chat_response.clone()).expect("chat should be saved"); 1115 + let _ = save_chat(&conn, &user, chat_response.clone()).expect("chat should be saved"); 1116 + 1117 + let rows = get_delta(&conn, &user.user_id, 0).unwrap(); 1118 + assert_eq!(rows.sessions.len(), 2); 1119 + assert_eq!(rows.chats.len(), 7); 1120 + let models = fetch_models_used_by_session(&conn, "session_abc").unwrap(); 1121 + 1122 + assert_eq!(models.len(), 2); 1123 + assert_eq!(models[0], "gpt-oss".to_owned()); 1124 + assert_eq!(models[1], "kimi"); 1125 + } 1126 + 1021 1127 struct SavedChatRow { 1022 1128 content: String, 1023 1129 resp_id: Option<String>, ··· 1092 1198 created_at INTEGER NOT NULL DEFAULT (strftime('%s','now')), 1093 1199 updated_at INTEGER NOT NULL DEFAULT (strftime('%s','now')), 1094 1200 row_counter INTEGER, 1095 - session_id TEXT 1201 + session_id TEXT, 1202 + model_name TEXT 1096 1203 );", 1097 1204 [], 1098 1205 )
+1
tiles/src/core/storage/db.rs
··· 91 91 )", 92 92 ), 93 93 M::up("CREATE INDEX idx_chats_session_id ON chats(session_id);"), 94 + M::up("ALTER TABLE CHATS ADD COLUMN model_name TEXT;"), 94 95 ]; 95 96 96 97 const CHATS_MIGRATIONS: Migrations = Migrations::from_slice(CHATS_MIGRATION_ARRAY);
+23 -19
tiles/src/runtime/mlx.rs
··· 1 1 use crate::core::account::atproto::share_session; 2 2 use crate::core::account::local::get_current_user; 3 3 use crate::core::chats::{ 4 - Message, create_session, fetch_chats_by_session_id, fetch_sessions, save_chat, 4 + Message, create_session, fetch_chats_by_session_id, fetch_models_used_by_session, 5 + fetch_sessions, save_chat, 5 6 }; 6 7 use crate::core::storage::db::Dbconn; 7 8 use crate::runtime::RunArgs; ··· 68 69 pub prev_response_id: Option<String>, 69 70 pub parent_chat_id: Option<String>, 70 71 pub metrics: Option<BenchmarkMetrics>, 72 + pub model_used: String, 71 73 } 72 74 73 75 #[derive(Serialize, Deserialize, Debug)] ··· 264 266 265 267 #[derive(Deserialize, Serialize, Debug)] 266 268 enum CommandType { 267 - #[serde(rename = "get_state")] 268 - State, 269 + #[serde(rename = "status")] 270 + Status, 269 271 #[serde(rename = "share")] 270 272 Share, 271 - #[serde(rename = "list-sessions")] 272 - ListSessions, 273 - #[serde(rename = "load-session")] 274 - LoadSession, 273 + #[serde(rename = "sessions")] 274 + Sessions, 275 + #[serde(rename = "resume")] 276 + Resume, 275 277 #[serde(other)] 276 278 Unknown, 277 279 } ··· 284 286 name: String, 285 287 contents: Vec<SharedContent>, 286 288 created_at: String, 289 + models_used: Vec<String>, 287 290 } 288 291 289 292 #[derive(Serialize, Deserialize, Debug)] ··· 314 317 fn show_help() { 315 318 let help_list = vec![ 316 319 ("status", "Show the current session state"), 317 - ("list-sessions", "List available sessions"), 320 + ("sessions", "List available sessions"), 318 321 ( 319 322 "share", 320 323 "Create a shareable link for currently running session", 321 324 ), 322 - ( 323 - "load-session <sessionId>", 324 - "Loads and resume the given session", 325 - ), 325 + ("resume <sessionId>", "Loads and resume the given session"), 326 326 ( 327 327 "share", 328 328 "Create a shareable link for currently running session", ··· 405 405 let mut session_id = String::new(); 406 406 let pi_stdin = 
pi_process.stdin.as_mut().unwrap(); 407 407 let mut stdout = pi_process.stdout.take().expect("stdout"); 408 - let inti_cmd_payload = get_command_payload(CommandType::State); 408 + let inti_cmd_payload = get_command_payload(CommandType::Status); 409 409 send_to_pi(pi_stdin, inti_cmd_payload).inspect_err(|_e| eprintln!("send pi failed"))?; 410 410 411 411 //TODO: Refactor session_id fetching ··· 424 424 loop { 425 425 let readline = editor.readline(">>> "); 426 426 let input = match readline { 427 - Ok(line) => line.trim().to_string(), 427 + Ok(line) => line.trim().to_string().to_lowercase(), 428 428 Err(_) => { 429 429 //TODO: Panic when entering another prompt after ctr-l C 430 430 // called `Result::unwrap()` on an `Err` value: Os { code: 32, kind: BrokenPipe, message: "Broken pipe" } ··· 488 488 process_share_session(db_conn, &session_id, &args).await?; 489 489 continue; 490 490 } 491 - CommandType::ListSessions => { 491 + CommandType::Sessions => { 492 492 show_session_info(db_conn)?; 493 493 continue; 494 494 } 495 - CommandType::LoadSession => { 495 + CommandType::Resume => { 496 496 match load_session(db_conn, &args) { 497 497 Ok((sesh_id, turn_count)) => { 498 498 session_id = sesh_id; ··· 542 542 // on agent end create a new session entry, only for the 543 543 // first time 544 544 if session_turn_count == 1 { 545 - info!("Created session {}", session_id); 545 + // info!("Created session {}", session_id); 546 546 create_session(&db_conn.chat, &session_id, &input, &current_user.user_id)?; 547 547 } 548 548 let parent_chat_id = if session_turn_count == 1 { ··· 558 558 prev_response_id: None, 559 559 parent_chat_id, 560 560 metrics: None, 561 + model_used: modelname.clone(), 561 562 }; 562 563 let prompt_chat = save_chat(&db_conn.chat, &current_user, chat_response)?; 563 564 last_chat_id = prompt_chat.id; ··· 572 573 prev_response_id: None, 573 574 parent_chat_id: Some(last_chat_id.clone()), 574 575 metrics: None, 576 + model_used: modelname.clone(), 575 577 
}; 576 578 let chat = save_chat(&db_conn.chat, &current_user, chat_response)?; 577 579 last_chat_id = chat.id; ··· 809 811 "type": "none" 810 812 }) 811 813 } 812 - CommandType::State => { 814 + CommandType::Status => { 813 815 json!({ 814 816 "type": "get_state", 815 817 }) ··· 822 824 fn process_command(cmd: CommandType, data: Option<Value>) -> Result<()> { 823 825 match cmd { 824 826 CommandType::Unknown => (), 825 - CommandType::State => { 827 + CommandType::Status => { 826 828 let state: GetStateData = serde_json::from_value(data.unwrap())?; 827 829 println!("{:?}", state); 828 830 use std::io::Write; ··· 869 871 }); 870 872 } 871 873 874 + let models_used = fetch_models_used_by_session(&conn.chat, session_id)?; 872 875 let shared_sessions = SharedSession { 873 876 r#type: "run.tiles.session".to_string(), 874 877 session_id: session_id.to_string(), 875 878 name: session.name.clone(), 876 879 contents: shared_contents, 877 880 created_at: Datetime::now().as_str().to_string(), 881 + models_used, 878 882 }; 879 883 880 884 share_session(&conn.common, shared_sessions).await?;