diff --git a/shinkai-bin/shinkai-node/src/llm_provider/execution/chains/generic_chain/generic_inference_chain.rs b/shinkai-bin/shinkai-node/src/llm_provider/execution/chains/generic_chain/generic_inference_chain.rs
index 2cb531e5d..b655b70ab 100644
--- a/shinkai-bin/shinkai-node/src/llm_provider/execution/chains/generic_chain/generic_inference_chain.rs
+++ b/shinkai-bin/shinkai-node/src/llm_provider/execution/chains/generic_chain/generic_inference_chain.rs
@@ -1289,34 +1289,51 @@ impl GenericInferenceChain {
                 }
             };
 
+            let original_response = function_response.response.clone();
             let max_input_tokens = ModelCapabilitiesManager::get_max_input_tokens(&provider_interface);
             let max_tokens_for_response = ((max_input_tokens as f64 * 0.9) as usize).max(1024); // Allow 90% of the context window, minimum 1024 tokens
-            let response_tokens = count_tokens_from_message_llama3(&function_response.response);
-            if response_tokens > max_tokens_for_response {
+            let response_tokens = count_tokens_from_message_llama3(&original_response);
+            let response_exceeded_limit = response_tokens > max_tokens_for_response;
+
+            if response_exceeded_limit {
+                // Tell the LLM why the tool response was skipped while keeping user visibility of the original output.
                 function_response.response = json!({
                     "max_tokens_for_response": max_tokens_for_response,
                     "max_input_tokens": max_input_tokens,
                     "response_tokens": response_tokens,
                     "response": "IMPORTANT: Function response exceeded model context window, try again with a smaller response or a more capable model.",
-                }).to_string();
+                })
+                .to_string();
             }
 
+            let user_visible_response = if response_exceeded_limit {
+                json!({
+                    "error": format!("This tool response exceeded the model context window ({} tokens > allowed {}).", response_tokens, max_tokens_for_response),
+                    "new_response": function_response.response,
+                    "original_response": original_response,
+                }).to_string()
+            } else {
+                original_response.clone()
+            };
+
             let mut function_call_with_router_key = function_call.clone();
             function_call_with_router_key.tool_router_key = Some(shinkai_tool.tool_router_key().to_string_without_version());
-            function_call_with_router_key.response = Some(function_response.response.clone());
+            function_call_with_router_key.response = Some(user_visible_response.clone());
             tool_calls_history.push(function_call_with_router_key);
 
-            // Trigger WS update after receiving function_response
+            // Trigger WS update after receiving function_response (show user the full tool output when available)
+            let mut user_function_response = function_response.clone();
+            user_function_response.response = user_visible_response.clone();
             Self::trigger_ws_update(
                 &ws_manager_trait,
                 &Some(full_job.job_id.clone()),
-                &function_response,
+                &user_function_response,
                 shinkai_tool.tool_router_key().to_string_without_version(),
             )
             .await;
 
-            // Store all function responses to use in the next prompt
+            // Store all function responses to use in the next prompt (LLM sees the sanitized version if needed)
             iteration_function_responses.push(function_response);
         }
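The hunk above splits a tool response into two views: a short JSON placeholder for the LLM when the raw output would not fit the context window, and a wrapper that keeps the original output visible to the user. Below is a minimal standalone sketch of that split (not part of the patch); count_tokens is a whitespace-based stand-in for count_tokens_from_message_llama3, and the 90%/1024 budget mirrors the code above.

use serde_json::json;

/// Stand-in token counter; the real code uses count_tokens_from_message_llama3.
fn count_tokens(text: &str) -> usize {
    text.split_whitespace().count()
}

/// Returns (llm_facing_response, user_visible_response).
fn split_tool_response(original: &str, max_input_tokens: usize) -> (String, String) {
    let max_tokens_for_response = ((max_input_tokens as f64 * 0.9) as usize).max(1024);
    let response_tokens = count_tokens(original);

    if response_tokens <= max_tokens_for_response {
        // Within budget: both the LLM and the user see the raw tool output.
        return (original.to_string(), original.to_string());
    }

    // Over budget: the LLM gets a short placeholder, the user keeps the full output.
    let llm_facing = json!({
        "max_tokens_for_response": max_tokens_for_response,
        "max_input_tokens": max_input_tokens,
        "response_tokens": response_tokens,
        "response": "IMPORTANT: Function response exceeded model context window, try again with a smaller response or a more capable model.",
    })
    .to_string();

    let user_visible = json!({
        "error": format!(
            "This tool response exceeded the model context window ({} tokens > allowed {}).",
            response_tokens, max_tokens_for_response
        ),
        "new_response": llm_facing.clone(),
        "original_response": original,
    })
    .to_string();

    (llm_facing, user_visible)
}

fn main() {
    let huge_output = "word ".repeat(5000);
    let (llm_facing, user_visible) = split_tool_response(&huge_output, 2048);
    println!("LLM sees: {}", llm_facing);
    println!("User sees {} bytes", user_visible.len());
}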
diff --git a/shinkai-bin/shinkai-node/src/llm_provider/execution/prompts/prompt_test.rs b/shinkai-bin/shinkai-node/src/llm_provider/execution/prompts/prompt_test.rs
index c8690663e..cda81e58a 100644
--- a/shinkai-bin/shinkai-node/src/llm_provider/execution/prompts/prompt_test.rs
+++ b/shinkai-bin/shinkai-node/src/llm_provider/execution/prompts/prompt_test.rs
@@ -52,6 +52,7 @@ mod tests {
             ToolOutputArg::empty(),
             None,
             "local:::__official_shinkai:::concat_strings".to_string(),
+            "1.0.0".to_string(),
         );
 
         let shinkai_tool = ShinkaiTool::Rust(tool, true);
@@ -216,6 +217,7 @@ mod tests {
             ToolOutputArg::empty(),
             None,
             "local:::__official_shinkai:::concat_strings".to_string(),
+            "1.0.0".to_string(),
         );
 
         let shinkai_tool = ShinkaiTool::Rust(tool, true);
diff --git a/shinkai-bin/shinkai-node/src/llm_provider/providers/shared/claude_api.rs b/shinkai-bin/shinkai-node/src/llm_provider/providers/shared/claude_api.rs
index cc33f4a69..de0df1cb4 100644
--- a/shinkai-bin/shinkai-node/src/llm_provider/providers/shared/claude_api.rs
+++ b/shinkai-bin/shinkai-node/src/llm_provider/providers/shared/claude_api.rs
@@ -8,27 +8,7 @@ use shinkai_message_primitives::schemas::llm_providers::serialized_llm_provider:
 use shinkai_message_primitives::schemas::prompts::Prompt;
 use std::collections::HashMap;
 use uuid::Uuid;
-use super::shared_model_logic::get_image_type;
-
-fn sanitize_tool_name(name: &str) -> String {
-    let sanitized: String = name
-        .chars()
-        .map(|c| {
-            if c.is_alphanumeric() || c == '_' || c == '-' {
-                c.to_ascii_lowercase()
-            } else {
-                '_'
-            }
-        })
-        .collect();
-
-    // Ensure length is between 1 and 64 characters
-    if sanitized.is_empty() {
-        "tool".to_string()
-    } else {
-        sanitized.chars().take(64).collect()
-    }
-}
+use super::shared_model_logic::{get_image_type, sanitize_tool_name};
 
 pub fn claude_prepare_messages(
     model: &LLMProviderInterface,
diff --git a/shinkai-bin/shinkai-node/src/llm_provider/providers/shared/gemini_api.rs b/shinkai-bin/shinkai-node/src/llm_provider/providers/shared/gemini_api.rs
index 3545d6973..e6f8c8529 100644
--- a/shinkai-bin/shinkai-node/src/llm_provider/providers/shared/gemini_api.rs
+++ b/shinkai-bin/shinkai-node/src/llm_provider/providers/shared/gemini_api.rs
@@ -1,4 +1,4 @@
-use super::shared_model_logic::{get_image_type, get_video_type, get_audio_type};
+use super::shared_model_logic::{get_image_type, get_video_type, get_audio_type, sanitize_tool_name};
 use crate::llm_provider::error::LLMProviderError;
 use crate::managers::model_capabilities_manager::ModelCapabilitiesManager;
 use crate::managers::model_capabilities_manager::PromptResult;
@@ -301,9 +301,7 @@ pub fn gemini_prepare_messages(model: &LLMProviderInterface, prompt: Prompt) ->
                 }
 
                 serde_json::json!({
-                    "name": function.name.chars()
-                        .map(|c| if c.is_alphanumeric() || c == '_' || c == '-' { c.to_ascii_lowercase() } else { '_' })
-                        .collect::<String>(),
+                    "name": sanitize_tool_name(&function.name),
                     "description": function.description,
                     "parameters": function.parameters
                 })
diff --git a/shinkai-bin/shinkai-node/src/llm_provider/providers/shared/ollama_api.rs b/shinkai-bin/shinkai-node/src/llm_provider/providers/shared/ollama_api.rs
index 3f7a90b91..a6b6273e4 100644
--- a/shinkai-bin/shinkai-node/src/llm_provider/providers/shared/ollama_api.rs
+++ b/shinkai-bin/shinkai-node/src/llm_provider/providers/shared/ollama_api.rs
@@ -1,6 +1,7 @@
 use shinkai_message_primitives::schemas::{
-    llm_message::{LlmMessage, DetailedFunctionCall}, llm_providers::serialized_llm_provider::LLMProviderInterface, prompts::Prompt
+    llm_message::LlmMessage, llm_providers::serialized_llm_provider::LLMProviderInterface, prompts::Prompt
 };
+use super::shared_model_logic::sanitize_tool_name;
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
 
@@ -174,19 +175,7 @@ pub fn ollama_conversation_prepare_messages_with_tooling(
         .map(|mut tool| {
             if let Some(functions) = tool.functions.as_mut() {
                 for function in functions {
-                    // Replace any characters that aren't alphanumeric, underscore, or hyphen
-                    function.name = function
-                        .name
-                        .chars()
-                        .map(|c| {
-                            if c.is_alphanumeric() || c == '_' || c == '-' {
-                                c
-                            } else {
-                                '_'
-                            }
-                        })
-                        .collect::<String>()
-                        .to_lowercase();
+                    function.name = sanitize_tool_name(&function.name);
                 }
             }
             tool
@@ -245,7 +234,7 @@ mod tests {
     use serde_json::json;
     use shinkai_message_primitives::schemas::{
-        llm_providers::serialized_llm_provider::SerializedLLMProvider, subprompts::{SubPrompt, SubPromptAssetType, SubPromptType}
+        llm_message::DetailedFunctionCall, llm_providers::serialized_llm_provider::SerializedLLMProvider, subprompts::{SubPrompt, SubPromptAssetType, SubPromptType}
     };
 
     use super::*;
diff --git a/shinkai-bin/shinkai-node/src/llm_provider/providers/shared/openai_api.rs b/shinkai-bin/shinkai-node/src/llm_provider/providers/shared/openai_api.rs
index a1ad8bc63..776f09c69 100644
--- a/shinkai-bin/shinkai-node/src/llm_provider/providers/shared/openai_api.rs
+++ b/shinkai-bin/shinkai-node/src/llm_provider/providers/shared/openai_api.rs
@@ -7,9 +7,8 @@ use serde::{Deserialize, Serialize};
 use serde_json::{self};
 use shinkai_message_primitives::schemas::llm_providers::serialized_llm_provider::LLMProviderInterface;
 use shinkai_message_primitives::schemas::prompts::Prompt;
-use shinkai_message_primitives::schemas::subprompts::{SubPrompt, SubPromptType};
 
-use super::shared_model_logic;
+use super::shared_model_logic::{self, sanitize_tool_name};
 
 #[derive(Debug, Deserialize)]
 pub struct OpenAIResponse {
@@ -140,30 +139,7 @@ pub fn openai_prepare_messages(model: &LLMProviderInterface, prompt: Prompt) ->
         .map(|mut tool| {
             if let Some(functions) = tool.functions.as_mut() {
                 for function in functions {
-                    // Replace any characters that aren't alphanumeric, underscore, or hyphen
-                    let mut sanitized_name = function
-                        .name
-                        .chars()
-                        .map(|c| {
-                            if c.is_alphanumeric() || c == '_' || c == '-' {
-                                c
-                            } else {
-                                '_'
-                            }
-                        })
-                        .collect::<String>()
-                        .to_lowercase();
-
-                    // Truncate function name to OpenAI's 64-character limit
-                    // If name is too long, keep the end part (similar to agent_id truncation)
-                    let max_len = 64;
-                    if sanitized_name.len() > max_len {
-                        let chars: Vec<char> = sanitized_name.chars().collect();
-                        let start_index = chars.len() - max_len;
-                        sanitized_name = chars[start_index..].iter().collect();
-                    }
-
-                    function.name = sanitized_name;
+                    function.name = sanitize_tool_name(&function.name);
                 }
             }
             tool
diff --git a/shinkai-bin/shinkai-node/src/llm_provider/providers/shared/shared_model_logic.rs b/shinkai-bin/shinkai-node/src/llm_provider/providers/shared/shared_model_logic.rs
index 0532397f8..c241a4f5b 100644
--- a/shinkai-bin/shinkai-node/src/llm_provider/providers/shared/shared_model_logic.rs
+++ b/shinkai-bin/shinkai-node/src/llm_provider/providers/shared/shared_model_logic.rs
@@ -271,3 +271,32 @@ pub async fn send_tool_ws_update_with_status(
     }
     Ok(())
 }
+
+pub fn sanitize_tool_name(name: &str) -> String {
+    let sanitized: String = name
+        .chars()
+        .map(|c| {
+            if c.is_alphanumeric() || c == '_' || c == '-' {
+                c.to_ascii_lowercase()
+            } else {
+                '_'
+            }
+        })
+        .collect();
+
+    let mut result = if sanitized.is_empty() {
+        "tool".to_string()
+    } else {
+        sanitized.chars().take(64).collect()
+    };
+
+    // Ensure the name starts with a letter or underscore
+    if let Some(first_char) = result.chars().next() {
+        if !first_char.is_alphabetic() && first_char != '_' {
+            result = format!("t_{}", result);
+        }
+    }
+
+    // Ensure length is still within 64 characters after potential prefix
+    result.chars().take(64).collect()
+}
\ No newline at end of file
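The patch consolidates the per-provider name cleanup into this single sanitize_tool_name and additionally prefixes names that do not start with a letter or underscore. A sketch of the expected behaviour as a test module (not part of the patch; it assumes the tests are appended to shared_model_logic.rs itself):

#[cfg(test)]
mod sanitize_tool_name_behavior {
    use super::sanitize_tool_name;

    #[test]
    fn lowercases_and_replaces_invalid_characters() {
        assert_eq!(sanitize_tool_name("My Tool/v2"), "my_tool_v2");
    }

    #[test]
    fn prefixes_names_that_do_not_start_with_a_letter_or_underscore() {
        // '9' is alphanumeric, so it survives sanitization but then gets the "t_" prefix.
        assert_eq!(sanitize_tool_name("9lives"), "t_9lives");
    }

    #[test]
    fn falls_back_to_a_default_name_and_caps_length_at_64() {
        assert_eq!(sanitize_tool_name(""), "tool");
        assert_eq!(sanitize_tool_name(&"a".repeat(100)).chars().count(), 64);
    }
}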
diff --git a/shinkai-bin/shinkai-node/src/managers/tool_router.rs b/shinkai-bin/shinkai-node/src/managers/tool_router.rs
index 8dba79b9b..d7ef214c3 100644
--- a/shinkai-bin/shinkai-node/src/managers/tool_router.rs
+++ b/shinkai-bin/shinkai-node/src/managers/tool_router.rs
@@ -596,6 +596,7 @@ impl ToolRouter {
         println!("Adding {} Rust tools", rust_tools.len());
         let mut added_count = 0;
         let mut skipped_count = 0;
+        let mut upgraded_count = 0;
 
         for tool in rust_tools {
             let rust_tool = RustTool::new(
@@ -605,26 +606,66 @@ impl ToolRouter {
                 tool.output_arg,
                 None,
                 tool.tool_router_key,
+                tool.version,
             );
 
-            let _ = match self.sqlite_manager.get_tool_by_key(&rust_tool.tool_router_key) {
+            let router_key = rust_tool.tool_router_key.clone();
+            let new_version = IndexableVersion::from_string(&rust_tool.version).map_err(|e| {
+                ToolError::ParseError(format!("Invalid Rust tool version '{}': {}", rust_tool.version, e))
+            })?;
+
+            match self.sqlite_manager.get_tool_header_by_key(&router_key) {
                 Err(SqliteManagerError::ToolNotFound(_)) => {
                     added_count += 1;
                     self.sqlite_manager
                         .add_tool(ShinkaiTool::Rust(rust_tool, true))
                         .await
-                        .map_err(|e| ToolError::DatabaseError(e.to_string()))
+                        .map_err(|e| ToolError::DatabaseError(e.to_string()))?;
                 }
-                Err(e) => Err(ToolError::DatabaseError(e.to_string())),
-                Ok(_db_tool) => {
-                    skipped_count += 1;
-                    continue;
+                Err(e) => return Err(ToolError::DatabaseError(e.to_string())),
+                Ok(header) => {
+                    let current_version = IndexableVersion::from_string(&header.version).map_err(|e| {
+                        ToolError::ParseError(format!(
+                            "Invalid installed Rust tool version '{}': {}",
+                            header.version, e
+                        ))
+                    })?;
+
+                    if new_version > current_version {
+                        match self.sqlite_manager.get_tool_by_key(&router_key) {
+                            Ok(ShinkaiTool::Rust(existing_rust_tool, is_enabled)) => {
+                                let mut upgraded_tool = rust_tool.clone();
+                                if upgraded_tool.mcp_enabled.is_none() {
+                                    upgraded_tool.mcp_enabled = existing_rust_tool.mcp_enabled;
+                                }
+
+                                upgraded_count += 1;
+                                self.sqlite_manager
+                                    .upgrade_tool(ShinkaiTool::Rust(upgraded_tool, is_enabled))
+                                    .await
+                                    .map_err(|e| ToolError::DatabaseError(e.to_string()))?;
+                            }
+                            Ok(other_variant) => {
+                                skipped_count += 1;
+                                eprintln!(
+                                    "Expected Rust tool for key '{}' but found {:?}, skipping",
+                                    router_key,
+                                    other_variant.tool_type()
+                                );
+                            }
+                            Err(err) => {
+                                return Err(ToolError::DatabaseError(err.to_string()));
+                            }
+                        }
+                    } else {
+                        skipped_count += 1;
+                    }
                 }
-            }?;
+            }
         }
 
         println!(
-            "Rust tools installation complete - Added: {}, Skipped: {}",
-            added_count, skipped_count
+            "Rust tools installation complete - Added: {}, Upgraded: {}, Skipped: {}",
+            added_count, upgraded_count, skipped_count
         );
 
         Ok(())
     }
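The tool_router change above turns the old "skip if already installed" logic into a three-way decision gated by a version comparison. A self-contained sketch of that decision (not part of the patch; plain (major, minor, patch) tuples stand in for IndexableVersion, and add_tool/upgrade_tool are reduced to an enum):

#[derive(Debug, PartialEq)]
enum InstallAction {
    Add,
    Upgrade,
    Skip,
}

/// `installed` is the version already in the database, if any.
fn plan_install(bundled: (u32, u32, u32), installed: Option<(u32, u32, u32)>) -> InstallAction {
    match installed {
        None => InstallAction::Add,
        // Tuple comparison is lexicographic, mirroring a semver-style ordering.
        Some(current) if bundled > current => InstallAction::Upgrade,
        Some(_) => InstallAction::Skip,
    }
}

fn main() {
    // The bumped SQL processor (1.0.1) upgrades an existing 1.0.0 install...
    assert_eq!(plan_install((1, 0, 1), Some((1, 0, 0))), InstallAction::Upgrade);
    // ...while an already up-to-date or newer install is left untouched.
    assert_eq!(plan_install((1, 0, 1), Some((1, 0, 1))), InstallAction::Skip);
    assert_eq!(plan_install((1, 0, 1), None), InstallAction::Add);
    println!("upgrade gating behaves as expected");
}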
diff --git a/shinkai-bin/shinkai-node/src/network/v2_api/api_v2_commands_jobs.rs b/shinkai-bin/shinkai-node/src/network/v2_api/api_v2_commands_jobs.rs
index 2d9e4f82b..c19d9bed0 100644
--- a/shinkai-bin/shinkai-node/src/network/v2_api/api_v2_commands_jobs.rs
+++ b/shinkai-bin/shinkai-node/src/network/v2_api/api_v2_commands_jobs.rs
@@ -8,7 +8,7 @@ use serde_json::{json, Value};
 use shinkai_http_api::node_api_router::{APIError, SendResponseBody, SendResponseBodyData};
 use shinkai_message_primitives::{
     schemas::{
-        identity::Identity, inbox_name::InboxName, job::{ForkedJob, JobLike}, job_config::JobConfig, llm_providers::{common_agent_llm_provider::ProviderOrAgent, serialized_llm_provider::SerializedLLMProvider}, shinkai_name::{ShinkaiName, ShinkaiSubidentityType}, smart_inbox::{LLMProviderSubset, ProviderType, SmartInbox, V2SmartInbox}
+        identity::Identity, inbox_name::InboxName, inbox_permission::InboxPermission, job::{ForkedJob, JobLike}, job_config::JobConfig, llm_providers::{common_agent_llm_provider::ProviderOrAgent, serialized_llm_provider::SerializedLLMProvider}, shinkai_name::{ShinkaiName, ShinkaiSubidentityType}, smart_inbox::{LLMProviderSubset, ProviderType, SmartInbox, V2SmartInbox}
     },
     shinkai_message::{
         shinkai_message::{MessageBody, MessageData},
         shinkai_message_schemas::{
             APIChangeJobAgentRequest, ExportInboxMessagesFormat, JobCreationInfo, JobMessage, MessageSchemaType, V2ChatMessage
@@ -1371,7 +1371,7 @@ impl Node {
     pub async fn fork_job(
         db: Arc<SqliteManager>,
         _node_name: ShinkaiName,
-        _identity_manager: Arc<Mutex<IdentityManager>>,
+        identity_manager: Arc<Mutex<IdentityManager>>,
         job_id: String,
         message_id: Option<String>,
         node_encryption_sk: EncryptionStaticKey,
@@ -1385,6 +1385,28 @@ impl Node {
                 message: format!("Failed to retrieve job: {}", err),
             })?;
 
+        // Get the requesting identity to keep permissions aligned with the new forked job
+        let requesting_identity = {
+            let identity_manager = identity_manager.lock().await;
+            match identity_manager.get_main_identity() {
+                Some(Identity::Standard(identity)) => identity.clone(),
+                Some(_) => {
+                    return Err(APIError {
+                        code: StatusCode::INTERNAL_SERVER_ERROR.as_u16(),
+                        error: "Internal Server Error".to_string(),
+                        message: "Main identity is not a standard identity".to_string(),
+                    });
+                }
+                None => {
+                    return Err(APIError {
+                        code: StatusCode::INTERNAL_SERVER_ERROR.as_u16(),
+                        error: "Internal Server Error".to_string(),
+                        message: "Failed to get main identity".to_string(),
+                    });
+                }
+            }
+        };
+
         // Retrieve the message from the inbox
         let message_id = match message_id {
             Some(message_id) => message_id,
@@ -1456,6 +1478,24 @@ impl Node {
                 message: format!("Failed to create new job: {}", err),
             })?;
 
+        let forked_inbox_name =
+            InboxName::get_job_inbox_name_from_params(forked_job_id.clone()).map_err(|err| APIError {
+                code: StatusCode::INTERNAL_SERVER_ERROR.as_u16(),
+                error: "Internal Server Error".to_string(),
+                message: format!("Failed to build forked inbox name: {}", err),
+            })?;
+
+        db.add_permission(
+            &forked_inbox_name.to_string(),
+            &requesting_identity,
+            InboxPermission::Admin,
+        )
+        .map_err(|err| APIError {
+            code: StatusCode::INTERNAL_SERVER_ERROR.as_u16(),
+            error: "Internal Server Error".to_string(),
+            message: format!("Failed to add permissions for forked job: {}", err),
+        })?;
+
         // Fork the messages
         let mut forked_message_map: HashMap<String, String> = HashMap::new();
diff --git a/shinkai-bin/shinkai-node/src/tools/tool_implementation/native_tools/sql_processor.rs b/shinkai-bin/shinkai-node/src/tools/tool_implementation/native_tools/sql_processor.rs
index e35ac298a..e3052e2f7 100644
--- a/shinkai-bin/shinkai-node/src/tools/tool_implementation/native_tools/sql_processor.rs
+++ b/shinkai-bin/shinkai-node/src/tools/tool_implementation/native_tools/sql_processor.rs
@@ -58,13 +58,16 @@ INSERT INTO table_name (field_1, field_3, field_4)
 
 -- Example read:
 SELECT * FROM table_name WHERE field_2 > datetime('now', '-1 day');
-SELECT field_1, field_3 FROM table_name WHERE field_3 > 100 ORDER BY field_2 DESC LIMIT 10;"#
+SELECT field_1, field_3 FROM table_name WHERE field_3 > 100 ORDER BY field_2 DESC LIMIT 10;
+
+-- Changelog:
+- 1.0.1: Fixed parameters to be compliant with JSON schema."#
                 .to_string(),
             tool_router_key: "local:::__official_shinkai:::shinkai_sqlite_query_executor".to_string(),
             tool_type: "Rust".to_string(),
             formatted_tool_summary_for_ui: "Execute SQLite queries".to_string(),
             author: "@@official.shinkai".to_string(),
-            version: "1.0".to_string(),
+            version: "1.0.1".to_string(),
             enabled: true,
             mcp_enabled: Some(false),
             input_args: {
@@ -311,6 +314,7 @@ mod tests {
             mcp_enabled: sql_processor_tool.tool.mcp_enabled.clone(),
             input_args: sql_processor_tool.tool.input_args.clone(),
             output_arg: sql_processor_tool.tool.output_arg.clone(),
+            version: sql_processor_tool.tool.version.clone(),
             tool_embedding: sql_processor_tool._tool_embedding.clone(),
             tool_router_key: sql_processor_tool.tool.tool_router_key.clone(),
         };
diff --git a/shinkai-libs/shinkai-sqlite/src/lib.rs b/shinkai-libs/shinkai-sqlite/src/lib.rs
index 0e31008cb..a3e130d6e 100644
--- a/shinkai-libs/shinkai-sqlite/src/lib.rs
+++ b/shinkai-libs/shinkai-sqlite/src/lib.rs
@@ -1425,7 +1425,8 @@ impl SqliteManager {
             )?;
         }
 
-        // Step 4: Regenerate embeddings for file chunks
+        // Step 4: Regenerate embeddings for file chunks with smart truncation
+        // Keep existing chunks but truncate only if new model has smaller context window
         shinkai_log(
             ShinkaiLogOption::Database,
             ShinkaiLogLevel::Info,
@@ -1445,8 +1446,20 @@ impl SqliteManager {
             chunk_iter.collect::<Result<Vec<_>, _>>()?
         };
 
+        let new_model_max_tokens = new_model_type.max_input_token_count();
+        let mut truncated_count = 0;
+        let total_chunks = chunks.len();
+
         for (chunk_id, parsed_file_id, text) in chunks {
-            let embedding = embedding_generator.generate_embedding_default(&text).await
+            // Only truncate if the chunk exceeds new model's token limit
+            let processed_text = if text.chars().count() > new_model_max_tokens {
+                truncated_count += 1;
+                text.chars().take(new_model_max_tokens).collect()
+            } else {
+                text
+            };
+
+            let embedding = embedding_generator.generate_embedding_default(&processed_text).await
                 .map_err(|e| SqliteManagerError::SerializationError(format!("Chunk embedding generation failed: {}", e)))?;
 
             let conn = self.get_connection()?;
@@ -1456,7 +1469,37 @@ impl SqliteManager {
             )?;
         }
 
-        // Step 5: Update the database with the new model type
+        if truncated_count > 0 {
+            shinkai_log(
+                ShinkaiLogOption::Database,
+                ShinkaiLogLevel::Info,
+                &format!("Truncated {} out of {} chunks to fit new model's {} token limit",
+                    truncated_count, total_chunks, new_model_max_tokens),
+            );
+        } else {
+            shinkai_log(
+                ShinkaiLogOption::Database,
+                ShinkaiLogLevel::Info,
+                "No chunks needed truncation - all fit within new model's token limit",
+            );
+        }
+
+        // Step 5: Update parsed_files.embedding_model_used column for consistency
+        shinkai_log(
+            ShinkaiLogOption::Database,
+            ShinkaiLogLevel::Info,
+            "Updating parsed_files.embedding_model_used column",
+        );
+
+        {
+            let conn = self.get_connection()?;
+            conn.execute(
+                "UPDATE parsed_files SET embedding_model_used = ?1",
+                rusqlite::params![new_model_type.to_string()],
+            )?;
+        }
+
+        // Step 6: Update the database with the new model type
         self.update_default_embedding_model(new_model_type.clone())?;
 
         shinkai_log(
diff --git a/shinkai-libs/shinkai-sqlite/src/shinkai_tool_manager.rs b/shinkai-libs/shinkai-sqlite/src/shinkai_tool_manager.rs
index 22eff31b7..3e6de08b2 100644
--- a/shinkai-libs/shinkai-sqlite/src/shinkai_tool_manager.rs
+++ b/shinkai-libs/shinkai-sqlite/src/shinkai_tool_manager.rs
@@ -295,6 +295,9 @@ impl SqliteManager {
                 new_python.config = merged_config;
                 (old_config, ShinkaiTool::Python(new_python, is_enabled))
             }
+            (ShinkaiTool::Rust(_old_rust, _), ShinkaiTool::Rust(new_rust, is_enabled)) => {
+                (Vec::new(), ShinkaiTool::Rust(new_rust, is_enabled))
+            }
             _ => return Err(SqliteManagerError::ToolTypeMismatch),
         };
 
diff --git a/shinkai-libs/shinkai-tools-primitives/src/tools/rust_tools.rs b/shinkai-libs/shinkai-tools-primitives/src/tools/rust_tools.rs
index 53f57a26a..4b4e698a5 100644
--- a/shinkai-libs/shinkai-tools-primitives/src/tools/rust_tools.rs
+++ b/shinkai-libs/shinkai-tools-primitives/src/tools/rust_tools.rs
@@ -27,12 +27,18 @@ impl fmt::Display for RustToolError {
 
 impl std::error::Error for RustToolError {}
 
+fn default_rust_tool_version() -> String {
+    "1.0.0".to_string()
+}
+
 #[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize)]
 pub struct RustTool {
     pub name: String,
     pub description: String,
     pub input_args: Parameters,
     pub output_arg: ToolOutputArg,
+    #[serde(default = "default_rust_tool_version")]
+    pub version: String,
     pub tool_embedding: Option<Vec<f32>>,
     pub tool_router_key: String,
     pub mcp_enabled: Option<bool>,
@@ -46,12 +52,14 @@ impl RustTool {
         output_arg: ToolOutputArg,
         tool_embedding: Option<Vec<f32>>,
         tool_router_key: String,
+        version: String,
     ) -> Self {
         Self {
             name: utils::clean_string(&name),
             description,
             input_args,
             output_arg,
+            version,
             tool_embedding,
             tool_router_key,
             mcp_enabled: Some(false),
@@ -79,6 +87,7 @@ impl RustTool {
             description: header.description.clone(),
             input_args: header.input_args.clone(),
             output_arg: header.output_arg.clone(),
+            version: header.version.clone(),
             tool_embedding: None, // Assuming no embedding is provided in the header
             tool_router_key: header.tool_router_key.clone(),
             mcp_enabled: header.mcp_enabled,
@@ -106,7 +115,7 @@ impl RustTool {
         ToolPlaygroundMetadata {
             name: self.name.clone(),
-            version: "1.0.0".to_string(),
+            version: self.version.clone(),
             homepage: None,
             description: self.description.clone(),
             author: self.author(),
diff --git a/shinkai-libs/shinkai-tools-primitives/src/tools/shinkai_tool.rs b/shinkai-libs/shinkai-tools-primitives/src/tools/shinkai_tool.rs
index e4f1a29f9..85ec35d68 100644
--- a/shinkai-libs/shinkai-tools-primitives/src/tools/shinkai_tool.rs
+++ b/shinkai-libs/shinkai-tools-primitives/src/tools/shinkai_tool.rs
@@ -402,7 +402,7 @@ impl ShinkaiTool {
     /// Returns the version of the tool
     pub fn version(&self) -> String {
         match self {
-            ShinkaiTool::Rust(_r, _) => "1.0.0".to_string(),
+            ShinkaiTool::Rust(r, _) => r.version.clone(),
             ShinkaiTool::Network(n, _) => n.version.clone(),
             ShinkaiTool::Deno(d, _) => d.version.clone(),
             ShinkaiTool::Python(p, _) => p.version.clone(),
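rust_tools.rs gives RustTool a persisted version with a serde default, so Rust tool records written before this change still deserialize cleanly. A standalone sketch of that serde behaviour (not part of the patch; StoredRustTool is an illustrative stand-in, not the real struct):

use serde::Deserialize;

fn default_rust_tool_version() -> String {
    "1.0.0".to_string()
}

#[derive(Debug, Deserialize)]
struct StoredRustTool {
    name: String,
    #[serde(default = "default_rust_tool_version")]
    version: String,
}

fn main() -> Result<(), serde_json::Error> {
    // Legacy record serialized before the version field existed: the default kicks in.
    let legacy: StoredRustTool = serde_json::from_str(r#"{ "name": "concat_strings" }"#)?;
    assert_eq!(legacy.version, "1.0.0");

    // New records carry their own version and are left untouched.
    let current: StoredRustTool =
        serde_json::from_str(r#"{ "name": "shinkai_sqlite_query_executor", "version": "1.0.1" }"#)?;
    assert_eq!(current.version, "1.0.1");

    println!("{} -> {}, {} -> {}", legacy.name, legacy.version, current.name, current.version);
    Ok(())
}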