From 5be36b0d45c136c9cdb6f907fa598751a323f824 Mon Sep 17 00:00:00 2001
From: Jason Stollings
Date: Mon, 15 Sep 2025 11:55:39 -0500
Subject: [PATCH] Update ollama_file_ops_agent.ipynb

Fixes the error that is caused by the wrapped params
---
 .../01-ollama-model/ollama_file_ops_agent.ipynb | 9 +++------
 1 file changed, 3 insertions(+), 6 deletions(-)

diff --git a/01-tutorials/01-fundamentals/02-model-providers/01-ollama-model/ollama_file_ops_agent.ipynb b/01-tutorials/01-fundamentals/02-model-providers/01-ollama-model/ollama_file_ops_agent.ipynb
index e949626a..513591a8 100644
--- a/01-tutorials/01-fundamentals/02-model-providers/01-ollama-model/ollama_file_ops_agent.ipynb
+++ b/01-tutorials/01-fundamentals/02-model-providers/01-ollama-model/ollama_file_ops_agent.ipynb
@@ -285,16 +285,13 @@
    "execution_count": null,
    "metadata": {},
    "outputs": [],
    "source": [
     "ollama_model = OllamaModel(\n",
     "    model_id=model_id,\n",
     "    host=\"http://localhost:11434\",\n",
-    "    params={\n",
-    "        \"max_tokens\": 4096,  # Adjust based on your model's capabilities\n",
-    "        \"temperature\": 0.7,  # Lower for more deterministic responses, higher for more creative\n",
-    "        \"top_p\": 0.9,  # Nucleus sampling parameter\n",
-    "        \"stream\": True,  # Enable streaming responses\n",
-    "    },\n",
+    "    max_tokens=4096,\n",
+    "    temperature=0.7,\n",
+    "    top_p=0.9,\n",
     ")\n",
     "\n",
     "# Create the agent\n",