From 80f240b8a55b54cdbb91ef6f92c923d83c2141a6 Mon Sep 17 00:00:00 2001 From: D1m7asis Date: Thu, 7 Aug 2025 12:03:42 +0200 Subject: [PATCH] Add AI/ML API integration docs and package config Introduces documentation notebooks for AI/ML API integration covering chat, LLM, provider, and text embedding use cases. Also adds the langchain-aimlapi package to the packages.yml configuration for package management. --- docs/docs/integrations/chat/aimlapi.ipynb | 288 ++++++++++++++ docs/docs/integrations/llms/aimlapi.ipynb | 357 ++++++++++++++++++ .../docs/integrations/providers/aimlapi.ipynb | 272 +++++++++++++ .../integrations/text_embedding/aimlapi.ipynb | 319 ++++++++++++++++ docs/src/theme/FeatureTables.js | 23 ++ libs/packages.yml | 3 + 6 files changed, 1262 insertions(+) create mode 100644 docs/docs/integrations/chat/aimlapi.ipynb create mode 100644 docs/docs/integrations/llms/aimlapi.ipynb create mode 100644 docs/docs/integrations/providers/aimlapi.ipynb create mode 100644 docs/docs/integrations/text_embedding/aimlapi.ipynb diff --git a/docs/docs/integrations/chat/aimlapi.ipynb b/docs/docs/integrations/chat/aimlapi.ipynb new file mode 100644 index 0000000000000..de717e0e06fb6 --- /dev/null +++ b/docs/docs/integrations/chat/aimlapi.ipynb @@ -0,0 +1,288 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "fbeb3f1eb129d115", + "metadata": { + "collapsed": false + }, + "source": [ + "---\n", + "sidebar_label: AI/ML API\n", + "---" + ] + }, + { + "cell_type": "markdown", + "id": "6051ba9cfc65a60a", + "metadata": { + "collapsed": false + }, + "source": [ + "# ChatAimlapi\n", + "\n", + "This page will help you get started with AI/ML API [chat models](/docs/concepts/chat_models.mdx). For detailed documentation of all ChatAimlapi features and configurations, head to the [API reference](https://docs.aimlapi.com/?utm_source=langchain&utm_medium=github&utm_campaign=integration).\n", + "\n", + "AI/ML API provides access to **300+ models** (Deepseek, Gemini, ChatGPT, etc.) 
via high-uptime and high-rate API." + ] + }, + { + "cell_type": "markdown", + "id": "512f94fa4bea2628", + "metadata": { + "collapsed": false + }, + "source": [ + "## Overview\n", + "### Integration details\n", + "\n", + "| Class | Package | Local | Serializable | JS support | Package downloads | Package latest |\n", + "| :--- | :--- | :---: | :---: | :---: | :---: | :---: |\n", + "| ChatAimlapi | langchain-aimlapi | ✅ | beta | ❌ | ![PyPI - Downloads](https://img.shields.io/pypi/dm/langchain-aimlapi?style=flat-square&label=%20) | ![PyPI - Version](https://img.shields.io/pypi/v/langchain-aimlapi?style=flat-square&label=%20) |" + ] + }, + { + "cell_type": "markdown", + "id": "7163684608502d37", + "metadata": { + "collapsed": false + }, + "source": [ + "### Model features\n", + "| Tool calling | Structured output | JSON mode | Image input | Audio input | Video input | Token-level streaming | Native async | Token usage | Logprobs |\n", + "|:------------:|:-----------------:|:---------:|:-----------:|:-----------:|:-----------:|:---------------------:|:------------:|:-----------:|:--------:|\n", + "| ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ |\n" + ] + }, + { + "cell_type": "markdown", + "id": "bb9345d5b24a7741", + "metadata": { + "collapsed": false + }, + "source": [ + "## Setup\n", + "To access AI/ML API models, sign up at [aimlapi.com](https://aimlapi.com/app/?utm_source=langchain&utm_medium=github&utm_campaign=integration), generate an API key, and set the `AIMLAPI_API_KEY` environment variable:" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "b26280519672f194", + "metadata": { + "collapsed": false, + "ExecuteTime": { + "end_time": "2025-08-07T07:16:58.837623Z", + "start_time": "2025-08-07T07:16:55.346214Z" + } + }, + "outputs": [], + "source": [ + "import getpass\n", + "import os\n", + "\n", + "if \"AIMLAPI_API_KEY\" not in os.environ:\n", + " os.environ[\"AIMLAPI_API_KEY\"] = getpass.getpass(\"Enter your AI/ML API key: \")" + ] + }, + { + 
"cell_type": "markdown", + "id": "fa131229e62dfd47", + "metadata": { + "collapsed": false + }, + "source": [ + "### Installation\n", + "Install the `langchain-aimlapi` package:" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "3777dc00d768299e", + "metadata": { + "collapsed": false, + "ExecuteTime": { + "end_time": "2025-08-07T07:17:11.195741Z", + "start_time": "2025-08-07T07:17:02.288142Z" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Note: you may need to restart the kernel to use updated packages.\n" + ] + } + ], + "source": [ + "%pip install -qU langchain-aimlapi" + ] + }, + { + "cell_type": "markdown", + "id": "d168108b0c4f9d7", + "metadata": { + "collapsed": false + }, + "source": [ + "## Instantiation\n", + "Now we can instantiate the `ChatAimlapi` model and generate chat completions:" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "f29131e65e47bd16", + "metadata": { + "collapsed": false, + "ExecuteTime": { + "end_time": "2025-08-07T07:17:23.499746Z", + "start_time": "2025-08-07T07:17:11.196747Z" + } + }, + "outputs": [], + "source": [ + "from langchain_aimlapi import ChatAimlapi\n", + "\n", + "llm = ChatAimlapi(\n", + " model=\"meta-llama/Llama-3-70b-chat-hf\",\n", + " temperature=0.7,\n", + " max_tokens=512,\n", + " timeout=30,\n", + " max_retries=3,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "861b87289f8e146d", + "metadata": { + "collapsed": false + }, + "source": [ + "## Invocation\n", + "You can invoke the model with a list of messages:" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "430b1cff2e6d77b4", + "metadata": { + "collapsed": false, + "ExecuteTime": { + "end_time": "2025-08-07T07:17:30.586261Z", + "start_time": "2025-08-07T07:17:29.074409Z" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "J'adore la programmation.\n" + ] + } + ], + "source": [ + "messages = [\n", + " (\"system\", \"You 
are a helpful assistant that translates English to French.\"),\n", + " (\"human\", \"I love programming.\"),\n", + "]\n", + "\n", + "ai_msg = llm.invoke(messages)\n", + "print(ai_msg.content)" + ] + }, + { + "cell_type": "markdown", + "id": "5463797524a19b2e", + "metadata": { + "collapsed": false + }, + "source": [ + "## Chaining\n", + "We can chain the model with a prompt template as follows:" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "bf6defc12a0c5d78", + "metadata": { + "collapsed": false, + "ExecuteTime": { + "end_time": "2025-08-07T07:17:36.368436Z", + "start_time": "2025-08-07T07:17:34.770581Z" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Ich liebe das Programmieren.\n" + ] + } + ], + "source": [ + "from langchain_core.prompts import ChatPromptTemplate\n", + "\n", + "prompt = ChatPromptTemplate.from_messages(\n", + " [\n", + " (\n", + " \"system\",\n", + " \"You are a helpful assistant that translates {input_language} to {output_language}.\",\n", + " ),\n", + " (\"human\", \"{input}\"),\n", + " ]\n", + ")\n", + "\n", + "chain = prompt | llm\n", + "response = chain.invoke(\n", + " {\n", + " \"input_language\": \"English\",\n", + " \"output_language\": \"German\",\n", + " \"input\": \"I love programming.\",\n", + " }\n", + ")\n", + "print(response.content)" + ] + }, + { + "cell_type": "markdown", + "id": "fcf0bf10a872355c", + "metadata": { + "collapsed": false + }, + "source": [ + "## API reference\n", + "\n", + "For detailed documentation of all ChatAimlapi features and configurations, visit the [API Reference](https://docs.aimlapi.com/?utm_source=langchain&utm_medium=github&utm_campaign=integration)." 
+ ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/docs/integrations/llms/aimlapi.ipynb b/docs/docs/integrations/llms/aimlapi.ipynb new file mode 100644 index 0000000000000..48f54940c52c6 --- /dev/null +++ b/docs/docs/integrations/llms/aimlapi.ipynb @@ -0,0 +1,357 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "source": [ + "---\n", + "sidebar_label: AI/ML API\n", + "---" + ], + "metadata": { + "collapsed": false + }, + "id": "c74887ead73c5eb4" + }, + { + "cell_type": "markdown", + "source": [ + "# AimlapiLLM\n", + "\n", + "This page will help you get started with AI/ML API [text completion models](/docs/concepts/text_llms). For detailed documentation of all AimlapiLLM features and configurations, head to the [API reference](https://docs.aimlapi.com/?utm_source=langchain&utm_medium=github&utm_campaign=integration).\n", + "\n", + "AI/ML API provides access to **300+ models** (Deepseek, Gemini, ChatGPT, etc.) via high-uptime and high-rate API." 
+ ], + "metadata": { + "collapsed": false + }, + "id": "c1895707cde83d90" + }, + { + "cell_type": "markdown", + "source": [ + "## Overview\n", + "### Integration details\n", + "\n", + "| Class | Package | Local | Serializable | JS support | Package downloads | Package latest |\n", + "| :--- | :--- | :---: | :---: | :---: | :---: | :---: |\n", + "| AimlapiLLM | langchain-aimlapi | ✅ | beta | ❌ | ![PyPI - Downloads](https://img.shields.io/pypi/dm/langchain-aimlapi?style=flat-square&label=%20) | ![PyPI - Version](https://img.shields.io/pypi/v/langchain-aimlapi?style=flat-square&label=%20) |" + ], + "metadata": { + "collapsed": false + }, + "id": "72b0a510b6eac641" + }, + { + "cell_type": "markdown", + "source": [ + "### Model features\n", + "| Tool calling | Structured output | JSON mode | Image input | Audio input | Video input | Token-level streaming | Native async | Token usage | Logprobs |\n", + "|:------------:|:-----------------:|:---------:|:-----------:|:-----------:|:-----------:|:---------------------:|:------------:|:-----------:|:--------:|\n", + "| ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ |\n" + ], + "metadata": { + "collapsed": false + }, + "id": "4b87089494d8877d" + }, + { + "cell_type": "markdown", + "source": [ + "## Setup\n", + "To access AI/ML API models, sign up at [aimlapi.com](https://aimlapi.com/app/?utm_source=langchain&utm_medium=github&utm_campaign=integration), generate an API key, and set the `AIMLAPI_API_KEY` environment variable:" + ], + "metadata": { + "collapsed": false + }, + "id": "2c45017efcc36569" + }, + { + "cell_type": "code", + "outputs": [], + "source": [ + "import getpass\n", + "import os\n", + "\n", + "if \"AIMLAPI_API_KEY\" not in os.environ:\n", + " os.environ[\"AIMLAPI_API_KEY\"] = getpass.getpass(\"Enter your AI/ML API key: \")" + ], + "metadata": { + "collapsed": false, + "ExecuteTime": { + "end_time": "2025-08-07T07:24:48.681319Z", + "start_time": "2025-08-07T07:24:47.490206Z" + } + }, + "id": "86b05af725c45941", + 
"execution_count": 1 + }, + { + "cell_type": "markdown", + "source": [ + "### Installation\n", + "Install the `langchain-aimlapi` package:" + ], + "metadata": { + "collapsed": false + }, + "id": "51171ba92cb2b382" + }, + { + "cell_type": "code", + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Note: you may need to restart the kernel to use updated packages.\n" + ] + } + ], + "source": [ + "%pip install -qU langchain-aimlapi" + ], + "metadata": { + "collapsed": false, + "ExecuteTime": { + "end_time": "2025-08-07T07:18:08.606708Z", + "start_time": "2025-08-07T07:17:59.901457Z" + } + }, + "id": "2b15cbaf7d5e1560", + "execution_count": 2 + }, + { + "cell_type": "markdown", + "source": [ + "## Instantiation\n", + "Now we can instantiate the `AimlapiLLM` model and generate text completions:" + ], + "metadata": { + "collapsed": false + }, + "id": "e94379f9d37fe6b3" + }, + { + "cell_type": "code", + "outputs": [], + "source": [ + "from langchain_aimlapi import AimlapiLLM\n", + "\n", + "llm = AimlapiLLM(\n", + " model=\"gpt-3.5-turbo-instruct\",\n", + " temperature=0.5,\n", + " max_tokens=256,\n", + ")" + ], + "metadata": { + "collapsed": false, + "ExecuteTime": { + "end_time": "2025-08-07T07:46:52.875867Z", + "start_time": "2025-08-07T07:46:52.869961Z" + } + }, + "id": "8a3af681997723b0", + "execution_count": 23 + }, + { + "cell_type": "markdown", + "source": [ + "## Invocation\n", + "You can invoke the model with a prompt:" + ], + "metadata": { + "collapsed": false + }, + "id": "c983ab1d95887e8f" + }, + { + "cell_type": "code", + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "\n", + "Bubble sort is a simple sorting algorithm that repeatedly steps through the list to be sorted, compares each pair of adjacent items and swaps them if they are in the wrong order. 
This process is repeated until the entire list is sorted.\n", + "\n", + "The algorithm gets its name from the way smaller elements \"bubble\" to the top of the list. It is commonly used for educational purposes due to its simplicity, but it is not a very efficient sorting algorithm for large data sets.\n", + "\n", + "Here is an implementation of the bubble sort algorithm in Python:\n", + "\n", + "1. Start by defining a function that takes in a list as its argument.\n", + "2. Set a variable \"swapped\" to True, indicating that a swap has occurred.\n", + "3. Create a while loop that runs as long as the \"swapped\" variable is True.\n", + "4. Inside the loop, set the \"swapped\" variable to False.\n", + "5. Create a for loop that iterates through the list, starting from the first element and ending at the second to last element.\n", + "6. Inside the for loop, compare the current element with the next element. If the current element is larger than the next element, swap them and set the \"swapped\" variable to True.\n", + "7. After the for loop, if the \"swapped\" variable\n" + ] + } + ], + "source": [ + "response = llm.invoke(\"Explain the bubble sort algorithm in Python.\")\n", + "print(response)" + ], + "metadata": { + "collapsed": false, + "ExecuteTime": { + "end_time": "2025-08-07T07:46:57.209950Z", + "start_time": "2025-08-07T07:46:53.935975Z" + } + }, + "id": "9a193081f431a42a", + "execution_count": 24 + }, + { + "cell_type": "markdown", + "source": [ + "## Streaming Invocation\n", + "You can also stream responses token-by-token:" + ], + "metadata": { + "collapsed": false + }, + "id": "1afedb28f556c7bd" + }, + { + "cell_type": "code", + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " \n", + "\n", + "1. Python\n", + "Python has been consistently growing in popularity and has become one of the most widely used programming languages in recent years. 
It is used for a wide range of applications such as web development, data analysis, machine learning, and artificial intelligence. Its simple syntax and readability make it an attractive choice for beginners and experienced programmers alike. With the rise of data-driven technology and automation, Python is projected to be the most in-demand language in 2025.\n", + "\n", + "2. JavaScript\n", + "JavaScript continues to dominate the web development scene and is expected to maintain its position as a top programming language in 2025. With the increasing use of front-end frameworks like React and Angular, JavaScript is crucial for building dynamic and interactive user interfaces. Additionally, the rise of serverless architecture and the popularity of Node.js make JavaScript an essential language for both front-end and back-end development.\n", + "\n", + "3. Go\n", + "Go, also known as Golang, is a relatively new programming language developed by Google. It is designed for" + ] + } + ], + "source": [ + "llm = AimlapiLLM(\n", + " model=\"gpt-3.5-turbo-instruct\",\n", + ")\n", + "\n", + "for chunk in llm.stream(\"List top 5 programming languages in 2025 with reasons.\"):\n", + " print(chunk, end=\"\", flush=True)" + ], + "metadata": { + "collapsed": false, + "ExecuteTime": { + "end_time": "2025-08-07T07:49:25.223233Z", + "start_time": "2025-08-07T07:49:22.101498Z" + } + }, + "id": "a132c9183f648fb4", + "execution_count": 26 + }, + { + "cell_type": "markdown", + "source": [ + "## API reference\n", + "\n", + "For detailed documentation of all AimlapiLLM features and configurations, visit the [API Reference](https://docs.aimlapi.com/?utm_source=langchain&utm_medium=github&utm_campaign=integration).\n" + ], + "metadata": { + "collapsed": false + }, + "id": "7b4ab33058dc0974" + }, + { + "cell_type": "markdown", + "source": [ + "## Chaining\n", + "\n", + "You can also easily combine with a prompt template for easy structuring of user input. 
We can do this using [LCEL](/docs/concepts/lcel)" + ], + "metadata": { + "collapsed": false + }, + "id": "900f36a35477c8ae" + }, + { + "cell_type": "code", + "outputs": [], + "source": [ + "from langchain_core.prompts import PromptTemplate\n", + "\n", + "prompt = PromptTemplate.from_template(\"Tell me a joke about {topic}\")\n", + "chain = prompt | llm" + ], + "metadata": { + "collapsed": false, + "ExecuteTime": { + "end_time": "2025-08-07T07:49:34.857042Z", + "start_time": "2025-08-07T07:49:34.853032Z" + } + }, + "id": "d7f10052eb4ff249", + "execution_count": 27 + }, + { + "cell_type": "code", + "outputs": [ + { + "data": { + "text/plain": "\"\\n\\nWhy do bears have fur coats?\\n\\nBecause they'd look silly in sweaters! \"" + }, + "execution_count": 28, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "chain.invoke({\"topic\": \"bears\"})" + ], + "metadata": { + "collapsed": false, + "ExecuteTime": { + "end_time": "2025-08-07T07:49:48.565804Z", + "start_time": "2025-08-07T07:49:35.558426Z" + } + }, + "id": "184c333c60f94b05", + "execution_count": 28 + }, + { + "cell_type": "markdown", + "source": [ + "## API reference\n", + "\n", + "For detailed documentation of all `AI/ML API` llm features and configurations head to the API reference: [API Reference](https://docs.aimlapi.com/?utm_source=langchain&utm_medium=github&utm_campaign=integration)" + ], + "metadata": { + "collapsed": false + }, + "id": "804f3a79a8046ec1" + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/docs/integrations/providers/aimlapi.ipynb b/docs/docs/integrations/providers/aimlapi.ipynb 
new file mode 100644 index 0000000000000..8717b22edf530 --- /dev/null +++ b/docs/docs/integrations/providers/aimlapi.ipynb @@ -0,0 +1,272 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "source": [ + "# AI/ML API LLM\n", + "\n", + "[AI/ML API](https://aimlapi.com/app/?utm_source=langchain&utm_medium=github&utm_campaign=integration) provides an API to query **300+ leading AI models** (Deepseek, Gemini, ChatGPT, etc.) with enterprise-grade performance.\n", + "\n", + "This example demonstrates how to use LangChain to interact with AI/ML API models." + ], + "metadata": { + "collapsed": false + }, + "id": "bb9dcd1ba7b0f560" + }, + { + "cell_type": "markdown", + "source": [ + "## Installation" + ], + "metadata": { + "collapsed": false + }, + "id": "e4c35f60c565d369" + }, + { + "cell_type": "code", + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Requirement already satisfied: langchain-aimlapi in c:\\users\\tuman\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (0.1.0)\n", + "Requirement already satisfied: langchain-core<0.4.0,>=0.3.15 in c:\\users\\tuman\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from langchain-aimlapi) (0.3.67)\n", + "Requirement already satisfied: langsmith>=0.3.45 in c:\\users\\tuman\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from langchain-core<0.4.0,>=0.3.15->langchain-aimlapi) (0.4.4)\n", + "Requirement already satisfied: tenacity!=8.4.0,<10.0.0,>=8.1.0 in c:\\users\\tuman\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from langchain-core<0.4.0,>=0.3.15->langchain-aimlapi) (9.1.2)\n", + "Requirement already satisfied: jsonpatch<2.0,>=1.33 in c:\\users\\tuman\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from langchain-core<0.4.0,>=0.3.15->langchain-aimlapi) (1.33)\n", + "Requirement already satisfied: PyYAML>=5.3 in c:\\users\\tuman\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from 
langchain-core<0.4.0,>=0.3.15->langchain-aimlapi) (6.0.2)\n", + "Requirement already satisfied: packaging<25,>=23.2 in c:\\users\\tuman\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from langchain-core<0.4.0,>=0.3.15->langchain-aimlapi) (24.2)\n", + "Requirement already satisfied: typing-extensions>=4.7 in c:\\users\\tuman\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from langchain-core<0.4.0,>=0.3.15->langchain-aimlapi) (4.14.0)\n", + "Requirement already satisfied: pydantic>=2.7.4 in c:\\users\\tuman\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from langchain-core<0.4.0,>=0.3.15->langchain-aimlapi) (2.11.7)\n", + "Requirement already satisfied: jsonpointer>=1.9 in c:\\users\\tuman\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from jsonpatch<2.0,>=1.33->langchain-core<0.4.0,>=0.3.15->langchain-aimlapi) (3.0.0)\n", + "Requirement already satisfied: httpx<1,>=0.23.0 in c:\\users\\tuman\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from langsmith>=0.3.45->langchain-core<0.4.0,>=0.3.15->langchain-aimlapi) (0.28.1)\n", + "Requirement already satisfied: orjson<4.0.0,>=3.9.14 in c:\\users\\tuman\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from langsmith>=0.3.45->langchain-core<0.4.0,>=0.3.15->langchain-aimlapi) (3.10.18)\n", + "Requirement already satisfied: requests<3,>=2 in c:\\users\\tuman\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from langsmith>=0.3.45->langchain-core<0.4.0,>=0.3.15->langchain-aimlapi) (2.32.4)\n", + "Requirement already satisfied: requests-toolbelt<2.0.0,>=1.0.0 in c:\\users\\tuman\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from langsmith>=0.3.45->langchain-core<0.4.0,>=0.3.15->langchain-aimlapi) (1.0.0)\n", + "Requirement already satisfied: zstandard<0.24.0,>=0.23.0 in c:\\users\\tuman\\appdata\\roaming\\python\\python312\\site-packages (from 
langsmith>=0.3.45->langchain-core<0.4.0,>=0.3.15->langchain-aimlapi) (0.23.0)\n", + "Requirement already satisfied: annotated-types>=0.6.0 in c:\\users\\tuman\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from pydantic>=2.7.4->langchain-core<0.4.0,>=0.3.15->langchain-aimlapi) (0.7.0)\n", + "Requirement already satisfied: pydantic-core==2.33.2 in c:\\users\\tuman\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from pydantic>=2.7.4->langchain-core<0.4.0,>=0.3.15->langchain-aimlapi) (2.33.2)\n", + "Requirement already satisfied: typing-inspection>=0.4.0 in c:\\users\\tuman\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from pydantic>=2.7.4->langchain-core<0.4.0,>=0.3.15->langchain-aimlapi) (0.4.1)\n", + "Requirement already satisfied: anyio in c:\\users\\tuman\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from httpx<1,>=0.23.0->langsmith>=0.3.45->langchain-core<0.4.0,>=0.3.15->langchain-aimlapi) (4.9.0)\n", + "Requirement already satisfied: certifi in c:\\users\\tuman\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from httpx<1,>=0.23.0->langsmith>=0.3.45->langchain-core<0.4.0,>=0.3.15->langchain-aimlapi) (2025.6.15)\n", + "Requirement already satisfied: httpcore==1.* in c:\\users\\tuman\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from httpx<1,>=0.23.0->langsmith>=0.3.45->langchain-core<0.4.0,>=0.3.15->langchain-aimlapi) (1.0.9)\n", + "Requirement already satisfied: idna in c:\\users\\tuman\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from httpx<1,>=0.23.0->langsmith>=0.3.45->langchain-core<0.4.0,>=0.3.15->langchain-aimlapi) (3.10)\n", + "Requirement already satisfied: h11>=0.16 in c:\\users\\tuman\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from httpcore==1.*->httpx<1,>=0.23.0->langsmith>=0.3.45->langchain-core<0.4.0,>=0.3.15->langchain-aimlapi) (0.16.0)\n", + "Requirement already satisfied: 
charset_normalizer<4,>=2 in c:\\users\\tuman\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from requests<3,>=2->langsmith>=0.3.45->langchain-core<0.4.0,>=0.3.15->langchain-aimlapi) (3.4.2)\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in c:\\users\\tuman\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from requests<3,>=2->langsmith>=0.3.45->langchain-core<0.4.0,>=0.3.15->langchain-aimlapi) (2.5.0)\n", + "Requirement already satisfied: sniffio>=1.1 in c:\\users\\tuman\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from anyio->httpx<1,>=0.23.0->langsmith>=0.3.45->langchain-core<0.4.0,>=0.3.15->langchain-aimlapi) (1.3.1)\n", + "Note: you may need to restart the kernel to use updated packages.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\n", + "[notice] A new release of pip is available: 25.0.1 -> 25.2\n", + "[notice] To update, run: python.exe -m pip install --upgrade pip\n" + ] + } + ], + "source": [ + "%pip install --upgrade langchain-aimlapi" + ], + "metadata": { + "collapsed": false, + "ExecuteTime": { + "end_time": "2025-08-06T15:22:02.570792Z", + "start_time": "2025-08-06T15:21:32.377131Z" + } + }, + "id": "77d4a44909effc3c", + "execution_count": 4 + }, + { + "cell_type": "markdown", + "source": [ + "## Environment\n", + "\n", + "To use AI/ML API, you'll need an API key which you can generate at:\n", + "[https://aimlapi.com/app/](https://aimlapi.com/app/?utm_source=langchain&utm_medium=github&utm_campaign=integration)\n", + "\n", + "You can pass it via `aimlapi_api_key` parameter or set as environment variable `AIMLAPI_API_KEY`." 
+ ], + "metadata": { + "collapsed": false + }, + "id": "c41eaf364c0b414f" + }, + { + "cell_type": "code", + "outputs": [], + "source": [ + "import os\n", + "import getpass\n", + "\n", + "if \"AIMLAPI_API_KEY\" not in os.environ:\n", + " os.environ[\"AIMLAPI_API_KEY\"] = getpass.getpass(\"Enter your AI/ML API key: \")" + ], + "metadata": { + "collapsed": false, + "ExecuteTime": { + "end_time": "2025-08-07T07:15:37.147559Z", + "start_time": "2025-08-07T07:15:30.919160Z" + } + }, + "id": "421cd40d4e54de62", + "execution_count": 3 + }, + { + "cell_type": "markdown", + "source": [ + "## Example: Chat Model" + ], + "metadata": { + "collapsed": false + }, + "id": "d9cbe98904f4c5e4" + }, + { + "cell_type": "code", + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The city that never sleeps! New York City is a treasure trove of excitement, entertainment, and adventure. Here are some fun things to do in NYC:\n", + "\n", + "**Iconic Attractions:**\n", + "\n", + "1. **Statue of Liberty and Ellis Island**: Take a ferry to Liberty Island to see the iconic statue up close and visit the Ellis Island Immigration Museum.\n", + "2. **Central Park**: A tranquil oasis in the middle of Manhattan, perfect for a stroll, picnic, or bike ride.\n", + "3. **Empire State Building**: For a panoramic view of the city, head to the observation deck of this iconic skyscraper.\n", + "4. **The Metropolitan Museum of Art**: One of the world's largest and most famous museums, with a collection that spans over 5,000 years of human history.\n", + "\n", + "**Neighborhood Explorations:**\n", + "\n", + "1. **SoHo**: Known for its trendy boutiques, art galleries, and cast-iron buildings.\n", + "2. **Greenwich Village**: A charming neighborhood with a rich history, known for its bohemian vibe, jazz clubs, and historic brownstones.\n", + "3. 
**Chinatown and Little Italy**: Experience the vibrant cultures of these two iconic neighborhoods, with delicious food, street festivals, and unique shops.\n", + "4. **Williamsburg, Brooklyn**: A hip neighborhood with a thriving arts scene, trendy bars, and some of the best restaurants in the city.\n", + "\n", + "**Food and Drink:**\n", + "\n", + "1. **Try a classic NYC slice of pizza**: Visit Lombardi's, Joe's Pizza, or Patsy's Pizzeria for a taste of the city's famous pizza.\n", + "2. **Bagels with lox and cream cheese**: A classic NYC breakfast at a Jewish deli like Russ & Daughters Cafe or Ess-a-Bagel.\n", + "3. **Food markets**: Visit Smorgasburg in Brooklyn or Chelsea Market for a variety of artisanal foods and drinks.\n", + "4. **Rooftop bars**: Enjoy a drink with a view at 230 Fifth, the Top of the Strand, or the Roof at The Viceroy Central Park.\n", + "**Performing Arts:**\n", + "\n", + "1. **Broadway shows**: Catch a musical or play on the Great White Way, like Hamilton, The Lion King, or Wicked.\n", + "2. **Jazz clubs**: Visit Blue Note Jazz Club, the Village Vanguard, or the Jazz Standard for live music performances.\n", + "3. **Lincoln Center**: Home to the New York City Ballet, the Metropolitan Opera, and the Juilliard School.\n", + "4. 
**" + ] + } + ], + "source": [ + "from langchain_aimlapi import ChatAimlapi\n", + "\n", + "chat = ChatAimlapi(\n", + " model=\"meta-llama/Llama-3-70b-chat-hf\",\n", + ")\n", + "\n", + "# Stream response\n", + "for chunk in chat.stream(\"Tell me fun things to do in NYC\"):\n", + " print(chunk.content, end=\"\", flush=True)\n", + "\n", + "# Or use invoke()\n", + "# response = chat.invoke(\"Tell me fun things to do in NYC\")\n", + "# print(response)" + ], + "metadata": { + "collapsed": false, + "ExecuteTime": { + "end_time": "2025-08-07T07:15:59.612289Z", + "start_time": "2025-08-07T07:15:47.864231Z" + } + }, + "id": "3f73a8e113a58e9b", + "execution_count": 4 + }, + { + "cell_type": "markdown", + "source": [ + "## Example: Text Completion Model" + ], + "metadata": { + "collapsed": false + }, + "id": "7aca59af5cadce80" + }, + { + "cell_type": "code", + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " # Funkcja ponownie zwraca nową listę, bez zmienienia listy przekazanej jako argument w funkcji\n", + " my_list = [16, 12, 16, 3, 2, 6]\n", + " new_list = my_list[:]\n", + " for x in range(len(new_list)):\n", + " for y in range(len(new_list) - 1):\n", + " if new_list[y] > new_list[y + 1]:\n", + " new_list[y], new_list[y + 1] = new_list[y + 1], new_list[y]\n", + " return new_list, my_list\n", + "\n", + "\n", + "def bubble_sort_lib3(list): # Sortowanie z wykorzystaniem zewnętrznej biblioteki poza pętlą\n", + " from itertools import permutations\n", + " y = len(list)\n", + " perms = []\n", + " for a in range(0, y + 1):\n", + " for subset in permutations(list, a):\n", + " \n" + ] + } + ], + "source": [ + "from langchain_aimlapi import AimlapiLLM\n", + "\n", + "llm = AimlapiLLM(\n", + " model=\"gpt-3.5-turbo-instruct\",\n", + ")\n", + "\n", + "print(llm.invoke(\"def bubble_sort(): \"))" + ], + "metadata": { + "collapsed": false, + "ExecuteTime": { + "end_time": "2025-08-07T07:16:22.595703Z", + "start_time": "2025-08-07T07:16:19.410881Z" + } + }, + 
"id": "2af3be417769efc3", + "execution_count": 6 + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/docs/integrations/text_embedding/aimlapi.ipynb b/docs/docs/integrations/text_embedding/aimlapi.ipynb new file mode 100644 index 0000000000000..a1b407aadc521 --- /dev/null +++ b/docs/docs/integrations/text_embedding/aimlapi.ipynb @@ -0,0 +1,319 @@ +{ + "cells": [ + { + "cell_type": "raw", + "source": [ + "---\n", + "sidebar_label: AI/ML API Embeddings\n", + "---" + ], + "metadata": { + "collapsed": false + }, + "id": "24ae9a5bcf0c8c19" + }, + { + "cell_type": "markdown", + "source": [ + "# AimlapiEmbeddings\n", + "\n", + "This will help you get started with AI/ML API embedding models using LangChain. For detailed documentation on `AimlapiEmbeddings` features and configuration options, please refer to the [API reference](https://docs.aimlapi.com/?utm_source=langchain&utm_medium=github&utm_campaign=integration).\n", + "\n", + "## Overview\n", + "### Integration details\n", + "\n", + "import { ItemTable } from \"@theme/FeatureTables\";\n", + "\n", + "\n", + "\n", + "## Setup\n", + "\n", + "To access AI/ML API embedding models you'll need to create an account, get an API key, and install the `langchain-aimlapi` integration package.\n", + "\n", + "### Credentials\n", + "\n", + "Head to [https://aimlapi.com/app/](https://aimlapi.com/app/?utm_source=langchain&utm_medium=github&utm_campaign=integration) to sign up and generate an API key. 
Once you've done this, set the `AIMLAPI_API_KEY` environment variable:" + ], + "metadata": { + "collapsed": false + }, + "id": "4af58f76e6ce897a" + }, + { + "cell_type": "code", + "outputs": [], + "source": [ + "import getpass\n", + "import os\n", + "\n", + "if not os.getenv(\"AIMLAPI_API_KEY\"):\n", + " os.environ[\"AIMLAPI_API_KEY\"] = getpass.getpass(\"Enter your AI/ML API key: \")" + ], + "metadata": { + "collapsed": false, + "ExecuteTime": { + "end_time": "2025-08-07T07:50:37.393789Z", + "start_time": "2025-08-07T07:50:27.679399Z" + } + }, + "id": "3297a770bc0b2b88", + "execution_count": 1 + }, + { + "cell_type": "markdown", + "source": [ + "To enable automated tracing of your model calls, set your [LangSmith](https://docs.smith.langchain.com/) API key:" + ], + "metadata": { + "collapsed": false + }, + "id": "da319ae795659a93" + }, + { + "cell_type": "code", + "outputs": [], + "source": [ + "# os.environ[\"LANGSMITH_TRACING\"] = \"true\"\n", + "# os.environ[\"LANGSMITH_API_KEY\"] = getpass.getpass(\"Enter your LangSmith API key: \")" + ], + "metadata": { + "collapsed": false, + "ExecuteTime": { + "end_time": "2025-08-07T07:50:40.840377Z", + "start_time": "2025-08-07T07:50:40.837144Z" + } + }, + "id": "6869f433a2f9dc3e", + "execution_count": 2 + }, + { + "cell_type": "markdown", + "source": [ + "### Installation\n", + "\n", + "The LangChain AI/ML API integration lives in the `langchain-aimlapi` package:" + ], + "metadata": { + "collapsed": false + }, + "id": "3f6de2cfc36a4dba" + }, + { + "cell_type": "code", + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Note: you may need to restart the kernel to use updated packages.\n" + ] + } + ], + "source": [ + "%pip install -qU langchain-aimlapi" + ], + "metadata": { + "collapsed": false, + "ExecuteTime": { + "end_time": "2025-08-07T07:50:50.693835Z", + "start_time": "2025-08-07T07:50:41.453138Z" + } + }, + "id": "23c22092f806aa31", + "execution_count": 3 + }, + { + "cell_type": 
"markdown", + "source": [ + "## Instantiation\n", + "\n", + "Now we can instantiate our embeddings model and perform embedding operations:" + ], + "metadata": { + "collapsed": false + }, + "id": "db718f4b551164f3" + }, + { + "cell_type": "code", + "outputs": [], + "source": [ + "from langchain_aimlapi import AimlapiEmbeddings\n", + "\n", + "embeddings = AimlapiEmbeddings(\n", + " model=\"text-embedding-ada-002\",\n", + ")" + ], + "metadata": { + "collapsed": false, + "ExecuteTime": { + "end_time": "2025-08-07T07:51:03.046723Z", + "start_time": "2025-08-07T07:50:50.694842Z" + } + }, + "id": "88b86f20598af88e", + "execution_count": 4 + }, + { + "cell_type": "markdown", + "source": [ + "## Indexing and Retrieval\n", + "\n", + "Embedding models are often used in retrieval-augmented generation (RAG) flows. Below is how to index and retrieve data using the `embeddings` object we initialized above with `InMemoryVectorStore`." + ], + "metadata": { + "collapsed": false + }, + "id": "847447f4ff1fe82a" + }, + { + "cell_type": "code", + "outputs": [ + { + "data": { + "text/plain": "'LangChain is the framework for building context-aware reasoning applications'" + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from langchain_core.vectorstores import InMemoryVectorStore\n", + "\n", + "text = \"LangChain is the framework for building context-aware reasoning applications\"\n", + "\n", + "vectorstore = InMemoryVectorStore.from_texts(\n", + " [text],\n", + " embedding=embeddings,\n", + ")\n", + "\n", + "retriever = vectorstore.as_retriever()\n", + "\n", + "retrieved_documents = retriever.invoke(\"What is LangChain?\")\n", + "retrieved_documents[0].page_content" + ], + "metadata": { + "collapsed": false, + "ExecuteTime": { + "end_time": "2025-08-07T07:51:05.421030Z", + "start_time": "2025-08-07T07:51:03.047729Z" + } + }, + "id": "595ccebd97dabeef", + "execution_count": 5 + }, + { + "cell_type": "markdown", + "source": [ + "## 
Direct Usage\n", + "\n", + "You can directly call `embed_query` and `embed_documents` for custom embedding scenarios.\n", + "\n", + "### Embed single text:" + ], + "metadata": { + "collapsed": false + }, + "id": "aa922f78938d1ae1" + }, + { + "cell_type": "code", + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[-0.0011368310078978539, 0.00714730704203248, -0.014703838154673576, -0.034064359962940216, 0.011239\n" + ] + } + ], + "source": [ + "single_vector = embeddings.embed_query(text)\n", + "print(str(single_vector)[:100])" + ], + "metadata": { + "collapsed": false, + "ExecuteTime": { + "end_time": "2025-08-07T07:51:06.285037Z", + "start_time": "2025-08-07T07:51:05.422035Z" + } + }, + "id": "c06952ac53aab22", + "execution_count": 6 + }, + { + "cell_type": "markdown", + "source": [ + "### Embed multiple texts:" + ], + "metadata": { + "collapsed": false + }, + "id": "52c9b7de79992a7b" + }, + { + "cell_type": "code", + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[-0.0011398226488381624, 0.007080476265400648, -0.014682820066809654, -0.03407655283808708, 0.011276\n", + "[-0.005510928109288216, 0.016650190576910973, -0.011078780516982079, -0.03116573952138424, -0.003735\n" + ] + } + ], + "source": [ + "text2 = (\n", + " \"LangGraph is a library for building stateful, multi-actor applications with LLMs\"\n", + ")\n", + "two_vectors = embeddings.embed_documents([text, text2])\n", + "for vector in two_vectors:\n", + " print(str(vector)[:100])" + ], + "metadata": { + "collapsed": false, + "ExecuteTime": { + "end_time": "2025-08-07T07:51:07.954778Z", + "start_time": "2025-08-07T07:51:06.285544Z" + } + }, + "id": "f1dcf3c389e11cc1", + "execution_count": 7 + }, + { + "cell_type": "markdown", + "source": [ + "## API Reference\n", + "\n", + "For detailed documentation on `AimlapiEmbeddings` features and configuration options, please refer to the [API 
"reference](https://docs.aimlapi.com/?utm_source=langchain&utm_medium=github&utm_campaign=integration).\n" + ], + "metadata": { + "collapsed": false + }, + "id": "a45ff6faef63cab2" + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/src/theme/FeatureTables.js b/docs/src/theme/FeatureTables.js index 5aeb5fba86d95..1960fda4bfbae 100644 --- a/docs/src/theme/FeatureTables.js +++ b/docs/src/theme/FeatureTables.js @@ -39,6 +39,17 @@ const FEATURE_TABLES = { "local": false, "apiLink": "https://python.langchain.com/api_reference/mistralai/chat_models/langchain_mistralai.chat_models.ChatMistralAI.html" }, + { + "name": "ChatAimlapi", + "package": "langchain-aimlapi", + "link": "aimlapi/", + "structured_output": true, + "tool_calling": true, + "json_mode": true, + "multimodal": true, + "local": false, + "apiLink": "https://python.langchain.com/api_reference/aimlapi/chat_models/langchain_aimlapi.chat_models.ChatAimlapi.html" + }, + { + "name": "ChatFireworks", + "package": "langchain-fireworks", @@ -301,6 +312,12 @@ const FEATURE_TABLES = { package: "langchain-fireworks", apiLink: "https://python.langchain.com/api_reference/fireworks/llms/langchain_fireworks.llms.Fireworks.html" }, + { + name: "AimlapiLLM", + link: "aimlapi", + package: "langchain-aimlapi", + apiLink: "https://python.langchain.com/api_reference/aimlapi/llms/langchain_aimlapi.llms.AimlapiLLM.html" + }, + { + name: "OllamaLLM", + link: "ollama", @@ -382,6 +399,12 @@ const FEATURE_TABLES = { package: "langchain-fireworks", apiLink:
"https://python.langchain.com/api_reference/fireworks/embeddings/langchain_fireworks.embeddings.FireworksEmbeddings.html" }, + { + name: "AI/ML API", + link: "/docs/integrations/text_embedding/aimlapi", + package: "langchain-aimlapi", + apiLink: "https://python.langchain.com/api_reference/aimlapi/embeddings/langchain_aimlapi.embeddings.AimlapiEmbeddings.html" + }, { name: "MistralAI", link: "/docs/integrations/text_embedding/mistralai", diff --git a/libs/packages.yml b/libs/packages.yml index 3b415effeafd5..3d5fec9a9ed5f 100644 --- a/libs/packages.yml +++ b/libs/packages.yml @@ -699,3 +699,6 @@ packages: - name: langchain-gradient path: . repo: digitalocean/langchain-gradient +- name: langchain-aimlapi + path: libs/aimlapi + repo: D1m7asis/langchain-aimlapi