Commit 169b37f

ahgraber, jjmachan, and claude authored

fix: update embedding extraction to use appropriate async method (#2068)

Embedding extraction should be async and use langchain's `embed_text` method rather than `embed_query`

Co-authored-by: jjmachan <jamesjithin97@gmail.com>
Co-authored-by: Claude <noreply@anthropic.com>

1 parent 3fa4271 · commit 169b37f
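The ragas source files that actually implement this change are not part of the truncated listing below, so the following is only a minimal sketch of the pattern the commit title describes: awaiting the embedding backend's async entry point instead of calling the synchronous `embed_query`. The helper name `extract_embedding` and the use of `OpenAIEmbeddings` are illustrative assumptions, not the code touched by this commit.

```python
# Illustrative sketch only: await the async embedding call instead of the
# blocking embed_query(), so extraction can run inside an event loop.
import asyncio
from typing import List

from langchain_openai import OpenAIEmbeddings  # any langchain Embeddings implementation works


async def extract_embedding(text: str, embeddings: OpenAIEmbeddings) -> List[float]:
    # aembed_query is the async counterpart of embed_query in langchain's
    # Embeddings interface; awaiting it keeps the event loop free while the
    # HTTP request to the embedding service runs.
    return await embeddings.aembed_query(text)


async def main() -> None:
    embeddings = OpenAIEmbeddings(model="text-embedding-3-small")
    vector = await extract_embedding("hai there", embeddings)
    print(f"embedding dimension: {len(vector)}")


if __name__ == "__main__":
    asyncio.run(main())
```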

File tree

21 files changed (+50, -235 lines)


.github/workflows/ci.yaml

Lines changed: 7 additions & 7 deletions
@@ -31,16 +31,16 @@ jobs:
           token: ${{ github.token }}
           filters: |
             related: &related
-              - .github/workflows/ci.yml
+              - .github/workflows/ci.yaml
               - codecov.yml
               - pyproject.toml
               - requirements/test.txt
             ragas:
               - *related
-              - "src/ragas/**"
-              - "tests/**"
+              - "ragas/src/ragas/**"
+              - "ragas/tests/**"
             ragas_experimental:
-              - "src/experimental/**"
+              - "experimental/ragas_experimental/**"
             docs:
               - *related
               - requirements/docs-requirements.txt
@@ -85,7 +85,7 @@ jobs:

       - name: Install dependencies
         run: |
-          pip install "."
+          pip install "./ragas"
           pip install -r requirements/test.txt


@@ -97,7 +97,7 @@ jobs:
             OPTS=(--dist loadfile -n auto)
           fi
           # Now run the unit tests
-          pytest --nbmake tests/unit "${OPTS[@]}"
+          pytest --nbmake ragas/tests/unit "${OPTS[@]}"
         env:
           __RAGAS_DEBUG_TRACKING: true
           RAGAS_DO_NOT_TRACK: true
@@ -140,7 +140,7 @@ jobs:

       - name: Install dependencies
         run: |
-          pip install .
+          pip install ./ragas
           pip install -r requirements/dev.txt

       - name: Lint check

.gitignore

Lines changed: 1 addition & 1 deletion
@@ -167,7 +167,7 @@ cython_debug/
 # Ragas specific
 experiments/
 **/fil-result/
-src/ragas/_version.py
+ragas/src/ragas/_version.py
 experimental/ragas_experimental/_version.py
 .vscode
 .envrc

CLAUDE.md

Lines changed: 5 additions & 1 deletion
@@ -187,4 +187,8 @@ console_handler.setFormatter(formatter)

 # Add the handler to the logger
 analytics_logger.addHandler(console_handler)
-```
+```
+
+## Memories
+
+- whenever you create such docs put in in /experiments because that is gitignored and you can use it as a scratchpad or tmp directory for storing these

Makefile

Lines changed: 1 addition & 1 deletion
@@ -37,7 +37,7 @@ lint-all: lint lint-experimental ## Lint all code in the monorepo

 type: ## Running type checker for ragas
 	@echo "(pyright) Typechecking ragas codebase..."
-	PYRIGHT_PYTHON_FORCE_VERSION=latest pyright ragas/src/ragas
+	cd ragas && PYRIGHT_PYTHON_FORCE_VERSION=latest pyright src

 type-experimental: ## Running type checker for experimental
 	@echo "(pyright) Typechecking experimental codebase..."

docs/experimental/index.html.md

Lines changed: 0 additions & 44 deletions
This file was deleted.

docs/experimental/index.md

Lines changed: 3 additions & 0 deletions
@@ -0,0 +1,3 @@
+# Ragas Experimental
+
+Under the works but stay tuned :)

docs/howtos/applications/cost.ipynb

Lines changed: 3 additions & 27 deletions
@@ -24,33 +24,9 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 2,
    "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "TokenUsage(input_tokens=9, output_tokens=9, model='')"
-      ]
-     },
-     "execution_count": 2,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "from langchain_openai.chat_models import ChatOpenAI\n",
-    "from langchain_core.prompt_values import StringPromptValue\n",
-    "\n",
-    "gpt4o = ChatOpenAI(model=\"gpt-4o\")\n",
-    "p = StringPromptValue(text=\"hai there\")\n",
-    "llm_result = gpt4o.generate_prompt([p])\n",
-    "\n",
-    "# lets import a parser for OpenAI\n",
-    "from ragas.cost import get_token_usage_for_openai\n",
-    "\n",
-    "get_token_usage_for_openai(llm_result)"
-   ]
+   "outputs": [],
+   "source": "from langchain_openai.chat_models import ChatOpenAI\nfrom langchain_core.prompt_values import StringPromptValue\n# lets import a parser for OpenAI\nfrom ragas.cost import get_token_usage_for_openai\n\ngpt4o = ChatOpenAI(model=\"gpt-4o\")\np = StringPromptValue(text=\"hai there\")\nllm_result = gpt4o.generate_prompt([p])\n\nget_token_usage_for_openai(llm_result)"
   },
   {
    "cell_type": "markdown",
@@ -284,4 +260,4 @@
  },
  "nbformat": 4,
  "nbformat_minor": 2
-}
+}
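For readability, the single-string "source" value added in the hunk above expands to the cell below; aside from grouping the imports at the top, it is the same code as the deleted multi-line cell, with the recorded output (TokenUsage(input_tokens=9, output_tokens=9, model='')) dropped. The same cell also appears in docs/howtos/customizations/metrics/cost.ipynb further down. Running it requires OpenAI credentials and makes a real API call; the final expression displays the TokenUsage when run in a notebook.

```python
# Expanded form of the notebook cell's new "source" string.
from langchain_openai.chat_models import ChatOpenAI
from langchain_core.prompt_values import StringPromptValue
# lets import a parser for OpenAI
from ragas.cost import get_token_usage_for_openai

gpt4o = ChatOpenAI(model="gpt-4o")
p = StringPromptValue(text="hai there")
llm_result = gpt4o.generate_prompt([p])

get_token_usage_for_openai(llm_result)
```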

docs/howtos/customizations/metrics/cost.ipynb

Lines changed: 3 additions & 35 deletions
@@ -35,41 +35,9 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 5,
    "metadata": {},
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "/opt/homebrew/Caskroom/miniforge/base/envs/ragas/lib/python3.9/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
-      "  from .autonotebook import tqdm as notebook_tqdm\n"
-     ]
-    },
-    {
-     "data": {
-      "text/plain": [
-       "TokenUsage(input_tokens=9, output_tokens=9, model='')"
-      ]
-     },
-     "execution_count": 5,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "from langchain_openai.chat_models import ChatOpenAI\n",
-    "from langchain_core.prompt_values import StringPromptValue\n",
-    "\n",
-    "gpt4o = ChatOpenAI(model=\"gpt-4o\")\n",
-    "p = StringPromptValue(text=\"hai there\")\n",
-    "llm_result = gpt4o.generate_prompt([p])\n",
-    "\n",
-    "# lets import a parser for OpenAI\n",
-    "from ragas.cost import get_token_usage_for_openai\n",
-    "\n",
-    "get_token_usage_for_openai(llm_result)"
-   ]
+   "outputs": [],
+   "source": "from langchain_openai.chat_models import ChatOpenAI\nfrom langchain_core.prompt_values import StringPromptValue\n# lets import a parser for OpenAI\nfrom ragas.cost import get_token_usage_for_openai\n\ngpt4o = ChatOpenAI(model=\"gpt-4o\")\np = StringPromptValue(text=\"hai there\")\nllm_result = gpt4o.generate_prompt([p])\n\nget_token_usage_for_openai(llm_result)"
   },
   {
    "cell_type": "markdown",
@@ -212,4 +180,4 @@
  },
  "nbformat": 4,
  "nbformat_minor": 2
-}
+}

docs/howtos/customizations/testgenerator/language_adaptation.ipynb

Lines changed: 1 addition & 1 deletion
@@ -53,7 +53,7 @@
    }
   ],
   "source": [
-    "from langchain_community.document_loaders import DirectoryLoader, TextLoader\n",
+    "from langchain_community.document_loaders import DirectoryLoader\n",
    "\n",
    "\n",
    "path = \"Sample_non_english_corpus/\"\n",

docs/howtos/customizations/testgenerator/testgen-custom-single-hop.ipynb

Lines changed: 0 additions & 1 deletion
@@ -229,7 +229,6 @@
   "source": [
    "from ragas.testset.synthesizers.single_hop import (\n",
    "    SingleHopQuerySynthesizer,\n",
-    "    SingleHopScenario,\n",
    ")\n",
    "from dataclasses import dataclass\n",
    "from ragas.testset.synthesizers.prompts import (\n",
