
Commit 9b147ae

Tenant integration tests (#2913)
* check for index swap * initial bones * kk * k * k: * nit * nit * rebase + update * nit * minior update * k * minor integration test fixes * nit * ensure we build test docker image * remove one space * k * ensure we wipe volumes * remove log * typo * nit * k * k
1 parent bd63119 commit 9b147ae


20 files changed: +479 lines, -540 lines


.github/workflows/pr-Integration-tests.yml

Lines changed: 61 additions & 4 deletions
@@ -72,7 +72,7 @@ jobs:
           load: true
           cache-from: type=s3,prefix=cache/${{ github.repository }}/integration-tests/model-server/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }}
           cache-to: type=s3,prefix=cache/${{ github.repository }}/integration-tests/model-server/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }},mode=max
-
+
       - name: Build integration test Docker image
         uses: ./.github/actions/custom-build-and-push
         with:
@@ -85,7 +85,58 @@ jobs:
           cache-from: type=s3,prefix=cache/${{ github.repository }}/integration-tests/integration/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }}
           cache-to: type=s3,prefix=cache/${{ github.repository }}/integration-tests/integration/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }},mode=max

-      - name: Start Docker containers
+      # Start containers for multi-tenant tests
+      - name: Start Docker containers for multi-tenant tests
+        run: |
+          cd deployment/docker_compose
+          ENABLE_PAID_ENTERPRISE_EDITION_FEATURES=true \
+          MULTI_TENANT=true \
+          AUTH_TYPE=basic \
+          REQUIRE_EMAIL_VERIFICATION=false \
+          DISABLE_TELEMETRY=true \
+          IMAGE_TAG=test \
+          docker compose -f docker-compose.dev.yml -p danswer-stack up -d
+        id: start_docker_multi_tenant
+
+      # In practice, `cloud` Auth type would require OAUTH credentials to be set.
+      - name: Run Multi-Tenant Integration Tests
+        run: |
+          echo "Running integration tests..."
+          docker run --rm --network danswer-stack_default \
+            --name test-runner \
+            -e POSTGRES_HOST=relational_db \
+            -e POSTGRES_USER=postgres \
+            -e POSTGRES_PASSWORD=password \
+            -e POSTGRES_DB=postgres \
+            -e VESPA_HOST=index \
+            -e REDIS_HOST=cache \
+            -e API_SERVER_HOST=api_server \
+            -e OPENAI_API_KEY=${OPENAI_API_KEY} \
+            -e SLACK_BOT_TOKEN=${SLACK_BOT_TOKEN} \
+            -e TEST_WEB_HOSTNAME=test-runner \
+            -e AUTH_TYPE=cloud \
+            -e MULTI_TENANT=true \
+            danswer/danswer-integration:test \
+            /app/tests/integration/multitenant_tests
+        continue-on-error: true
+        id: run_multitenant_tests
+
+      - name: Check multi-tenant test results
+        run: |
+          if [ ${{ steps.run_tests.outcome }} == 'failure' ]; then
+            echo "Integration tests failed. Exiting with error."
+            exit 1
+          else
+            echo "All integration tests passed successfully."
+          fi
+
+      - name: Stop multi-tenant Docker containers
+        run: |
+          cd deployment/docker_compose
+          docker compose -f docker-compose.dev.yml -p danswer-stack down -v
+
+
+      - name: Start Docker containers
         run: |
           cd deployment/docker_compose
           ENABLE_PAID_ENTERPRISE_EDITION_FEATURES=true \
@@ -130,7 +181,7 @@ jobs:
           done
           echo "Finished waiting for service."

-      - name: Run integration tests
+      - name: Run Standard Integration Tests
         run: |
           echo "Running integration tests..."
           docker run --rm --network danswer-stack_default \
@@ -145,7 +196,8 @@ jobs:
             -e OPENAI_API_KEY=${OPENAI_API_KEY} \
             -e SLACK_BOT_TOKEN=${SLACK_BOT_TOKEN} \
             -e TEST_WEB_HOSTNAME=test-runner \
-            danswer/danswer-integration:test
+            danswer/danswer-integration:test \
+            /app/tests/integration/tests
         continue-on-error: true
         id: run_tests

@@ -158,6 +210,11 @@ jobs:
             echo "All integration tests passed successfully."
           fi

+      - name: Stop Docker containers
+        run: |
+          cd deployment/docker_compose
+          docker compose -f docker-compose.dev.yml -p danswer-stack down -v
+
       - name: Save Docker logs
         if: success() || failure()
         run: |
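Taken together, the new steps bring the compose stack up in multi-tenant mode, run the multi-tenant test package inside the integration image, and tear the stack down with volumes wiped before the standard run starts. For local reproduction, the following is a minimal sketch that mirrors those steps with a Python subprocess wrapper; the wrapper itself is illustrative and not part of this PR, and it assumes Docker is available and a danswer/danswer-integration:test image has already been built.

    # Illustrative local runner mirroring the multi-tenant CI steps above.
    # Not part of this PR; assumes Docker and a locally built
    # danswer/danswer-integration:test image.
    import os
    import subprocess

    COMPOSE_DIR = "deployment/docker_compose"
    COMPOSE = ["docker", "compose", "-f", "docker-compose.dev.yml", "-p", "danswer-stack"]


    def start_stack() -> None:
        # Mirrors "Start Docker containers for multi-tenant tests".
        env = {
            **os.environ,
            "ENABLE_PAID_ENTERPRISE_EDITION_FEATURES": "true",
            "MULTI_TENANT": "true",
            "AUTH_TYPE": "basic",
            "REQUIRE_EMAIL_VERIFICATION": "false",
            "DISABLE_TELEMETRY": "true",
            "IMAGE_TAG": "test",
        }
        subprocess.run([*COMPOSE, "up", "-d"], cwd=COMPOSE_DIR, env=env, check=True)


    def run_multitenant_tests() -> int:
        # Mirrors "Run Multi-Tenant Integration Tests".
        cmd = [
            "docker", "run", "--rm", "--network", "danswer-stack_default",
            "--name", "test-runner",
            "-e", "POSTGRES_HOST=relational_db",
            "-e", "POSTGRES_USER=postgres",
            "-e", "POSTGRES_PASSWORD=password",
            "-e", "POSTGRES_DB=postgres",
            "-e", "VESPA_HOST=index",
            "-e", "REDIS_HOST=cache",
            "-e", "API_SERVER_HOST=api_server",
            "-e", f"OPENAI_API_KEY={os.environ.get('OPENAI_API_KEY', '')}",
            "-e", f"SLACK_BOT_TOKEN={os.environ.get('SLACK_BOT_TOKEN', '')}",
            "-e", "TEST_WEB_HOSTNAME=test-runner",
            "-e", "AUTH_TYPE=cloud",
            "-e", "MULTI_TENANT=true",
            "danswer/danswer-integration:test",
            "/app/tests/integration/multitenant_tests",
        ]
        return subprocess.run(cmd).returncode


    def stop_stack() -> None:
        # Mirrors "Stop multi-tenant Docker containers" (volumes wiped with -v).
        subprocess.run([*COMPOSE, "down", "-v"], cwd=COMPOSE_DIR, check=True)


    if __name__ == "__main__":
        start_stack()
        try:
            raise SystemExit(run_multitenant_tests())
        finally:
            stop_stack()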

backend/danswer/auth/users.py

Lines changed: 4 additions & 1 deletion
@@ -58,6 +58,7 @@
 from danswer.auth.schemas import UserUpdate
 from danswer.configs.app_configs import AUTH_TYPE
 from danswer.configs.app_configs import DISABLE_AUTH
+from danswer.configs.app_configs import DISABLE_VERIFICATION
 from danswer.configs.app_configs import EMAIL_FROM
 from danswer.configs.app_configs import MULTI_TENANT
 from danswer.configs.app_configs import REQUIRE_EMAIL_VERIFICATION
@@ -133,7 +134,9 @@ def get_display_email(email: str | None, space_less: bool = False) -> str:
 def user_needs_to_be_verified() -> bool:
     # all other auth types besides basic should require users to be
     # verified
-    return AUTH_TYPE != AuthType.BASIC or REQUIRE_EMAIL_VERIFICATION
+    return not DISABLE_VERIFICATION and (
+        AUTH_TYPE != AuthType.BASIC or REQUIRE_EMAIL_VERIFICATION
+    )


 def verify_email_is_invited(email: str) -> None:
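The users.py change adds a DISABLE_VERIFICATION escape hatch: when it is set, no user is required to be verified, regardless of auth type. Below is a standalone sketch of the resulting behavior, for illustration only; the config values are passed as parameters and AuthType is stubbed locally here, whereas in the codebase they come from danswer's config modules.

    # Standalone illustration of the updated user_needs_to_be_verified() logic.
    # AuthType is stubbed and the config values are parameters purely for this sketch.
    from enum import Enum


    class AuthType(str, Enum):
        BASIC = "basic"
        CLOUD = "cloud"


    def user_needs_to_be_verified(
        disable_verification: bool,
        auth_type: AuthType,
        require_email_verification: bool,
    ) -> bool:
        # DISABLE_VERIFICATION now overrides everything else; otherwise the old rule
        # applies: any non-basic auth type, or an explicit REQUIRE_EMAIL_VERIFICATION.
        return not disable_verification and (
            auth_type != AuthType.BASIC or require_email_verification
        )


    # Before this change, any non-basic auth type always required verified users;
    # the new flag can now turn that requirement off entirely.
    assert user_needs_to_be_verified(False, AuthType.CLOUD, False) is True
    assert user_needs_to_be_verified(True, AuthType.CLOUD, False) is False
    assert user_needs_to_be_verified(False, AuthType.BASIC, True) is True
    assert user_needs_to_be_verified(False, AuthType.BASIC, False) is False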

backend/danswer/background/celery/apps/indexing.py

Lines changed: 0 additions & 28 deletions
@@ -8,18 +8,11 @@
 from celery.signals import worker_init
 from celery.signals import worker_ready
 from celery.signals import worker_shutdown
-from sqlalchemy.orm import Session

 import danswer.background.celery.apps.app_base as app_base
 from danswer.configs.constants import POSTGRES_CELERY_WORKER_INDEXING_APP_NAME
 from danswer.db.engine import SqlEngine
-from danswer.db.search_settings import get_current_search_settings
-from danswer.db.swap_index import check_index_swap
-from danswer.natural_language_processing.search_nlp_models import EmbeddingModel
-from danswer.natural_language_processing.search_nlp_models import warm_up_bi_encoder
 from danswer.utils.logger import setup_logger
-from shared_configs.configs import INDEXING_MODEL_SERVER_HOST
-from shared_configs.configs import MODEL_SERVER_PORT


 logger = setup_logger()
@@ -67,27 +60,6 @@ def on_worker_init(sender: Any, **kwargs: Any) -> None:
     SqlEngine.set_app_name(POSTGRES_CELERY_WORKER_INDEXING_APP_NAME)
     SqlEngine.init_engine(pool_size=8, max_overflow=0)

-    # TODO: why is this necessary for the indexer to do?
-    engine = SqlEngine.get_engine()
-    with Session(engine) as db_session:
-        check_index_swap(db_session=db_session)
-        search_settings = get_current_search_settings(db_session)
-
-        # So that the first time users aren't surprised by really slow speed of first
-        # batch of documents indexed
-        if search_settings.provider_type is None:
-            logger.notice("Running a first inference to warm up embedding model")
-            embedding_model = EmbeddingModel.from_db_model(
-                search_settings=search_settings,
-                server_host=INDEXING_MODEL_SERVER_HOST,
-                server_port=MODEL_SERVER_PORT,
-            )
-
-            warm_up_bi_encoder(
-                embedding_model=embedding_model,
-            )
-            logger.notice("First inference complete.")
-
     app_base.wait_for_redis(sender, **kwargs)
     app_base.on_secondary_worker_init(sender, **kwargs)
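With those imports and the warm-up block gone, the indexing worker's on_worker_init is reduced to SQL engine setup plus the shared app_base hooks. A condensed view of the remaining body, reconstructed from the unchanged context lines above (signal decorators and the rest of the module omitted):

    # Condensed view of on_worker_init after this change, reconstructed from the
    # surviving context lines; decorators and the rest of the module are omitted.
    from typing import Any

    import danswer.background.celery.apps.app_base as app_base
    from danswer.configs.constants import POSTGRES_CELERY_WORKER_INDEXING_APP_NAME
    from danswer.db.engine import SqlEngine


    def on_worker_init(sender: Any, **kwargs: Any) -> None:
        SqlEngine.set_app_name(POSTGRES_CELERY_WORKER_INDEXING_APP_NAME)
        SqlEngine.init_engine(pool_size=8, max_overflow=0)

        # The index-swap check and embedding-model warm-up that previously ran here
        # are no longer part of the indexing worker's startup path.
        app_base.wait_for_redis(sender, **kwargs)
        app_base.on_secondary_worker_init(sender, **kwargs)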
