Skip to content

Commit de8031b

Browse files
Enabled pushing timeseries data from local run (#424)
* Enabled pushing timeseries data from local run * Exposing memory metrics on local run
1 parent c3eccd6 commit de8031b

File tree

10 files changed

+228
-92
lines changed

10 files changed

+228
-92
lines changed

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[tool.poetry]
22
name = "redisbench-admin"
3-
version = "0.10.19"
3+
version = "0.10.24"
44
description = "Redis benchmark run helper. A wrapper around Redis and Redis Modules benchmark tools ( ftsb_redisearch, memtier_benchmark, redis-benchmark, aibench, etc... )."
55
authors = ["filipecosta90 <filipecosta.90@gmail.com>","Redis Performance Group <performance@redis.com>"]
66
readme = "README.md"

redisbench_admin/compare/compare.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -722,6 +722,7 @@ def from_rts_to_regression_table(
722722
total_comparison_points = 0
723723
noise_waterline = 3
724724
progress = tqdm(unit="benchmark time-series", total=len(test_names))
725+
at_comparison = 0
725726
for test_name in test_names:
726727
multi_value_baseline = check_multi_value_filter(baseline_str)
727728
multi_value_comparison = check_multi_value_filter(comparison_str)

redisbench_admin/run/args.py

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -40,9 +40,10 @@
4040
PROFILERS = os.getenv("PROFILERS", PROFILERS_DEFAULT)
4141
MAX_PROFILERS_PER_TYPE = int(os.getenv("MAX_PROFILERS", 1))
4242
PROFILE_FREQ = os.getenv("PROFILE_FREQ", PROFILE_FREQ_DEFAULT)
43-
KEEP_ENV = bool(os.getenv("KEEP_ENV", False))
43+
KEEP_ENV = bool(int(os.getenv("KEEP_ENV", "0")))
4444
ALLOWED_TOOLS_DEFAULT = "memtier_benchmark,redis-benchmark,redisgraph-benchmark-go,ycsb,go-ycsb,tsbs_run_queries_redistimeseries,tsbs_load_redistimeseries,ftsb_redisearch,aibench_run_inference_redisai_vision,ann-benchmarks"
4545
ALLOWED_BENCH_TOOLS = os.getenv("ALLOWED_BENCH_TOOLS", ALLOWED_TOOLS_DEFAULT)
46+
SKIP_DB_SETUP = bool(int(os.getenv("SKIP_DB_SETUP", "0")))
4647

4748

4849
def common_run_args(parser):
@@ -53,6 +54,12 @@ def common_run_args(parser):
5354
action="store_true",
5455
help="Keep environment and topology up after benchmark.",
5556
)
57+
parser.add_argument(
58+
"--skip-db-setup",
59+
type=bool,
60+
default=SKIP_DB_SETUP,
61+
help="skip db setup/teardown steps. Useful when you want to target an existing DB",
62+
)
5663
parser.add_argument(
5764
"--fail_fast",
5865
required=False,

redisbench_admin/run/common.py

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -699,6 +699,8 @@ def print_results_table_stdout(
699699
setup_name,
700700
test_name,
701701
cpu_usage=None,
702+
kv_overall={},
703+
metric_names=[],
702704
):
703705
# check which metrics to extract
704706
(_, metrics,) = merge_default_and_config_metrics(
@@ -714,6 +716,11 @@ def print_results_table_stdout(
714716
results_matrix = extract_results_table(metrics, results_dict)
715717
if cpu_usage is not None:
716718
results_matrix.append(["Total shards CPU usage %", "", "", cpu_usage])
719+
for metric_name in metric_names:
720+
if metric_name in kv_overall:
721+
metric_value = kv_overall[metric_name]
722+
results_matrix.append([f"Total shards {metric_name}", "", "", metric_value])
723+
717724
results_matrix = [[x[0], "{:.3f}".format(x[3])] for x in results_matrix]
718725
writer = MarkdownTableWriter(
719726
table_name=table_name,

redisbench_admin/run_async/async_terraform.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -189,6 +189,10 @@ def setup_remote_environment(
189189
"github_repo": tf_github_repo,
190190
"triggering_env": tf_triggering_env,
191191
"timeout_secs": tf_timeout_secs,
192+
"Project": tf_github_org,
193+
"project": tf_github_org,
194+
"Environment": tf_github_org,
195+
"environment": tf_github_org,
192196
},
193197
)
194198
return self.retrieve_tf_connection_vars(return_code, tf)

redisbench_admin/run_local/args.py

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,13 +3,29 @@
33
# Copyright (c) 2021., Redis Labs Modules
44
# All rights reserved.
55
#
6+
import os
67

78
from redisbench_admin.run.args import common_run_args
89
from redisbench_admin.run.common import REDIS_BINARY
910

11+
FLUSHALL_AT_START = bool(int(os.getenv("FLUSHALL_AT_START", "0")))
12+
IGNORE_KEYSPACE_ERRORS = bool(int(os.getenv("IGNORE_KEYSPACE_ERRORS", "0")))
13+
1014

1115
def create_run_local_arguments(parser):
1216
parser = common_run_args(parser)
1317
parser.add_argument("--port", type=int, default=6379)
1418
parser.add_argument("--redis-binary", type=str, default=REDIS_BINARY)
19+
parser.add_argument(
20+
"--flushall_on_every_test_start",
21+
type=bool,
22+
default=FLUSHALL_AT_START,
23+
help="At the start of every test send a FLUSHALL",
24+
)
25+
parser.add_argument(
26+
"--ignore_keyspace_errors",
27+
type=bool,
28+
default=IGNORE_KEYSPACE_ERRORS,
29+
help="Ignore keyspace check errors. Will still log them as errors",
30+
)
1531
return parser

redisbench_admin/run_local/local_db.py

Lines changed: 103 additions & 74 deletions
Original file line numberDiff line numberDiff line change
@@ -46,59 +46,16 @@ def local_db_spin(
4646
required_modules,
4747
setup_type,
4848
shard_count,
49+
flushall_on_every_test_start=False,
50+
ignore_keyspace_errors=False,
4951
):
50-
# setup Redis
51-
# copy the rdb to DB machine
52+
redis_conns = []
53+
artifact_version = "n/a"
54+
result = True
5255
temporary_dir = tempfile.mkdtemp()
53-
redis_7 = args.redis_7
54-
logging.info(
55-
"Using local temporary dir to spin up Redis Instance. Path: {}".format(
56-
temporary_dir
57-
)
58-
)
59-
if dbdir_folder is not None:
60-
from distutils.dir_util import copy_tree
61-
62-
copy_tree(dbdir_folder, temporary_dir)
63-
logging.info(
64-
"Copied entire content of {} into temporary path: {}".format(
65-
dbdir_folder, temporary_dir
66-
)
67-
)
68-
(
69-
_,
70-
_,
71-
redis_configuration_parameters,
72-
dataset_load_timeout_secs,
73-
modules_configuration_parameters_map,
74-
) = extract_redis_dbconfig_parameters(benchmark_config, "dbconfig")
7556
cluster_api_enabled = False
76-
logging.info(
77-
"Using a dataset load timeout of {} seconds.".format(dataset_load_timeout_secs)
78-
)
79-
redis_conns = []
8057
if setup_type == "oss-cluster":
8158
cluster_api_enabled = True
82-
shard_host = "127.0.0.1"
83-
redis_processes, redis_conns = spin_up_local_redis_cluster(
84-
binary,
85-
temporary_dir,
86-
shard_count,
87-
shard_host,
88-
args.port,
89-
local_module_file,
90-
redis_configuration_parameters,
91-
dataset_load_timeout_secs,
92-
modules_configuration_parameters_map,
93-
redis_7,
94-
)
95-
96-
status = setup_redis_cluster_from_conns(
97-
redis_conns, shard_count, shard_host, args.port
98-
)
99-
if status is False:
100-
raise Exception("Redis cluster setup failed. Failing test.")
101-
10259
dataset, dataset_name, _, _ = check_dataset_local_requirements(
10360
benchmark_config,
10461
temporary_dir,
@@ -108,35 +65,108 @@ def local_db_spin(
10865
shard_count,
10966
cluster_api_enabled,
11067
)
111-
if setup_type == "oss-standalone":
112-
redis_processes = spin_up_local_redis(
113-
binary,
114-
args.port,
115-
temporary_dir,
116-
local_module_file,
68+
69+
if args.skip_db_setup:
70+
logging.info("Skipping DB Setup...")
71+
if dataset is not None:
72+
logging.info("Given this benchmark requires an RDB load will skip it...")
73+
result = False
74+
return (
75+
result,
76+
artifact_version,
77+
cluster_api_enabled,
78+
redis_conns,
79+
redis_processes,
80+
)
81+
else:
82+
# setup Redis
83+
# copy the rdb to DB machine
84+
redis_7 = args.redis_7
85+
logging.info(
86+
"Using local temporary dir to spin up Redis Instance. Path: {}".format(
87+
temporary_dir
88+
)
89+
)
90+
if dbdir_folder is not None:
91+
from distutils.dir_util import copy_tree
92+
93+
copy_tree(dbdir_folder, temporary_dir)
94+
logging.info(
95+
"Copied entire content of {} into temporary path: {}".format(
96+
dbdir_folder, temporary_dir
97+
)
98+
)
99+
(
100+
_,
101+
_,
117102
redis_configuration_parameters,
118-
dbdir_folder,
119103
dataset_load_timeout_secs,
120104
modules_configuration_parameters_map,
121-
redis_7,
105+
) = extract_redis_dbconfig_parameters(benchmark_config, "dbconfig")
106+
107+
logging.info(
108+
"Using a dataset load timeout of {} seconds.".format(
109+
dataset_load_timeout_secs
110+
)
122111
)
123112

113+
if setup_type == "oss-cluster":
114+
cluster_api_enabled = True
115+
shard_host = "127.0.0.1"
116+
redis_processes, redis_conns = spin_up_local_redis_cluster(
117+
binary,
118+
temporary_dir,
119+
shard_count,
120+
shard_host,
121+
args.port,
122+
local_module_file,
123+
redis_configuration_parameters,
124+
dataset_load_timeout_secs,
125+
modules_configuration_parameters_map,
126+
redis_7,
127+
)
128+
129+
status = setup_redis_cluster_from_conns(
130+
redis_conns, shard_count, shard_host, args.port
131+
)
132+
if status is False:
133+
raise Exception("Redis cluster setup failed. Failing test.")
134+
135+
if setup_type == "oss-standalone":
136+
redis_processes = spin_up_local_redis(
137+
binary,
138+
args.port,
139+
temporary_dir,
140+
local_module_file,
141+
redis_configuration_parameters,
142+
dbdir_folder,
143+
dataset_load_timeout_secs,
144+
modules_configuration_parameters_map,
145+
redis_7,
146+
)
147+
if setup_type == "oss-cluster":
148+
for shardn, redis_process in enumerate(redis_processes):
149+
logging.info(
150+
"Checking if shard #{} process with pid={} is alive".format(
151+
shardn + 1, redis_process.pid
152+
)
153+
)
154+
if is_process_alive(redis_process) is False:
155+
raise Exception("Redis process is not alive. Failing test.")
156+
cluster_init_steps(clusterconfig, redis_conns, local_module_file)
157+
158+
if setup_type == "oss-standalone":
124159
r = redis.Redis(port=args.port)
125160
r.ping()
126-
r.client_setname("redisbench-admin-stadalone")
161+
r.client_setname("redisbench-admin-standalone")
127162
redis_conns.append(r)
128-
if setup_type == "oss-cluster":
129-
for shardn, redis_process in enumerate(redis_processes):
130-
logging.info(
131-
"Checking if shard #{} process with pid={} is alive".format(
132-
shardn + 1, redis_process.pid
133-
)
134-
)
135-
if is_process_alive(redis_process) is False:
136-
raise Exception("Redis process is not alive. Failing test.")
137163

138-
if setup_type == "oss-cluster":
139-
cluster_init_steps(clusterconfig, redis_conns, local_module_file)
164+
if dataset is None:
165+
if flushall_on_every_test_start:
166+
logging.info("Will flush all data at test start...")
167+
for shard_n, shard_conn in enumerate(redis_conns):
168+
logging.info(f"Flushing all in shard {shard_n}...")
169+
shard_conn.flushall()
140170

141171
if check_dbconfig_tool_requirement(benchmark_config):
142172
logging.info("Detected the requirements to load data via client tool")
@@ -175,11 +205,10 @@ def local_db_spin(
175205
)
176206
)
177207

178-
dbconfig_keyspacelen_check(
179-
benchmark_config,
180-
redis_conns,
181-
)
208+
dbconfig_keyspacelen_check(benchmark_config, redis_conns, ignore_keyspace_errors)
182209

183-
run_redis_pre_steps(benchmark_config, redis_conns[0], required_modules)
210+
artifact_version = run_redis_pre_steps(
211+
benchmark_config, redis_conns[0], required_modules
212+
)
184213

185-
return cluster_api_enabled, redis_conns, redis_processes
214+
return result, artifact_version, cluster_api_enabled, redis_conns, redis_processes

0 commit comments

Comments
 (0)