diff --git a/test/cases/41-StreamProcessing/01-Snode/snode_checkpoint.py b/test/cases/41-StreamProcessing/01-Snode/snode_checkpoint.py index f78e8cfc463f..da75a89bba80 100644 --- a/test/cases/41-StreamProcessing/01-Snode/snode_checkpoint.py +++ b/test/cases/41-StreamProcessing/01-Snode/snode_checkpoint.py @@ -19,12 +19,12 @@ def test_stream_dev_basic(self): Since: v3.3.3.7 - Labels: common,ci + Labels: common,ci,skip Jira: None History: - - 2025-06-25 + - 2025-06-25 dapan created """ diff --git a/test/cases/41-StreamProcessing/01-Snode/streambuffersize_verify.py b/test/cases/41-StreamProcessing/01-Snode/snode_params_buffersize.py similarity index 62% rename from test/cases/41-StreamProcessing/01-Snode/streambuffersize_verify.py rename to test/cases/41-StreamProcessing/01-Snode/snode_params_buffersize.py index 315a6509257d..3e921b9e2ae4 100644 --- a/test/cases/41-StreamProcessing/01-Snode/streambuffersize_verify.py +++ b/test/cases/41-StreamProcessing/01-Snode/snode_params_buffersize.py @@ -1,43 +1,51 @@ import time import math import random -from new_test_framework.utils import tdLog, tdSql, tdStream, streamUtil,StreamTableType, StreamTable, cluster +from new_test_framework.utils import ( + tdLog, + tdSql, + tdStream, + streamUtil, + StreamTableType, + StreamTable, + cluster, +) from random import randint import os import subprocess import json -class TestSnodeMgmt: + +class TestStreamParametersMemoryUsage: caseName = "streambuffersize verify" currentDir = os.path.dirname(os.path.abspath(__file__)) runAll = False dbname = "test1" - stbname= "stba" + stbname = "stba" stName = "" resultIdx = "" sliding = 1 subTblNum = 3 tblRowNum = 10 tableList = [] - + def setup_class(cls): tdLog.debug(f"start to execute {__file__}") - def test_snode_mgmt(self): - """streambuffersize verify - - 1. create snode - 2. create stream - 3. modify streambuffersize - 4. check memory usage + def test_params_memory_usage(self): + """Parameter: check memory usage + 1. Create snode + 2. 
Create stream + 3. Modify streambuffersize + 4. Check memory usage Catalog: - Streams:Snode Since: v3.3.3.7 - Labels: common,ci + Labels: common,ci,skip Jira: None @@ -46,42 +54,52 @@ def test_snode_mgmt(self): """ - tdStream.dropAllStreamsAndDbs() - - - self.data() self.createSnodeTest() self.createStream() self.checkStreamRunning() self.checkstreamBufferSize() - - - def data(self): random.seed(42) tdSql.execute("create database test1 vgroups 6;") - tdSql.execute("CREATE STABLE test1.`stba` (`ts` TIMESTAMP ENCODE 'delta-i' COMPRESS 'lz4' LEVEL 'medium', `cts` TIMESTAMP ENCODE 'delta-i' COMPRESS 'lz4' LEVEL 'medium', `cint` INT ENCODE 'simple8b' COMPRESS 'lz4' LEVEL 'medium', `i1` INT ENCODE 'simple8b' COMPRESS 'lz4' LEVEL 'medium') TAGS (`tint` INT, `tdouble` DOUBLE, `tvar` VARCHAR(100), `tnchar` NCHAR(100), `tts` TIMESTAMP, `tbool` BOOL)") - tdSql.execute('CREATE TABLE test1.`a0` USING test1.`stba` (`tint`, `tdouble`, `tvar`, `tnchar`, `tts`, `tbool`) TAGS (44, 3.757254e+01, "klPqiAWzV1F6hSPMjm80YOOZEcSCF", "xOYc37COtmFYhKEUkL8hKVUmJmorOS30uOcmIC12OtNT4hE", 1943455971, true)') - tdSql.execute('CREATE TABLE test1.`a1` USING test1.`stba` (`tint`, `tdouble`, `tvar`, `tnchar`, `tts`, `tbool`) TAGS (19, 6.525488e+01, "jMGdGyha8Q7WZxFBv6XO", "GvDFs3DREMcgidLGjJBZFmM2RbmLY", 439606400, false)') - tdSql.execute('CREATE TABLE test1.`a2` USING test1.`stba` (`tint`, `tdouble`, `tvar`, `tnchar`, `tts`, `tbool`) TAGS (9, 4.416963e+01, "lE0hSUOVxfVkGrORvnnLiOJp", "TMxs9A8VS4", 1291130063, true)') - - tables = ['a0', 'a1', 'a2'] - base_ts = int(time.mktime(time.strptime("2025-01-01 00:00:00", "%Y-%m-%d %H:%M:%S"))) * 1000 - interval_ms = 600 * 1000 + tdSql.execute( + "CREATE STABLE test1.`stba` (`ts` TIMESTAMP ENCODE 'delta-i' COMPRESS 'lz4' LEVEL 'medium', `cts` TIMESTAMP ENCODE 'delta-i' COMPRESS 'lz4' LEVEL 'medium', `cint` INT ENCODE 'simple8b' COMPRESS 'lz4' LEVEL 'medium', `i1` INT ENCODE 'simple8b' COMPRESS 'lz4' LEVEL 'medium') TAGS (`tint` INT, `tdouble` DOUBLE, `tvar` 
VARCHAR(100), `tnchar` NCHAR(100), `tts` TIMESTAMP, `tbool` BOOL)" + ) + tdSql.execute( + 'CREATE TABLE test1.`a0` USING test1.`stba` (`tint`, `tdouble`, `tvar`, `tnchar`, `tts`, `tbool`) TAGS (44, 3.757254e+01, "klPqiAWzV1F6hSPMjm80YOOZEcSCF", "xOYc37COtmFYhKEUkL8hKVUmJmorOS30uOcmIC12OtNT4hE", 1943455971, true)' + ) + tdSql.execute( + 'CREATE TABLE test1.`a1` USING test1.`stba` (`tint`, `tdouble`, `tvar`, `tnchar`, `tts`, `tbool`) TAGS (19, 6.525488e+01, "jMGdGyha8Q7WZxFBv6XO", "GvDFs3DREMcgidLGjJBZFmM2RbmLY", 439606400, false)' + ) + tdSql.execute( + 'CREATE TABLE test1.`a2` USING test1.`stba` (`tint`, `tdouble`, `tvar`, `tnchar`, `tts`, `tbool`) TAGS (9, 4.416963e+01, "lE0hSUOVxfVkGrORvnnLiOJp", "TMxs9A8VS4", 1291130063, true)' + ) + + tables = ["a0", "a1", "a2"] + base_ts = ( + int(time.mktime(time.strptime("2025-01-01 00:00:00", "%Y-%m-%d %H:%M:%S"))) + * 1000 + ) + interval_ms = 600 * 1000 total_rows = 10000 - + for i in range(total_rows): ts = base_ts + i * interval_ms c1 = random.randint(0, 1000) c2 = random.randint(1000, 2000) for tb in tables: - sql = "INSERT INTO test1.%s VALUES (%d,%d, %d, %d)" % (tb, ts,ts, c1, c2) + sql = "INSERT INTO test1.%s VALUES (%d,%d, %d, %d)" % ( + tb, + ts, + ts, + c1, + c2, + ) tdSql.execute(sql) - + def data2(self): random.seed(42) # tdSql.execute("create database test1 vgroups 6;") @@ -89,42 +107,50 @@ def data2(self): # tdSql.execute('CREATE TABLE test1.`a0` USING test1.`stba` (`tint`, `tdouble`, `tvar`, `tnchar`, `tts`, `tbool`) TAGS (44, 3.757254e+01, "klPqiAWzV1F6hSPMjm80YOOZEcSCF", "xOYc37COtmFYhKEUkL8hKVUmJmorOS30uOcmIC12OtNT4hE", 1943455971, true)') # tdSql.execute('CREATE TABLE test1.`a1` USING test1.`stba` (`tint`, `tdouble`, `tvar`, `tnchar`, `tts`, `tbool`) TAGS (19, 6.525488e+01, "jMGdGyha8Q7WZxFBv6XO", "GvDFs3DREMcgidLGjJBZFmM2RbmLY", 439606400, false)') # tdSql.execute('CREATE TABLE test1.`a2` USING test1.`stba` (`tint`, `tdouble`, `tvar`, `tnchar`, `tts`, `tbool`) TAGS (9, 4.416963e+01, 
"lE0hSUOVxfVkGrORvnnLiOJp", "TMxs9A8VS4", 1291130063, true)') - - tables = ['a0', 'a1', 'a2'] - base_ts = int(time.mktime(time.strptime("2025-01-05 00:00:00", "%Y-%m-%d %H:%M:%S"))) * 1000 - interval_ms = 30 * 1000 + + tables = ["a0", "a1", "a2"] + base_ts = ( + int(time.mktime(time.strptime("2025-01-05 00:00:00", "%Y-%m-%d %H:%M:%S"))) + * 1000 + ) + interval_ms = 30 * 1000 total_rows = 10000 - + for i in range(total_rows): ts = base_ts + i * interval_ms c1 = random.randint(0, 1000) c2 = random.randint(1000, 2000) for tb in tables: - sql = "INSERT INTO test1.%s VALUES (%d,%d, %d, %d)" % (tb, ts,ts, c1, c2) - tdSql.execute(sql) - + sql = "INSERT INTO test1.%s VALUES (%d,%d, %d, %d)" % ( + tb, + ts, + ts, + c1, + c2, + ) + tdSql.execute(sql) def prepareData(self): tdLog.info(f"prepare data") tdStream.dropAllStreamsAndDbs() - #wait all dnode ready + # wait all dnode ready time.sleep(5) tdStream.init_database(self.dbname) - + st1 = StreamTable(self.dbname, "st1", StreamTableType.TYPE_SUP_TABLE) st1.createTable(3) st1.append_data(0, self.tblRowNum) - + self.tableList.append("st1") for i in range(0, self.subTblNum + 1): self.tableList.append(f"st1_{i}") - + ntb = StreamTable(self.dbname, "ntb1", StreamTableType.TYPE_NORMAL_TABLE) ntb.createTable() ntb.append_data(0, self.tblRowNum) self.tableList.append(f"ntb1") - + def dataIn(self): tdLog.info(f"insert more data:") config = { @@ -141,7 +167,8 @@ def dataIn(self): "insert_interval": 0, "num_of_records_per_req": 1000, "max_sql_len": 1048576, - "databases": [{ + "databases": [ + { "dbinfo": { "name": "test1", "drop": "no", @@ -150,9 +177,10 @@ def dataIn(self): "precision": "ms", "keep": 36500, "minRows": 100, - "maxRows": 4096 + "maxRows": 4096, }, - "super_tables": [{ + "super_tables": [ + { "name": "stba", "child_table_exists": "no", "childtable_count": 3, @@ -175,28 +203,47 @@ def dataIn(self): "sample_file": "", "tags_file": "", "columns": [ - {"type": "timestamp","name":"cts","count": 1,"start":"2025-02-01 
00:00:00.000"}, - {"type": "int","name":"cint","max":100,"min":-1}, - {"type": "int","name":"i1","max":100,"min":-1} + { + "type": "timestamp", + "name": "cts", + "count": 1, + "start": "2025-02-01 00:00:00.000", + }, + {"type": "int", "name": "cint", "max": 100, "min": -1}, + {"type": "int", "name": "i1", "max": 100, "min": -1}, ], "tags": [ - {"type": "int","name":"tint","max":100,"min":-1}, - {"type": "double","name":"tdouble","max":100,"min":0}, - {"type": "varchar","name":"tvar","len":100,"count": 1}, - {"type": "nchar","name":"tnchar","len":100,"count": 1}, - {"type": "timestamp","name":"tts"}, - {"type": "bool","name":"tbool"} - ] + {"type": "int", "name": "tint", "max": 100, "min": -1}, + { + "type": "double", + "name": "tdouble", + "max": 100, + "min": 0, + }, + { + "type": "varchar", + "name": "tvar", + "len": 100, + "count": 1, + }, + { + "type": "nchar", + "name": "tnchar", + "len": 100, + "count": 1, + }, + {"type": "timestamp", "name": "tts"}, + {"type": "bool", "name": "tbool"}, + ], } - - ] + ], } - ] + ], } - - with open('insert_config.json','w') as f: - json.dump(config,f,indent=4) - tdLog.info('config file ready') + + with open("insert_config.json", "w") as f: + json.dump(config, f, indent=4) + tdLog.info("config file ready") cmd = f"taosBenchmark -f insert_config.json " # output = subprocess.check_output(cmd, shell=True).decode().strip() ret = os.system(cmd) @@ -204,19 +251,19 @@ def dataIn(self): raise Exception("taosBenchmark run failed") time.sleep(5) tdLog.info(f"Insert data:taosBenchmark -f insert_config.json") - def checkResultRows(self, expectedRows): tdSql.checkResultsByFunc( f"select * from information_schema.ins_snodes order by id;", lambda: tdSql.getRows() == expectedRows, - delay=0.5, retry=2 + delay=0.5, + retry=2, ) - def get_memory_usage_mb(self,pid): + def get_memory_usage_mb(self, pid): status_file = "/proc/{}/status".format(pid) try: - with open(status_file, 'r') as f: + with open(status_file, "r") as f: for line in f: if 
line.startswith("VmRSS:"): parts = line.split() @@ -224,83 +271,87 @@ def get_memory_usage_mb(self,pid): # kB -> MB tdLog.info(f"taosd memory: {int(parts[1]) / 1024.0}") return int(parts[1]) / 1024.0 - + except IOError: print("Cannot open status file for pid {}".format(pid)) return 0.0 - - def get_pid_by_cmdline(self,pattern): + + def get_pid_by_cmdline(self, pattern): try: - cmd = "unset LD_PRELOAD;ps -eo pid,cmd | grep '{}' | grep -v grep | grep -v SCREEN".format(pattern) + cmd = "unset LD_PRELOAD;ps -eo pid,cmd | grep '{}' | grep -v grep | grep -v SCREEN".format( + pattern + ) output = subprocess.check_output(cmd, shell=True).decode().strip() # 可多行,默认取第一行 - lines = output.split('\n') + lines = output.split("\n") if lines: pid = int(lines[0].strip().split()[0]) return pid except subprocess.CalledProcessError: return None - - + def checkstreamBufferSize(self): tdLog.info(f"check streamBufferSize") tdSql.query(f"show dnode 1 variables like 'streamBufferSize';") result = tdSql.getData(0, 2) - tdLog.info(f"streamBufferSize is {result}") - - pid = self.get_pid_by_cmdline('taosd -c') - + tdLog.info(f"streamBufferSize is {result}") + + pid = self.get_pid_by_cmdline("taosd -c") + for i in range(15): mem = self.get_memory_usage_mb(pid) time.sleep(2) if mem > float(result): raise Exception(f"ERROR:taosd memory large than streamBufferSize!") - i = i +1 - + i = i + 1 + def createSnodeTest(self): tdLog.info(f"create snode test") tdSql.query("select * from information_schema.ins_dnodes order by id;") - numOfNodes=tdSql.getRows() + numOfNodes = tdSql.getRows() tdLog.info(f"numOfNodes: {numOfNodes}") - + for i in range(1, numOfNodes + 1): tdSql.execute(f"create snode on dnode {i}") tdLog.info(f"create snode on dnode {i} success") self.checkResultRows(numOfNodes) - + tdSql.checkResultsByFunc( - f"show snodes;", - lambda: tdSql.getRows() == numOfNodes, - delay=0.5, retry=2 + f"show snodes;", lambda: tdSql.getRows() == numOfNodes, delay=0.5, retry=2 ) - + def createStream(self): 
tdLog.info(f"create stream ") for i in range(2): - tdSql.execute(f"create stream {self.dbname}.`s{i}` interval(1s) sliding(1s) from {self.dbname}.{self.stbname} stream_options(fill_history('2025-01-01 00:00:00')) into {self.dbname}.`s{i}out` as select _wstart, sum(cint) from %%trows interval(10a) order by _wstart;") + tdSql.execute( + f"create stream {self.dbname}.`s{i}` interval(1s) sliding(1s) from {self.dbname}.{self.stbname} stream_options(fill_history('2025-01-01 00:00:00')) into {self.dbname}.`s{i}out` as select _wstart, sum(cint) from %%trows interval(10a) order by _wstart;" + ) tdLog.info(f"create stream s{i} success!") - + def getMemoryMB(self): cmd = "unset LD_PRELOAD;free -m | grep Mem | awk '{print $2}'" output = subprocess.check_output(cmd, shell=True).decode().strip() tdLog.info(f"total memory is {output} MB") return int(output) # 单位:MB - - + def checkStreamRunning(self): tdLog.info(f"check stream running status:") - timeout = 60 + timeout = 60 start_time = time.time() while True: if time.time() - start_time > timeout: tdLog.error("Timeout waiting for all streams to be running.") tdLog.error(f"Final stream running status: {streamRunning}") - raise TimeoutError(f"Stream status did not reach 'Running' within {timeout}s timeout.") - - tdSql.query(f"select status from information_schema.ins_streams order by stream_name;") - streamRunning=tdSql.getColData(0) + raise TimeoutError( + f"Stream status did not reach 'Running' within {timeout}s timeout." 
+ ) + + tdSql.query( + f"select status from information_schema.ins_streams order by stream_name;" + ) + streamRunning = tdSql.getColData(0) if all(status == "Running" for status in streamRunning): tdLog.info("All Stream running!") diff --git a/test/cases/41-StreamProcessing/01-Snode/test_snode_mgmt_basic.py b/test/cases/41-StreamProcessing/01-Snode/test_snode_mgmt_basic.py index 9b359f3ac830..5105d000b5a1 100644 --- a/test/cases/41-StreamProcessing/01-Snode/test_snode_mgmt_basic.py +++ b/test/cases/41-StreamProcessing/01-Snode/test_snode_mgmt_basic.py @@ -12,12 +12,12 @@ from random import randint -class TestSnodeMgmt: +class TestSnodeMgmtBasic: def setup_class(cls): tdLog.debug(f"start to execute {__file__}") - def test_snode_mgmt(self): + def test_snode_mgmt_basic(self): """Snode: create and drop 1. Create an 8-node dnode and create snodes on each dnode. diff --git a/test/cases/41-StreamProcessing/01-Snode/test_snode_mgmt_replica3.py b/test/cases/41-StreamProcessing/01-Snode/test_snode_mgmt_replica3.py index e57a9d288412..19b5a1216592 100644 --- a/test/cases/41-StreamProcessing/01-Snode/test_snode_mgmt_replica3.py +++ b/test/cases/41-StreamProcessing/01-Snode/test_snode_mgmt_replica3.py @@ -16,7 +16,7 @@ import subprocess -class TestSnodeMgmt: +class TestSnodeMgmtReplica3: caseName = "test_stream_sliding_trigger" currentDir = os.path.dirname(os.path.abspath(__file__)) runAll = False @@ -34,7 +34,7 @@ class TestSnodeMgmt: def setup_class(cls): tdLog.debug(f"start to execute {__file__}") - def test_snode_mgmt(self): + def test_snode_mgmt_replica3(self): """Snode: repeatedly drop with stream 1. Create a 6-node dnode and a 3-replica database.
diff --git a/test/cases/41-StreamProcessing/01-Snode/test_snode_mgmt_replicas.py b/test/cases/41-StreamProcessing/01-Snode/test_snode_mgmt_replicas.py index c0e7edc11ffe..b04f5e22984b 100644 --- a/test/cases/41-StreamProcessing/01-Snode/test_snode_mgmt_replicas.py +++ b/test/cases/41-StreamProcessing/01-Snode/test_snode_mgmt_replicas.py @@ -16,7 +16,7 @@ class TestSnodeReplicas: def setup_class(cls): tdLog.debug(f"start to execute {__file__}") - def test_snode_mgmt(self): + def test_snode_mgmt_replicas(self): """Snode: replica test Test the failover of 2-replica snodes. diff --git a/test/cases/41-StreamProcessing/01-Snode/test_snode_params_alter_value.py b/test/cases/41-StreamProcessing/01-Snode/test_snode_params_alter_value.py index 1e26239cb7aa..cb4396faf548 100644 --- a/test/cases/41-StreamProcessing/01-Snode/test_snode_params_alter_value.py +++ b/test/cases/41-StreamProcessing/01-Snode/test_snode_params_alter_value.py @@ -15,7 +15,7 @@ import subprocess -class TestStreamParametersAlter: +class TestStreamParametersAlterParam: currentDir = os.path.dirname(os.path.abspath(__file__)) dbname = "test1" dbname2 = "test2" @@ -28,8 +28,8 @@ class TestStreamParametersAlter: def setup_class(cls): tdLog.debug(f"start to execute {__file__}") - def test_snode_mgmt(self): - """Parameter: alter value + def test_params_alter_value(self): + """Parameter: alter config Modify the parameters streamBufferSize and numOfMnodeStreamMgmtThreads. 
diff --git a/test/cases/41-StreamProcessing/01-Snode/test_snode_params_check_default.py b/test/cases/41-StreamProcessing/01-Snode/test_snode_params_check_default.py index 09ee34c5cc42..05977e3bff62 100644 --- a/test/cases/41-StreamProcessing/01-Snode/test_snode_params_check_default.py +++ b/test/cases/41-StreamProcessing/01-Snode/test_snode_params_check_default.py @@ -28,7 +28,7 @@ class TestStreamParametersCheckDefault: def setup_class(cls): tdLog.debug(f"start to execute {__file__}") - def test_snode_mgmt(self): + def test_params_check_default(self): """Parameter: check default value Check the default values of the following parameters: diff --git a/test/cases/41-StreamProcessing/01-Snode/test_snode_params_check_maxvalue.py b/test/cases/41-StreamProcessing/01-Snode/test_snode_params_check_maxvalue.py index e3ec1b3db6a4..9effef1d0c24 100644 --- a/test/cases/41-StreamProcessing/01-Snode/test_snode_params_check_maxvalue.py +++ b/test/cases/41-StreamProcessing/01-Snode/test_snode_params_check_maxvalue.py @@ -15,7 +15,7 @@ import subprocess -class TestStreamParametersCheckMaxValue: +class TestStreamParametersCheckMaxVal: currentDir = os.path.dirname(os.path.abspath(__file__)) dbname = "test1" dbname2 = "test2" @@ -28,7 +28,7 @@ class TestStreamParametersCheckMaxValue: def setup_class(cls): tdLog.debug(f"start to execute {__file__}") - def test_snode_mgmt(self): + def test_params_check_maxval(self): """Parameter: check maximum value Check the maximum values of the following parameters: diff --git a/test/cases/41-StreamProcessing/01-Snode/test_snode_params_check_minvalue.py b/test/cases/41-StreamProcessing/01-Snode/test_snode_params_check_minvalue.py index 12de3f24b64d..8136cd5966d8 100644 --- a/test/cases/41-StreamProcessing/01-Snode/test_snode_params_check_minvalue.py +++ b/test/cases/41-StreamProcessing/01-Snode/test_snode_params_check_minvalue.py @@ -15,7 +15,7 @@ import subprocess -class TestStreamParametersCheck: +class TestStreamParametersCheckMinVal: currentDir 
= os.path.dirname(os.path.abspath(__file__)) dbname = "test1" dbname2 = "test2" @@ -28,7 +28,7 @@ class TestStreamParametersCheck: def setup_class(cls): tdLog.debug(f"start to execute {__file__}") - def test_snode_mgmt(self): + def test_params_check_minval(self): """Parameter: check maximum value Check the maximum values of the following parameters: diff --git a/test/cases/41-StreamProcessing/01-Snode/test_snode_privileges_recalc.py b/test/cases/41-StreamProcessing/01-Snode/test_snode_privileges_recalc.py index 3e6b1bb257a1..6572d77dfc1f 100644 --- a/test/cases/41-StreamProcessing/01-Snode/test_snode_privileges_recalc.py +++ b/test/cases/41-StreamProcessing/01-Snode/test_snode_privileges_recalc.py @@ -15,7 +15,7 @@ import subprocess -class TestStreamRecalc: +class TestStreamPrivilegesRecalc: currentDir = os.path.dirname(os.path.abspath(__file__)) dbname = "test1" dbname2 = "test2" @@ -28,7 +28,7 @@ class TestStreamRecalc: def setup_class(cls): tdLog.debug(f"start to execute {__file__}") - def test_snode_mgmt(self): + def test_params_privilege_recalc(self): """Privilege: recalculate 1. Check normal user no write privilege to recalc stream. diff --git a/test/cases/41-StreamProcessing/01-Snode/test_snode_privileges_stream.py b/test/cases/41-StreamProcessing/01-Snode/test_snode_privileges_stream.py index 78bd236aae7b..6b08acec9fd8 100644 --- a/test/cases/41-StreamProcessing/01-Snode/test_snode_privileges_stream.py +++ b/test/cases/41-StreamProcessing/01-Snode/test_snode_privileges_stream.py @@ -15,7 +15,7 @@ import subprocess -class TestStreamPrivileges: +class TestStreamPrivilegesSnodeStream: currentDir = os.path.dirname(os.path.abspath(__file__)) dbname = "test1" dbname2 = "test2" @@ -28,7 +28,7 @@ class TestStreamPrivileges: def setup_class(cls): tdLog.debug(f"start to execute {__file__}") - def test_snode_mgmt(self): + def test_params_snode_stream(self): """Privilege: snode and stream 1. Check normal user create snode.
diff --git a/test/cases/41-StreamProcessing/01-Snode/test_snode_privileges_systable.py b/test/cases/41-StreamProcessing/01-Snode/test_snode_privileges_systable.py index 8fe3b8398247..d7a76f764511 100644 --- a/test/cases/41-StreamProcessing/01-Snode/test_snode_privileges_systable.py +++ b/test/cases/41-StreamProcessing/01-Snode/test_snode_privileges_systable.py @@ -15,7 +15,7 @@ import subprocess -class TestStreamPrivilegesMonitorTable: +class TestStreamPrivilegesSysTable: currentDir = os.path.dirname(os.path.abspath(__file__)) dbname = "test1" dbname2 = "test2" @@ -28,7 +28,7 @@ class TestStreamPrivilegesMonitorTable: def setup_class(cls): tdLog.debug(f"start to execute {__file__}") - def test_snode_mgmt(self): + def test_params_privilege_systable(self): """Privilege: show and systables 1. Check normal user query ins_streams. diff --git a/test/cases/41-StreamProcessing/01-Snode/test_snode_privileges_twodb.py b/test/cases/41-StreamProcessing/01-Snode/test_snode_privileges_twodb.py index 4273447b8a31..cfc986d3057d 100644 --- a/test/cases/41-StreamProcessing/01-Snode/test_snode_privileges_twodb.py +++ b/test/cases/41-StreamProcessing/01-Snode/test_snode_privileges_twodb.py @@ -15,7 +15,7 @@ import subprocess -class TestStreamPrivileges: +class TestStreamPrivilegesTwoDb: currentDir = os.path.dirname(os.path.abspath(__file__)) dbname = "test1" dbname2 = "test2" @@ -28,14 +28,13 @@ class TestStreamPrivileges: def setup_class(cls): tdLog.debug(f"start to execute {__file__}") - def test_snode_mgmt(self): + def test_params_privilege_two_db(self): """Privilege: on multi database 1. Check normal user create stream in two db. 2. Check normal no source db read privilege to create stream. 3. Check grant read privilege on source db to user.
- Catalog: - Streams:Snode diff --git a/test/cases/41-StreamProcessing/02-Stream/stream_recalc_bug1.py b/test/cases/41-StreamProcessing/02-Stream/stream_recalc_bug1.py index 9ca4f181d8eb..360a6c8a04fa 100644 --- a/test/cases/41-StreamProcessing/02-Stream/stream_recalc_bug1.py +++ b/test/cases/41-StreamProcessing/02-Stream/stream_recalc_bug1.py @@ -26,11 +26,11 @@ def test_snode_mgmt(self): Catalog: - - Streams:recalc + - Streams:Stream Since: v3.3.3.7 - Labels: common,ci + Labels: common,ci,skip Jira: None diff --git a/test/cases/41-StreamProcessing/02-Stream/test_create_stream_syntax.py b/test/cases/41-StreamProcessing/02-Stream/stream_syntax.py similarity index 99% rename from test/cases/41-StreamProcessing/02-Stream/test_create_stream_syntax.py rename to test/cases/41-StreamProcessing/02-Stream/stream_syntax.py index 400ddbcbe42b..2cc8c8bac107 100644 --- a/test/cases/41-StreamProcessing/02-Stream/test_create_stream_syntax.py +++ b/test/cases/41-StreamProcessing/02-Stream/stream_syntax.py @@ -288,7 +288,7 @@ 'replace', 'ltrim', 'rtrim', 'trim' ] -class TestStreamSubqueryBasic: +class TestStreamSyntax: def setup_class(cls): tdLog.debug(f"start to execute {__file__}") @@ -303,7 +303,7 @@ def test_create_stream_syntax(self): Since: v3.3.3.7 - Labels: common,ci + Labels: common,ci,skip Jira: None diff --git a/test/cases/41-StreamProcessing/02-Stream/stream_checkname.py b/test/cases/41-StreamProcessing/02-Stream/test_stream_check_name.py similarity index 65% rename from test/cases/41-StreamProcessing/02-Stream/stream_checkname.py rename to test/cases/41-StreamProcessing/02-Stream/test_stream_check_name.py index d38a4d0e8504..cd71b1280ee6 100644 --- a/test/cases/41-StreamProcessing/02-Stream/stream_checkname.py +++ b/test/cases/41-StreamProcessing/02-Stream/test_stream_check_name.py @@ -1,12 +1,21 @@ import time import math import random -from new_test_framework.utils import tdLog, tdSql, tdStream, streamUtil,StreamTableType, StreamTable, cluster +from
new_test_framework.utils import ( + tdLog, + tdSql, + tdStream, + streamUtil, + StreamTableType, + StreamTable, + cluster, +) from random import randint import os import subprocess -class TestSnodeMgmt: + +class TestStreamCheckName: caseName = "" currentDir = os.path.dirname(os.path.abspath(__file__)) runAll = False @@ -20,17 +29,17 @@ class TestSnodeMgmt: subTblNum = 3 tblRowNum = 10 tableList = [] - + def setup_class(cls): tdLog.debug(f"start to execute {__file__}") - def test_snode_mgmt(self): - """Check stream name test - - 1. Check stream name + def test_stream_illegal_name(self): + """Stream: check illegal name + + Test stream with illegal names. Catalog: - - Streams:Snode + - Streams:Stream Since: v3.3.3.7 @@ -43,10 +52,7 @@ def test_snode_mgmt(self): """ - tdStream.dropAllStreamsAndDbs() - - self.prepareData() self.createSnodeTest() @@ -57,15 +63,19 @@ def test_snode_mgmt(self): tdSql.query("select * from test1.`A123!@#$%^&*,;'[]-=+_out`;") if tdSql.getRows() < 4: raise Exception(f"ERROR: check stream out table error") - + tdSql.query("select * from test1.`123456out`;") if tdSql.getRows() < 4: raise Exception(f"ERROR: check stream out table error") - - #check stream name length - self.createOneStream("ashdjfklhgt49hg84g89j4hjq904j9m9vm94jg9j4gj94jg90qj490j2390hr823h8bnbuhu4h8gh48gj834g894j0g4j30gj0g4jg2ij9t0j2498gn498gn894ng9843ng894gk9j4e9gj49gh9jg90qj490j2390hr823hfj38jg84gh84h89gh48h8") + + # check stream name length + self.createOneStream( + "ashdjfklhgt49hg84g89j4hjq904j9m9vm94jg9j4gj94jg90qj490j2390hr823h8bnbuhu4h8gh48gj834g894j0g4j30gj0g4jg2ij9t0j2498gn498gn894ng9843ng894gk9j4e9gj49gh9jg90qj490j2390hr823hfj38jg84gh84h89gh48h8" + ) self.checkStreamRunning() - tdSql.query("select * from test1.`ashdjfklhgt49hg84g89j4hjq904j9m9vm94jg9j4gj94jg90qj490j2390hr823h8bnbuhu4h8gh48gj834g894j0g4j30gj0g4jg2ij9t0j2498gn498gn894ng9843ng894gk9j4e9gj49gh9jg90qj490j2390hr823hfj38jg84gh84h89gh48h8out`;") + tdSql.query( + "select * from 
test1.`ashdjfklhgt49hg84g89j4hjq904j9m9vm94jg9j4gj94jg90qj490j2390hr823h8bnbuhu4h8gh48gj834g894j0g4j30gj0g4jg2ij9t0j2498gn498gn894ng9843ng894gk9j4e9gj49gh9jg90qj490j2390hr823hfj38jg84gh84h89gh48h8out`;" + ) time.sleep(10) if tdSql.getRows() < 4: raise Exception(f"ERROR: check stream out table error") @@ -74,62 +84,62 @@ def prepareData(self): tdLog.info(f"prepare data") tdStream.dropAllStreamsAndDbs() - #wait all dnode ready + # wait all dnode ready time.sleep(5) tdStream.init_database(self.dbname) - + st1 = StreamTable(self.dbname, "st1", StreamTableType.TYPE_SUP_TABLE) st1.createTable(3) st1.append_data(0, self.tblRowNum) - + self.tableList.append("st1") for i in range(0, self.subTblNum + 1): self.tableList.append(f"st1_{i}") - + ntb = StreamTable(self.dbname, "ntb1", StreamTableType.TYPE_NORMAL_TABLE) ntb.createTable() ntb.append_data(0, self.tblRowNum) self.tableList.append(f"ntb1") - - - def createOneStream(self,stname): + + def createOneStream(self, stname): sql = ( - f"create stream `{stname}` sliding(1s) from st1 partition by tbname " - "stream_options(fill_history('2025-01-01 00:00:00')) " - f"into `{stname}out` as " - "select cts, cint, %%tbname from st1 " - "where cint > 5 and tint > 0 and %%tbname like '%%2' " - "order by cts;" + f"create stream `{stname}` sliding(1s) from st1 partition by tbname " + "stream_options(fill_history('2025-01-01 00:00:00')) " + f"into `{stname}out` as " + "select cts, cint, %%tbname from st1 " + "where cint > 5 and tint > 0 and %%tbname like '%%2' " + "order by cts;" ) try: tdSql.execute(sql) except Exception as e: - if "No stream available snode now" not in str(e): - raise Exception(f" user cant create stream no snode ,but create success") - + if "No stream available snode now" not in str(e): + raise Exception( + f" user cant create stream no snode ,but create success" + ) + def checkResultRows(self, expectedRows): tdSql.checkResultsByFunc( f"select * from information_schema.ins_snodes order by id;", lambda: tdSql.getRows() 
== expectedRows, - delay=0.5, retry=2 + delay=0.5, + retry=2, ) - + def createSnodeTest(self): tdLog.info(f"create snode test") tdSql.query("select * from information_schema.ins_dnodes order by id;") - numOfNodes=tdSql.getRows() + numOfNodes = tdSql.getRows() tdLog.info(f"numOfNodes: {numOfNodes}") - + for i in range(1, numOfNodes + 1): tdSql.execute(f"create snode on dnode {i}") tdLog.info(f"create snode on dnode {i} success") self.checkResultRows(numOfNodes) - + tdSql.checkResultsByFunc( - f"show snodes;", - lambda: tdSql.getRows() == numOfNodes, - delay=0.5, retry=2 + f"show snodes;", lambda: tdSql.getRows() == numOfNodes, delay=0.5, retry=2 ) def dropAllSnodeTest(self): @@ -137,20 +147,18 @@ def dropAllSnodeTest(self): tdSql.query("select * from information_schema.ins_snodes order by id;") numOfSnodes = tdSql.getRows() tdLog.info(f"numOfSnodes: {numOfSnodes}") - - for i in range(1, numOfSnodes ): + + for i in range(1, numOfSnodes): tdSql.execute(f"drop snode on dnode {i}") tdLog.info(f"drop snode {i} success") - + self.checkResultRows(1) - + tdSql.checkResultsByFunc( - f"show snodes;", - lambda: tdSql.getRows() == 1, - delay=0.5, retry=2 + f"show snodes;", lambda: tdSql.getRows() == 1, delay=0.5, retry=2 ) - - numOfRows=tdSql.execute(f"drop snode on dnode {numOfSnodes}") + + numOfRows = tdSql.execute(f"drop snode on dnode {numOfSnodes}") if numOfRows != 0: raise Exception(f" drop all snodes failed! 
") tdSql.query("select * from information_schema.ins_snodes order by id;") @@ -159,106 +167,116 @@ def dropAllSnodeTest(self): def dropOneSnodeTest(self): tdSql.query("select * from information_schema.ins_snodes order by id;") - numOfSnodes=tdSql.getRows() - #只有一个 snode 的时候不再执行删除 - if numOfSnodes >1: + numOfSnodes = tdSql.getRows() + # 只有一个 snode 的时候不再执行删除 + if numOfSnodes > 1: tdLog.info(f"drop one snode test") - tdSql.query("select * from information_schema.ins_streams order by stream_name;") - snodeid = tdSql.getData(0,6) + tdSql.query( + "select * from information_schema.ins_streams order by stream_name;" + ) + snodeid = tdSql.getData(0, 6) tdSql.execute(f"drop snode on dnode {snodeid}") tdLog.info(f"drop snode {snodeid} success") - #drop snode后流状态有延迟,需要等待才能看到 failed 状态出现 + # drop snode后流状态有延迟,需要等待才能看到 failed 状态出现 time.sleep(15) - - + def createStream(self): tdLog.info(f"create stream ") tdSql.query("select * from information_schema.ins_dnodes order by id;") - numOfNodes=tdSql.getRows() - for i in range(1,numOfNodes+1): - tdSql.execute(f"create stream `s{i}` sliding(1s) from st1 stream_options(fill_history('2025-01-01 00:00:00')) into `s{i}out` as select cts, cint from st1 where _tcurrent_ts % 2 = 0 order by cts;") + numOfNodes = tdSql.getRows() + for i in range(1, numOfNodes + 1): + tdSql.execute( + f"create stream `s{i}` sliding(1s) from st1 stream_options(fill_history('2025-01-01 00:00:00')) into `s{i}out` as select cts, cint from st1 where _tcurrent_ts % 2 = 0 order by cts;" + ) tdLog.info(f"create stream s{i} success!") # tdSql.execute("create stream `s2` sliding(1s) from st1 partition by tint, tbname stream_options(fill_history('2025-01-01 00:00:00')) into `s2out` as select cts, cint from st1 order by cts limit 3;") # tdSql.execute("create stream `s3` sliding(1s) from st1 partition by tbname stream_options(pre_filter(cint>2)|fill_history('2025-01-01 00:00:00')) into `s3out` as select cts, cint, %%tbname from %%trows where cint >15 and tint >0 and %%tbname 
like '%2' order by cts;") # tdSql.execute("create stream `s4` sliding(1s) from st1 stream_options(fill_history('2025-01-01 00:00:00')) into `s4out` as select _tcurrent_ts, cint from st1 order by cts limit 4;") - - def dropOneStream(self): tdLog.info(f"drop one stream: ") - tdSql.query("select * from information_schema.ins_streams order by stream_name;") + tdSql.query( + "select * from information_schema.ins_streams order by stream_name;" + ) numOfStreams = tdSql.getRows() tdLog.info(f"Total streams:{numOfStreams}") - streamid = tdSql.getData(0,0) + streamid = tdSql.getData(0, 0) tdSql.execute(f"drop stream {streamid}") tdLog.info(f"drop stream {streamid} success") - - tdSql.query("select * from information_schema.ins_streams order by stream_name;") + + tdSql.query( + "select * from information_schema.ins_streams order by stream_name;" + ) numOfStreams = tdSql.getRows() tdLog.info(f"Total streams:{numOfStreams}") - + def dropOneDnode(self): tdSql.query("select * from information_schema.ins_dnodes order by id;") numOfDnodes = tdSql.getRows() tdLog.info(f"Total dnodes:{numOfDnodes}") - - tdSql.query(f"select `replica` from information_schema.ins_databases where name='{self.dbname}'") - numOfReplica = tdSql.getData(0,0) - - if numOfDnodes ==3 and numOfReplica == 3: + + tdSql.query( + f"select `replica` from information_schema.ins_databases where name='{self.dbname}'" + ) + numOfReplica = tdSql.getData(0, 0) + + if numOfDnodes == 3 and numOfReplica == 3: tdLog.info(f"Total dndoes: 3,replica:3, can not drop dnode.") return - if numOfDnodes >2: + if numOfDnodes > 2: tdLog.info(f"drop one dnode: ") tdSql.query("select * from information_schema.ins_dnodes order by id;") - dnodeid = tdSql.getData(2,0) + dnodeid = tdSql.getData(2, 0) tdSql.execute(f"drop dnode {dnodeid}") tdLog.info(f"drop dnode {dnodeid} success") - + tdSql.query("select * from information_schema.ins_dnodes order by id;") numOfDnodes = tdSql.getRows() tdLog.info(f"Total dnodes:{numOfDnodes}") # time.sleep(3) 
- + def killOneDnode(self): tdSql.query("select * from information_schema.ins_dnodes order by id;") numOfDnodes = tdSql.getRows() - if numOfDnodes >2: + if numOfDnodes > 2: tdLog.info(f"kill one dnode: ") cmd = ( - f"ps -ef | grep -wi taosd | grep 'dnode{numOfDnodes}/cfg' " - "| grep -v grep | awk '{print $2}' | xargs kill -9 > /dev/null 2>&1" + f"ps -ef | grep -wi taosd | grep 'dnode{numOfDnodes}/cfg' " + "| grep -v grep | awk '{print $2}' | xargs kill -9 > /dev/null 2>&1" ) subprocess.run(cmd, shell=True) tdLog.info(f"kill dndoe {numOfDnodes} success") - #kill dnode后流状态有延迟,需要等待才能看到 failed 状态出现 + # kill dnode后流状态有延迟,需要等待才能看到 failed 状态出现 time.sleep(15) - + def killOneDnode2(self): tdSql.query("select * from information_schema.ins_dnodes order by id;") numOfDnodes = tdSql.getRows() - if numOfDnodes >2: + if numOfDnodes > 2: tdLog.info(f"kill one dnode: ") - tdDnodes=cluster.dnodes + tdDnodes = cluster.dnodes tdDnodes[numOfDnodes].stoptaosd() # tdDnodes[numOfDnodes].starttaosd() - - + def checkStreamRunning(self): tdLog.info(f"check stream running status:") - timeout = 60 + timeout = 60 start_time = time.time() while True: if time.time() - start_time > timeout: tdLog.error("Timeout waiting for all streams to be running.") tdLog.error(f"Final stream running status: {streamRunning}") - raise TimeoutError(f"Stream status did not reach 'Running' within {timeout}s timeout.") - - tdSql.query(f"select status from information_schema.ins_streams order by stream_name;") - streamRunning=tdSql.getColData(0) + raise TimeoutError( + f"Stream status did not reach 'Running' within {timeout}s timeout." 
+ ) + + tdSql.query( + f"select status from information_schema.ins_streams order by stream_name;" + ) + streamRunning = tdSql.getColData(0) if all(status == "Running" for status in streamRunning): tdLog.info("All Stream running!") diff --git a/test/cases/41-StreamProcessing/02-Stream/stream_long_name.py b/test/cases/41-StreamProcessing/02-Stream/test_stream_long_name.py similarity index 66% rename from test/cases/41-StreamProcessing/02-Stream/stream_long_name.py rename to test/cases/41-StreamProcessing/02-Stream/test_stream_long_name.py index 09c6b3e93b19..93aed78c5402 100644 --- a/test/cases/41-StreamProcessing/02-Stream/stream_long_name.py +++ b/test/cases/41-StreamProcessing/02-Stream/test_stream_long_name.py @@ -1,12 +1,21 @@ import time import math import random -from new_test_framework.utils import tdLog, tdSql, tdStream, streamUtil,StreamTableType, StreamTable, cluster +from new_test_framework.utils import ( + tdLog, + tdSql, + tdStream, + streamUtil, + StreamTableType, + StreamTable, + cluster, +) from random import randint import os import subprocess -class TestSnodeMgmt: + +class TestStreamLongName: caseName = "" currentDir = os.path.dirname(os.path.abspath(__file__)) runAll = False @@ -20,17 +29,17 @@ class TestSnodeMgmt: subTblNum = 3 tblRowNum = 10 tableList = [] - + def setup_class(cls): tdLog.debug(f"start to execute {__file__}") - def test_snode_mgmt(self): - """Check stream long name test - - 1. Check stream long name + def test_stream_long_name(self): + """Stream: check long name + + Test stream with excessively long names. 
Catalog: - - Streams:Snode + - Streams:Stream Since: v3.3.3.7 @@ -43,10 +52,7 @@ def test_snode_mgmt(self): """ - tdStream.dropAllStreamsAndDbs() - - self.prepareData() self.createSnodeTest() @@ -58,74 +64,72 @@ def test_snode_mgmt(self): # 检查是否已经存在 tdSql.query(f"select stream_name from information_schema.ins_streams;") - stname=tdSql.getData(0,0) + stname = tdSql.getData(0, 0) tdLog.info(f"stream name of ins_streams:{stname}") if stream_name != stname: raise Exception("stream name of ins_streams != stream_name") - - def prepareData(self): tdLog.info(f"prepare data") tdStream.dropAllStreamsAndDbs() - #wait all dnode ready + # wait all dnode ready time.sleep(5) tdStream.init_database(self.dbname) - + st1 = StreamTable(self.dbname, "st1", StreamTableType.TYPE_SUP_TABLE) st1.createTable(3) st1.append_data(0, self.tblRowNum) - + self.tableList.append("st1") for i in range(0, self.subTblNum + 1): self.tableList.append(f"st1_{i}") - + ntb = StreamTable(self.dbname, "ntb1", StreamTableType.TYPE_NORMAL_TABLE) ntb.createTable() ntb.append_data(0, self.tblRowNum) self.tableList.append(f"ntb1") - - - def createOneStream(self,stname): + + def createOneStream(self, stname): tdLog.info(f"create stream:") sql = ( - f"create stream `{stname}` sliding(1s) from st1 partition by tbname " - "stream_options(fill_history('2025-01-01 00:00:00')) " - f"into `{stname}out` as " - "select cts, cint, %%tbname from st1 " - "where cint > 5 and tint > 0 and %%tbname like '%%2' " - "order by cts;" + f"create stream `{stname}` sliding(1s) from st1 partition by tbname " + "stream_options(fill_history('2025-01-01 00:00:00')) " + f"into `{stname}out` as " + "select cts, cint, %%tbname from st1 " + "where cint > 5 and tint > 0 and %%tbname like '%%2' " + "order by cts;" ) tdLog.info(f"create stream:{sql}") try: tdSql.execute(sql) except Exception as e: - if "No stream available snode now" not in str(e): - raise Exception(f" user cant create stream no snode ,but create success") - + if "No stream 
available snode now" not in str(e): + raise Exception( + f" user cant create stream no snode ,but create success" + ) + def checkResultRows(self, expectedRows): tdSql.checkResultsByFunc( f"select * from information_schema.ins_snodes order by id;", lambda: tdSql.getRows() == expectedRows, - delay=0.5, retry=2 + delay=0.5, + retry=2, ) - + def createSnodeTest(self): tdLog.info(f"create snode test") tdSql.query("select * from information_schema.ins_dnodes order by id;") - numOfNodes=tdSql.getRows() + numOfNodes = tdSql.getRows() tdLog.info(f"numOfNodes: {numOfNodes}") - + for i in range(1, numOfNodes + 1): tdSql.execute(f"create snode on dnode {i}") tdLog.info(f"create snode on dnode {i} success") self.checkResultRows(numOfNodes) - + tdSql.checkResultsByFunc( - f"show snodes;", - lambda: tdSql.getRows() == numOfNodes, - delay=0.5, retry=2 + f"show snodes;", lambda: tdSql.getRows() == numOfNodes, delay=0.5, retry=2 ) def dropAllSnodeTest(self): @@ -133,20 +137,18 @@ def dropAllSnodeTest(self): tdSql.query("select * from information_schema.ins_snodes order by id;") numOfSnodes = tdSql.getRows() tdLog.info(f"numOfSnodes: {numOfSnodes}") - - for i in range(1, numOfSnodes ): + + for i in range(1, numOfSnodes): tdSql.execute(f"drop snode on dnode {i}") tdLog.info(f"drop snode {i} success") - + self.checkResultRows(1) - + tdSql.checkResultsByFunc( - f"show snodes;", - lambda: tdSql.getRows() == 1, - delay=0.5, retry=2 + f"show snodes;", lambda: tdSql.getRows() == 1, delay=0.5, retry=2 ) - - numOfRows=tdSql.execute(f"drop snode on dnode {numOfSnodes}") + + numOfRows = tdSql.execute(f"drop snode on dnode {numOfSnodes}") if numOfRows != 0: raise Exception(f" drop all snodes failed! 
") tdSql.query("select * from information_schema.ins_snodes order by id;") @@ -155,103 +157,113 @@ def dropAllSnodeTest(self): def dropOneSnodeTest(self): tdSql.query("select * from information_schema.ins_snodes order by id;") - numOfSnodes=tdSql.getRows() - #只有一个 snode 的时候不再执行删除 - if numOfSnodes >1: + numOfSnodes = tdSql.getRows() + # 只有一个 snode 的时候不再执行删除 + if numOfSnodes > 1: tdLog.info(f"drop one snode test") - tdSql.query("select * from information_schema.ins_streams order by stream_name;") - snodeid = tdSql.getData(0,6) + tdSql.query( + "select * from information_schema.ins_streams order by stream_name;" + ) + snodeid = tdSql.getData(0, 6) tdSql.execute(f"drop snode on dnode {snodeid}") tdLog.info(f"drop snode {snodeid} success") - #drop snode后流状态有延迟,需要等待才能看到 failed 状态出现 + # drop snode后流状态有延迟,需要等待才能看到 failed 状态出现 time.sleep(15) - - + def createStream(self): tdLog.info(f"create stream ") tdSql.query("select * from information_schema.ins_dnodes order by id;") - numOfNodes=tdSql.getRows() - for i in range(1,numOfNodes+1): - tdSql.execute(f"create stream `s{i}` sliding(1s) from st1 stream_options(fill_history('2025-01-01 00:00:00')) into `s{i}out` as select cts, cint from st1 where _tcurrent_ts % 2 = 0 order by cts;") + numOfNodes = tdSql.getRows() + for i in range(1, numOfNodes + 1): + tdSql.execute( + f"create stream `s{i}` sliding(1s) from st1 stream_options(fill_history('2025-01-01 00:00:00')) into `s{i}out` as select cts, cint from st1 where _tcurrent_ts % 2 = 0 order by cts;" + ) tdLog.info(f"create stream s{i} success!") - - def dropOneStream(self): tdLog.info(f"drop one stream: ") - tdSql.query("select * from information_schema.ins_streams order by stream_name;") + tdSql.query( + "select * from information_schema.ins_streams order by stream_name;" + ) numOfStreams = tdSql.getRows() tdLog.info(f"Total streams:{numOfStreams}") - streamid = tdSql.getData(0,0) + streamid = tdSql.getData(0, 0) tdSql.execute(f"drop stream {streamid}") tdLog.info(f"drop stream 
{streamid} success") - - tdSql.query("select * from information_schema.ins_streams order by stream_name;") + + tdSql.query( + "select * from information_schema.ins_streams order by stream_name;" + ) numOfStreams = tdSql.getRows() tdLog.info(f"Total streams:{numOfStreams}") - + def dropOneDnode(self): tdSql.query("select * from information_schema.ins_dnodes order by id;") numOfDnodes = tdSql.getRows() tdLog.info(f"Total dnodes:{numOfDnodes}") - - tdSql.query(f"select `replica` from information_schema.ins_databases where name='{self.dbname}'") - numOfReplica = tdSql.getData(0,0) - - if numOfDnodes ==3 and numOfReplica == 3: + + tdSql.query( + f"select `replica` from information_schema.ins_databases where name='{self.dbname}'" + ) + numOfReplica = tdSql.getData(0, 0) + + if numOfDnodes == 3 and numOfReplica == 3: tdLog.info(f"Total dndoes: 3,replica:3, can not drop dnode.") return - if numOfDnodes >2: + if numOfDnodes > 2: tdLog.info(f"drop one dnode: ") tdSql.query("select * from information_schema.ins_dnodes order by id;") - dnodeid = tdSql.getData(2,0) + dnodeid = tdSql.getData(2, 0) tdSql.execute(f"drop dnode {dnodeid}") tdLog.info(f"drop dnode {dnodeid} success") - + tdSql.query("select * from information_schema.ins_dnodes order by id;") numOfDnodes = tdSql.getRows() tdLog.info(f"Total dnodes:{numOfDnodes}") # time.sleep(3) - + def killOneDnode(self): tdSql.query("select * from information_schema.ins_dnodes order by id;") numOfDnodes = tdSql.getRows() - if numOfDnodes >2: + if numOfDnodes > 2: tdLog.info(f"kill one dnode: ") cmd = ( - f"ps -ef | grep -wi taosd | grep 'dnode{numOfDnodes}/cfg' " - "| grep -v grep | awk '{print $2}' | xargs kill -9 > /dev/null 2>&1" + f"ps -ef | grep -wi taosd | grep 'dnode{numOfDnodes}/cfg' " + "| grep -v grep | awk '{print $2}' | xargs kill -9 > /dev/null 2>&1" ) subprocess.run(cmd, shell=True) tdLog.info(f"kill dndoe {numOfDnodes} success") - #kill dnode后流状态有延迟,需要等待才能看到 failed 状态出现 + # kill dnode后流状态有延迟,需要等待才能看到 failed 状态出现 
time.sleep(15) - + def killOneDnode2(self): tdSql.query("select * from information_schema.ins_dnodes order by id;") numOfDnodes = tdSql.getRows() - if numOfDnodes >2: + if numOfDnodes > 2: tdLog.info(f"kill one dnode: ") - tdDnodes=cluster.dnodes + tdDnodes = cluster.dnodes tdDnodes[numOfDnodes].stoptaosd() # tdDnodes[numOfDnodes].starttaosd() - - + def checkStreamRunning(self): tdLog.info(f"check stream running status:") - timeout = 60 + timeout = 60 start_time = time.time() while True: if time.time() - start_time > timeout: tdLog.error("Timeout waiting for all streams to be running.") tdLog.error(f"Final stream running status: {streamRunning}") - raise TimeoutError(f"Stream status did not reach 'Running' within {timeout}s timeout.") - - tdSql.query(f"select status from information_schema.ins_streams order by stream_name;") - streamRunning=tdSql.getColData(0) + raise TimeoutError( + f"Stream status did not reach 'Running' within {timeout}s timeout." + ) + + tdSql.query( + f"select status from information_schema.ins_streams order by stream_name;" + ) + streamRunning = tdSql.getColData(0) if all(status == "Running" for status in streamRunning): tdLog.info("All Stream running!") diff --git a/test/cases/41-StreamProcessing/02-Stream/stream_nosnode.py b/test/cases/41-StreamProcessing/02-Stream/test_stream_no_snode.py similarity index 67% rename from test/cases/41-StreamProcessing/02-Stream/stream_nosnode.py rename to test/cases/41-StreamProcessing/02-Stream/test_stream_no_snode.py index 3f2f6c04faa5..07455a0d0c7a 100644 --- a/test/cases/41-StreamProcessing/02-Stream/stream_nosnode.py +++ b/test/cases/41-StreamProcessing/02-Stream/test_stream_no_snode.py @@ -1,12 +1,21 @@ import time import math import random -from new_test_framework.utils import tdLog, tdSql, tdStream, streamUtil,StreamTableType, StreamTable, cluster +from new_test_framework.utils import ( + tdLog, + tdSql, + tdStream, + streamUtil, + StreamTableType, + StreamTable, + cluster, +) from random import 
randint import os import subprocess -class TestSnodeMgmt: + +class TestStreamNoSnode: caseName = "" currentDir = os.path.dirname(os.path.abspath(__file__)) runAll = False @@ -20,17 +29,17 @@ class TestSnodeMgmt: subTblNum = 3 tblRowNum = 10 tableList = [] - + def setup_class(cls): tdLog.debug(f"start to execute {__file__}") - def test_snode_mgmt(self): - """No Snode create stream test - - 1. create stream no snode + def test_stream_no_snode(self): + """Stream: check no snode + + Test that streams cannot be created without snode. Catalog: - - Streams:Snode + - Streams:Stream Since: v3.3.3.7 @@ -43,76 +52,71 @@ def test_snode_mgmt(self): """ - tdStream.dropAllStreamsAndDbs() - - self.prepareData() self.createOneStream() - - def prepareData(self): tdLog.info(f"prepare data") tdStream.dropAllStreamsAndDbs() - #wait all dnode ready + # wait all dnode ready time.sleep(5) tdStream.init_database(self.dbname) - + st1 = StreamTable(self.dbname, "st1", StreamTableType.TYPE_SUP_TABLE) st1.createTable(3) st1.append_data(0, self.tblRowNum) - + self.tableList.append("st1") for i in range(0, self.subTblNum + 1): self.tableList.append(f"st1_{i}") - + ntb = StreamTable(self.dbname, "ntb1", StreamTableType.TYPE_NORMAL_TABLE) ntb.createTable() ntb.append_data(0, self.tblRowNum) self.tableList.append(f"ntb1") - - + def createOneStream(self): sql = ( - "create stream `s99` sliding(1s) from st1 partition by tbname " - "stream_options(fill_history('2025-01-01 00:00:00')) " - "into `s99out` as " - "select cts, cint, %%tbname from st1 " - "where cint > 5 and tint > 0 and %%tbname like '%%2' " - "order by cts;" + "create stream `s99` sliding(1s) from st1 partition by tbname " + "stream_options(fill_history('2025-01-01 00:00:00')) " + "into `s99out` as " + "select cts, cint, %%tbname from st1 " + "where cint > 5 and tint > 0 and %%tbname like '%%2' " + "order by cts;" ) try: tdSql.execute(sql) except Exception as e: - if "No Snode is available" not in str(e): - raise Exception(f" user cant 
create stream no snode ,but create success") - + if "No Snode is available" not in str(e): + raise Exception( + f" user cant create stream no snode ,but create success" + ) + def checkResultRows(self, expectedRows): tdSql.checkResultsByFunc( f"select * from information_schema.ins_snodes order by id;", lambda: tdSql.getRows() == expectedRows, - delay=0.5, retry=2 + delay=0.5, + retry=2, ) - + def createSnodeTest(self): tdLog.info(f"create snode test") tdSql.query("select * from information_schema.ins_dnodes order by id;") - numOfNodes=tdSql.getRows() + numOfNodes = tdSql.getRows() tdLog.info(f"numOfNodes: {numOfNodes}") - + for i in range(1, numOfNodes + 1): tdSql.execute(f"create snode on dnode {i}") tdLog.info(f"create snode on dnode {i} success") self.checkResultRows(numOfNodes) - + tdSql.checkResultsByFunc( - f"show snodes;", - lambda: tdSql.getRows() == numOfNodes, - delay=0.5, retry=2 + f"show snodes;", lambda: tdSql.getRows() == numOfNodes, delay=0.5, retry=2 ) def dropAllSnodeTest(self): @@ -120,20 +124,18 @@ def dropAllSnodeTest(self): tdSql.query("select * from information_schema.ins_snodes order by id;") numOfSnodes = tdSql.getRows() tdLog.info(f"numOfSnodes: {numOfSnodes}") - - for i in range(1, numOfSnodes ): + + for i in range(1, numOfSnodes): tdSql.execute(f"drop snode on dnode {i}") tdLog.info(f"drop snode {i} success") - + self.checkResultRows(1) - + tdSql.checkResultsByFunc( - f"show snodes;", - lambda: tdSql.getRows() == 1, - delay=0.5, retry=2 + f"show snodes;", lambda: tdSql.getRows() == 1, delay=0.5, retry=2 ) - - numOfRows=tdSql.execute(f"drop snode on dnode {numOfSnodes}") + + numOfRows = tdSql.execute(f"drop snode on dnode {numOfSnodes}") if numOfRows != 0: raise Exception(f" drop all snodes failed! 
") tdSql.query("select * from information_schema.ins_snodes order by id;") @@ -142,106 +144,116 @@ def dropAllSnodeTest(self): def dropOneSnodeTest(self): tdSql.query("select * from information_schema.ins_snodes order by id;") - numOfSnodes=tdSql.getRows() - #只有一个 snode 的时候不再执行删除 - if numOfSnodes >1: + numOfSnodes = tdSql.getRows() + # 只有一个 snode 的时候不再执行删除 + if numOfSnodes > 1: tdLog.info(f"drop one snode test") - tdSql.query("select * from information_schema.ins_streams order by stream_name;") - snodeid = tdSql.getData(0,6) + tdSql.query( + "select * from information_schema.ins_streams order by stream_name;" + ) + snodeid = tdSql.getData(0, 6) tdSql.execute(f"drop snode on dnode {snodeid}") tdLog.info(f"drop snode {snodeid} success") - #drop snode后流状态有延迟,需要等待才能看到 failed 状态出现 + # drop snode后流状态有延迟,需要等待才能看到 failed 状态出现 time.sleep(15) - - + def createStream(self): tdLog.info(f"create stream ") tdSql.query("select * from information_schema.ins_dnodes order by id;") - numOfNodes=tdSql.getRows() - for i in range(1,numOfNodes+1): - tdSql.execute(f"create stream `s{i}` sliding(1s) from st1 stream_options(fill_history('2025-01-01 00:00:00')) into `s{i}out` as select cts, cint from st1 where _tcurrent_ts % 2 = 0 order by cts;") + numOfNodes = tdSql.getRows() + for i in range(1, numOfNodes + 1): + tdSql.execute( + f"create stream `s{i}` sliding(1s) from st1 stream_options(fill_history('2025-01-01 00:00:00')) into `s{i}out` as select cts, cint from st1 where _tcurrent_ts % 2 = 0 order by cts;" + ) tdLog.info(f"create stream s{i} success!") # tdSql.execute("create stream `s2` sliding(1s) from st1 partition by tint, tbname stream_options(fill_history('2025-01-01 00:00:00')) into `s2out` as select cts, cint from st1 order by cts limit 3;") # tdSql.execute("create stream `s3` sliding(1s) from st1 partition by tbname stream_options(pre_filter(cint>2)|fill_history('2025-01-01 00:00:00')) into `s3out` as select cts, cint, %%tbname from %%trows where cint >15 and tint >0 and %%tbname 
like '%2' order by cts;") # tdSql.execute("create stream `s4` sliding(1s) from st1 stream_options(fill_history('2025-01-01 00:00:00')) into `s4out` as select _tcurrent_ts, cint from st1 order by cts limit 4;") - - def dropOneStream(self): tdLog.info(f"drop one stream: ") - tdSql.query("select * from information_schema.ins_streams order by stream_name;") + tdSql.query( + "select * from information_schema.ins_streams order by stream_name;" + ) numOfStreams = tdSql.getRows() tdLog.info(f"Total streams:{numOfStreams}") - streamid = tdSql.getData(0,0) + streamid = tdSql.getData(0, 0) tdSql.execute(f"drop stream {streamid}") tdLog.info(f"drop stream {streamid} success") - - tdSql.query("select * from information_schema.ins_streams order by stream_name;") + + tdSql.query( + "select * from information_schema.ins_streams order by stream_name;" + ) numOfStreams = tdSql.getRows() tdLog.info(f"Total streams:{numOfStreams}") - + def dropOneDnode(self): tdSql.query("select * from information_schema.ins_dnodes order by id;") numOfDnodes = tdSql.getRows() tdLog.info(f"Total dnodes:{numOfDnodes}") - - tdSql.query(f"select `replica` from information_schema.ins_databases where name='{self.dbname}'") - numOfReplica = tdSql.getData(0,0) - - if numOfDnodes ==3 and numOfReplica == 3: + + tdSql.query( + f"select `replica` from information_schema.ins_databases where name='{self.dbname}'" + ) + numOfReplica = tdSql.getData(0, 0) + + if numOfDnodes == 3 and numOfReplica == 3: tdLog.info(f"Total dndoes: 3,replica:3, can not drop dnode.") return - if numOfDnodes >2: + if numOfDnodes > 2: tdLog.info(f"drop one dnode: ") tdSql.query("select * from information_schema.ins_dnodes order by id;") - dnodeid = tdSql.getData(2,0) + dnodeid = tdSql.getData(2, 0) tdSql.execute(f"drop dnode {dnodeid}") tdLog.info(f"drop dnode {dnodeid} success") - + tdSql.query("select * from information_schema.ins_dnodes order by id;") numOfDnodes = tdSql.getRows() tdLog.info(f"Total dnodes:{numOfDnodes}") # time.sleep(3) 
- + def killOneDnode(self): tdSql.query("select * from information_schema.ins_dnodes order by id;") numOfDnodes = tdSql.getRows() - if numOfDnodes >2: + if numOfDnodes > 2: tdLog.info(f"kill one dnode: ") cmd = ( - f"ps -ef | grep -wi taosd | grep 'dnode{numOfDnodes}/cfg' " - "| grep -v grep | awk '{print $2}' | xargs kill -9 > /dev/null 2>&1" + f"ps -ef | grep -wi taosd | grep 'dnode{numOfDnodes}/cfg' " + "| grep -v grep | awk '{print $2}' | xargs kill -9 > /dev/null 2>&1" ) subprocess.run(cmd, shell=True) tdLog.info(f"kill dndoe {numOfDnodes} success") - #kill dnode后流状态有延迟,需要等待才能看到 failed 状态出现 + # kill dnode后流状态有延迟,需要等待才能看到 failed 状态出现 time.sleep(15) - + def killOneDnode2(self): tdSql.query("select * from information_schema.ins_dnodes order by id;") numOfDnodes = tdSql.getRows() - if numOfDnodes >2: + if numOfDnodes > 2: tdLog.info(f"kill one dnode: ") - tdDnodes=cluster.dnodes + tdDnodes = cluster.dnodes tdDnodes[numOfDnodes].stoptaosd() # tdDnodes[numOfDnodes].starttaosd() - - + def checkStreamRunning(self): tdLog.info(f"check stream running status:") - timeout = 60 + timeout = 60 start_time = time.time() while True: if time.time() - start_time > timeout: tdLog.error("Timeout waiting for all streams to be running.") tdLog.error(f"Final stream running status: {streamRunning}") - raise TimeoutError(f"Stream status did not reach 'Running' within {timeout}s timeout.") - - tdSql.query(f"select status from information_schema.ins_streams order by stream_name;") - streamRunning=tdSql.getColData(0) + raise TimeoutError( + f"Stream status did not reach 'Running' within {timeout}s timeout." 
+ ) + + tdSql.query( + f"select status from information_schema.ins_streams order by stream_name;" + ) + streamRunning = tdSql.getColData(0) if all(status == "Running" for status in streamRunning): tdLog.info("All Stream running!") diff --git a/test/cases/41-StreamProcessing/02-Stream/stream_samename.py b/test/cases/41-StreamProcessing/02-Stream/test_stream_same_name.py similarity index 66% rename from test/cases/41-StreamProcessing/02-Stream/stream_samename.py rename to test/cases/41-StreamProcessing/02-Stream/test_stream_same_name.py index ba6e83bb1925..0a60479a04d2 100644 --- a/test/cases/41-StreamProcessing/02-Stream/stream_samename.py +++ b/test/cases/41-StreamProcessing/02-Stream/test_stream_same_name.py @@ -1,12 +1,21 @@ import time import math import random -from new_test_framework.utils import tdLog, tdSql, tdStream, streamUtil,StreamTableType, StreamTable, cluster +from new_test_framework.utils import ( + tdLog, + tdSql, + tdStream, + streamUtil, + StreamTableType, + StreamTable, + cluster, +) from random import randint import os import subprocess -class TestSnodeMgmt: + +class TestStreamSameName: caseName = "" currentDir = os.path.dirname(os.path.abspath(__file__)) runAll = False @@ -20,17 +29,17 @@ class TestSnodeMgmt: subTblNum = 3 tblRowNum = 10 tableList = [] - + def setup_class(cls): tdLog.debug(f"start to execute {__file__}") - def test_snode_mgmt(self): - """Check stream same name test - - 1. Check stream same name + def test_stream_same_name(self): + """Stream: check same name + + Test stream with duplicate names. 
Catalog: - - Streams:Snode + - Streams:Stream Since: v3.3.3.7 @@ -43,118 +52,109 @@ def test_snode_mgmt(self): """ - tdStream.dropAllStreamsAndDbs() - - self.prepareData() self.createSnodeTest() - + self.createOneStream("testst") - sql = ( - "create stream `testst` sliding(1s) from st1 partition by tbname stream_options(fill_history('2025-01-01 00:00:00')) into `testout` as select cts, cint, %%tbname from st1 where cint > 5 and tint > 0 and %%tbname like '%%2' order by cts;" - ) + sql = "create stream `testst` sliding(1s) from st1 partition by tbname stream_options(fill_history('2025-01-01 00:00:00')) into `testout` as select cts, cint, %%tbname from st1 where cint > 5 and tint > 0 and %%tbname like '%%2' order by cts;" tdLog.info(f"create same name stream:{sql}") try: tdSql.execute(sql) except Exception as e: - if "Stream already exists" in str(e): - tdLog.info(f"cant create same name stream") - else: - raise Exception(f"cant create same name stream ,but create success") - + if "Stream already exists" in str(e): + tdLog.info(f"cant create same name stream") + else: + raise Exception(f"cant create same name stream ,but create success") + # error case stream_name = "ashdjfklhgt49hg84g89j4hjq904j9m9vm94jg9j4gj94jg90qj490j2390hr823h8bnbuhu4h8gh48gj834g894j0g4j30gj0g4jg2ij9t0j2498gn498gn894ng9843ng894gk9j4e9gj49gh9jg90qj490j2390hr823hfj38jg84gh84h89gh48h8" # create stream self.createOneStream(stream_name) - # recreate sql = ( "create stream `%s` sliding(1s) from st1 partition by tbname " "stream_options(fill_history('2025-01-01 00:00:00')) into `%sout` " - "as select cts, cint, %%%%tbname from st1 where cint > 5 and tint > 0 and %%%%tbname like '%%2' order by cts;" + "as select cts, cint, %%%%tbname from st1 where cint > 5 and tint > 0 and %%%%tbname like '%%2' order by cts;" % (stream_name, stream_name) ) tdLog.info("create same name stream: %s" % sql) try: tdSql.execute(sql) - raise Exception("ERROR:recrate same name stream success, cant be recreate same name 
stream!") + raise Exception( + "ERROR:recrate same name stream success, cant be recreate same name stream!" + ) except Exception as e: if "Stream already exists" in str(e): tdLog.info("stream already exists!, test passed") else: raise Exception("cant be recreate same name stream:%s" % str(e)) - - - - - def prepareData(self): tdLog.info(f"prepare data") tdStream.dropAllStreamsAndDbs() - #wait all dnode ready + # wait all dnode ready time.sleep(5) tdStream.init_database(self.dbname) - + st1 = StreamTable(self.dbname, "st1", StreamTableType.TYPE_SUP_TABLE) st1.createTable(3) st1.append_data(0, self.tblRowNum) - + self.tableList.append("st1") for i in range(0, self.subTblNum + 1): self.tableList.append(f"st1_{i}") - + ntb = StreamTable(self.dbname, "ntb1", StreamTableType.TYPE_NORMAL_TABLE) ntb.createTable() ntb.append_data(0, self.tblRowNum) self.tableList.append(f"ntb1") - - - def createOneStream(self,stname): + + def createOneStream(self, stname): tdLog.info(f"create stream:") sql = ( - f"create stream `{stname}` sliding(1s) from st1 partition by tbname " - "stream_options(fill_history('2025-01-01 00:00:00')) " - f"into `{stname}out` as " - "select cts, cint, %%tbname from st1 " - "where cint > 5 and tint > 0 and %%tbname like '%%2' " - "order by cts;" + f"create stream `{stname}` sliding(1s) from st1 partition by tbname " + "stream_options(fill_history('2025-01-01 00:00:00')) " + f"into `{stname}out` as " + "select cts, cint, %%tbname from st1 " + "where cint > 5 and tint > 0 and %%tbname like '%%2' " + "order by cts;" ) tdLog.info(f"create stream:{sql}") try: tdSql.execute(sql) except Exception as e: - if "No stream available snode now" not in str(e): - raise Exception(f" user cant create stream no snode ,but create success") - + if "No stream available snode now" not in str(e): + raise Exception( + f" user cant create stream no snode ,but create success" + ) + def checkResultRows(self, expectedRows): tdSql.checkResultsByFunc( f"select * from 
information_schema.ins_snodes order by id;", lambda: tdSql.getRows() == expectedRows, - delay=0.5, retry=2 + delay=0.5, + retry=2, ) - + def createSnodeTest(self): tdLog.info(f"create snode test") tdSql.query("select * from information_schema.ins_dnodes order by id;") - numOfNodes=tdSql.getRows() + numOfNodes = tdSql.getRows() tdLog.info(f"numOfNodes: {numOfNodes}") - + for i in range(1, numOfNodes + 1): tdSql.execute(f"create snode on dnode {i}") tdLog.info(f"create snode on dnode {i} success") self.checkResultRows(numOfNodes) - + tdSql.checkResultsByFunc( - f"show snodes;", - lambda: tdSql.getRows() == numOfNodes, - delay=0.5, retry=2 + f"show snodes;", lambda: tdSql.getRows() == numOfNodes, delay=0.5, retry=2 ) def dropAllSnodeTest(self): @@ -162,20 +162,18 @@ def dropAllSnodeTest(self): tdSql.query("select * from information_schema.ins_snodes order by id;") numOfSnodes = tdSql.getRows() tdLog.info(f"numOfSnodes: {numOfSnodes}") - - for i in range(1, numOfSnodes ): + + for i in range(1, numOfSnodes): tdSql.execute(f"drop snode on dnode {i}") tdLog.info(f"drop snode {i} success") - + self.checkResultRows(1) - + tdSql.checkResultsByFunc( - f"show snodes;", - lambda: tdSql.getRows() == 1, - delay=0.5, retry=2 + f"show snodes;", lambda: tdSql.getRows() == 1, delay=0.5, retry=2 ) - - numOfRows=tdSql.execute(f"drop snode on dnode {numOfSnodes}") + + numOfRows = tdSql.execute(f"drop snode on dnode {numOfSnodes}") if numOfRows != 0: raise Exception(f" drop all snodes failed! 
") tdSql.query("select * from information_schema.ins_snodes order by id;") @@ -184,106 +182,116 @@ def dropAllSnodeTest(self): def dropOneSnodeTest(self): tdSql.query("select * from information_schema.ins_snodes order by id;") - numOfSnodes=tdSql.getRows() - #只有一个 snode 的时候不再执行删除 - if numOfSnodes >1: + numOfSnodes = tdSql.getRows() + # 只有一个 snode 的时候不再执行删除 + if numOfSnodes > 1: tdLog.info(f"drop one snode test") - tdSql.query("select * from information_schema.ins_streams order by stream_name;") - snodeid = tdSql.getData(0,6) + tdSql.query( + "select * from information_schema.ins_streams order by stream_name;" + ) + snodeid = tdSql.getData(0, 6) tdSql.execute(f"drop snode on dnode {snodeid}") tdLog.info(f"drop snode {snodeid} success") - #drop snode后流状态有延迟,需要等待才能看到 failed 状态出现 + # drop snode后流状态有延迟,需要等待才能看到 failed 状态出现 time.sleep(15) - - + def createStream(self): tdLog.info(f"create stream ") tdSql.query("select * from information_schema.ins_dnodes order by id;") - numOfNodes=tdSql.getRows() - for i in range(1,numOfNodes+1): - tdSql.execute(f"create stream `s{i}` sliding(1s) from st1 stream_options(fill_history('2025-01-01 00:00:00')) into `s{i}out` as select cts, cint from st1 where _tcurrent_ts % 2 = 0 order by cts;") + numOfNodes = tdSql.getRows() + for i in range(1, numOfNodes + 1): + tdSql.execute( + f"create stream `s{i}` sliding(1s) from st1 stream_options(fill_history('2025-01-01 00:00:00')) into `s{i}out` as select cts, cint from st1 where _tcurrent_ts % 2 = 0 order by cts;" + ) tdLog.info(f"create stream s{i} success!") # tdSql.execute("create stream `s2` sliding(1s) from st1 partition by tint, tbname options(fill_history('2025-01-01 00:00:00')) into `s2out` as select cts, cint from st1 order by cts limit 3;") # tdSql.execute("create stream `s3` sliding(1s) from st1 partition by tbname options(pre_filter(cint>2)|fill_history('2025-01-01 00:00:00')) into `s3out` as select cts, cint, %%tbname from %%trows where cint >15 and tint >0 and %%tbname like '%2' 
order by cts;") # tdSql.execute("create stream `s4` sliding(1s) from st1 options(fill_history('2025-01-01 00:00:00')) into `s4out` as select _tcurrent_ts, cint from st1 order by cts limit 4;") - - def dropOneStream(self): tdLog.info(f"drop one stream: ") - tdSql.query("select * from information_schema.ins_streams order by stream_name;") + tdSql.query( + "select * from information_schema.ins_streams order by stream_name;" + ) numOfStreams = tdSql.getRows() tdLog.info(f"Total streams:{numOfStreams}") - streamid = tdSql.getData(0,0) + streamid = tdSql.getData(0, 0) tdSql.execute(f"drop stream {streamid}") tdLog.info(f"drop stream {streamid} success") - - tdSql.query("select * from information_schema.ins_streams order by stream_name;") + + tdSql.query( + "select * from information_schema.ins_streams order by stream_name;" + ) numOfStreams = tdSql.getRows() tdLog.info(f"Total streams:{numOfStreams}") - + def dropOneDnode(self): tdSql.query("select * from information_schema.ins_dnodes order by id;") numOfDnodes = tdSql.getRows() tdLog.info(f"Total dnodes:{numOfDnodes}") - - tdSql.query(f"select `replica` from information_schema.ins_databases where name='{self.dbname}'") - numOfReplica = tdSql.getData(0,0) - - if numOfDnodes ==3 and numOfReplica == 3: + + tdSql.query( + f"select `replica` from information_schema.ins_databases where name='{self.dbname}'" + ) + numOfReplica = tdSql.getData(0, 0) + + if numOfDnodes == 3 and numOfReplica == 3: tdLog.info(f"Total dndoes: 3,replica:3, can not drop dnode.") return - if numOfDnodes >2: + if numOfDnodes > 2: tdLog.info(f"drop one dnode: ") tdSql.query("select * from information_schema.ins_dnodes order by id;") - dnodeid = tdSql.getData(2,0) + dnodeid = tdSql.getData(2, 0) tdSql.execute(f"drop dnode {dnodeid}") tdLog.info(f"drop dnode {dnodeid} success") - + tdSql.query("select * from information_schema.ins_dnodes order by id;") numOfDnodes = tdSql.getRows() tdLog.info(f"Total dnodes:{numOfDnodes}") # time.sleep(3) - + def 
killOneDnode(self): tdSql.query("select * from information_schema.ins_dnodes order by id;") numOfDnodes = tdSql.getRows() - if numOfDnodes >2: + if numOfDnodes > 2: tdLog.info(f"kill one dnode: ") cmd = ( - f"ps -ef | grep -wi taosd | grep 'dnode{numOfDnodes}/cfg' " - "| grep -v grep | awk '{print $2}' | xargs kill -9 > /dev/null 2>&1" + f"ps -ef | grep -wi taosd | grep 'dnode{numOfDnodes}/cfg' " + "| grep -v grep | awk '{print $2}' | xargs kill -9 > /dev/null 2>&1" ) subprocess.run(cmd, shell=True) tdLog.info(f"kill dndoe {numOfDnodes} success") - #kill dnode后流状态有延迟,需要等待才能看到 failed 状态出现 + # kill dnode后流状态有延迟,需要等待才能看到 failed 状态出现 time.sleep(15) - + def killOneDnode2(self): tdSql.query("select * from information_schema.ins_dnodes order by id;") numOfDnodes = tdSql.getRows() - if numOfDnodes >2: + if numOfDnodes > 2: tdLog.info(f"kill one dnode: ") - tdDnodes=cluster.dnodes + tdDnodes = cluster.dnodes tdDnodes[numOfDnodes].stoptaosd() # tdDnodes[numOfDnodes].starttaosd() - - + def checkStreamRunning(self): tdLog.info(f"check stream running status:") - timeout = 60 + timeout = 60 start_time = time.time() while True: if time.time() - start_time > timeout: tdLog.error("Timeout waiting for all streams to be running.") tdLog.error(f"Final stream running status: {streamRunning}") - raise TimeoutError(f"Stream status did not reach 'Running' within {timeout}s timeout.") - - tdSql.query(f"select status from information_schema.ins_streams order by stream_name;") - streamRunning=tdSql.getColData(0) + raise TimeoutError( + f"Stream status did not reach 'Running' within {timeout}s timeout." 
+ ) + + tdSql.query( + f"select status from information_schema.ins_streams order by stream_name;" + ) + streamRunning = tdSql.getColData(0) if all(status == "Running" for status in streamRunning): tdLog.info("All Stream running!") diff --git a/test/cases/41-StreamProcessing/03-TriggerMode/test_count_dbg.py b/test/cases/41-StreamProcessing/03-TriggerMode/count_dbg.py similarity index 99% rename from test/cases/41-StreamProcessing/03-TriggerMode/test_count_dbg.py rename to test/cases/41-StreamProcessing/03-TriggerMode/count_dbg.py index 5191d65c16db..1d2e83bacc95 100644 --- a/test/cases/41-StreamProcessing/03-TriggerMode/test_count_dbg.py +++ b/test/cases/41-StreamProcessing/03-TriggerMode/count_dbg.py @@ -18,7 +18,7 @@ def test_stream_count_trigger(self): Since: v3.3.3.7 - Labels: common,ci + Labels: common,ci,skip Jira: None diff --git a/test/cases/41-StreamProcessing/03-TriggerMode/test_count_disorder.py b/test/cases/41-StreamProcessing/03-TriggerMode/count_disorder.py similarity index 99% rename from test/cases/41-StreamProcessing/03-TriggerMode/test_count_disorder.py rename to test/cases/41-StreamProcessing/03-TriggerMode/count_disorder.py index 61a78d77d9c1..fe23b820d094 100644 --- a/test/cases/41-StreamProcessing/03-TriggerMode/test_count_disorder.py +++ b/test/cases/41-StreamProcessing/03-TriggerMode/count_disorder.py @@ -18,7 +18,7 @@ def test_stream_count_trigger(self): Since: v3.3.3.7 - Labels: common,ci + Labels: common,ci,skip Jira: None diff --git a/test/cases/41-StreamProcessing/03-TriggerMode/test_count_new_dbg.py b/test/cases/41-StreamProcessing/03-TriggerMode/count_new_dbg.py similarity index 99% rename from test/cases/41-StreamProcessing/03-TriggerMode/test_count_new_dbg.py rename to test/cases/41-StreamProcessing/03-TriggerMode/count_new_dbg.py index f4298785c628..872e1d0e6691 100644 --- a/test/cases/41-StreamProcessing/03-TriggerMode/test_count_new_dbg.py +++ b/test/cases/41-StreamProcessing/03-TriggerMode/count_new_dbg.py @@ -21,7 +21,7 @@ def 
test_stream_count_trigger(self): Since: v3.3.3.7 - Labels: common,ci + Labels: common,ci,skip Jira: None diff --git a/test/cases/41-StreamProcessing/03-TriggerMode/test_event_new_dbg.py b/test/cases/41-StreamProcessing/03-TriggerMode/event_new_dbg.py similarity index 99% rename from test/cases/41-StreamProcessing/03-TriggerMode/test_event_new_dbg.py rename to test/cases/41-StreamProcessing/03-TriggerMode/event_new_dbg.py index 2fa4c09f8174..5a4f75af317d 100644 --- a/test/cases/41-StreamProcessing/03-TriggerMode/test_event_new_dbg.py +++ b/test/cases/41-StreamProcessing/03-TriggerMode/event_new_dbg.py @@ -21,7 +21,7 @@ def test_stream_state_trigger(self): Since: v3.3.3.7 - Labels: common,ci + Labels: common,ci,skip Jira: None diff --git a/test/cases/41-StreamProcessing/03-TriggerMode/test_period.py b/test/cases/41-StreamProcessing/03-TriggerMode/period.py similarity index 99% rename from test/cases/41-StreamProcessing/03-TriggerMode/test_period.py rename to test/cases/41-StreamProcessing/03-TriggerMode/period.py index d5203d8bc193..42ae92699740 100644 --- a/test/cases/41-StreamProcessing/03-TriggerMode/test_period.py +++ b/test/cases/41-StreamProcessing/03-TriggerMode/period.py @@ -117,7 +117,7 @@ def test_stream_period_trigger(self): Since: v3.3.3.7 - Labels: common,ci + Labels: common,ci,skip Jira: None diff --git a/test/cases/41-StreamProcessing/03-TriggerMode/test_period_output_subtable.py b/test/cases/41-StreamProcessing/03-TriggerMode/period_output_subtable.py similarity index 99% rename from test/cases/41-StreamProcessing/03-TriggerMode/test_period_output_subtable.py rename to test/cases/41-StreamProcessing/03-TriggerMode/period_output_subtable.py index c882fef0105d..a0b7f3db5175 100644 --- a/test/cases/41-StreamProcessing/03-TriggerMode/test_period_output_subtable.py +++ b/test/cases/41-StreamProcessing/03-TriggerMode/period_output_subtable.py @@ -30,7 +30,7 @@ def test_snode_mgmt(self): Since: v3.3.3.7 - Labels: common,ci + Labels: common,ci,skip Jira: None 
diff --git a/test/cases/41-StreamProcessing/03-TriggerMode/test_session_1.py b/test/cases/41-StreamProcessing/03-TriggerMode/session_1.py similarity index 99% rename from test/cases/41-StreamProcessing/03-TriggerMode/test_session_1.py rename to test/cases/41-StreamProcessing/03-TriggerMode/session_1.py index a0af9b9b7403..9a68cf6bbba9 100644 --- a/test/cases/41-StreamProcessing/03-TriggerMode/test_session_1.py +++ b/test/cases/41-StreamProcessing/03-TriggerMode/session_1.py @@ -132,7 +132,7 @@ def test_stream_dev_basic(self): Since: v3.3.3.7 - Labels: common,ci + Labels: common,ci,skip Jira: None diff --git a/test/cases/41-StreamProcessing/03-TriggerMode/test_session_zlv.py b/test/cases/41-StreamProcessing/03-TriggerMode/session_zlv.py similarity index 99% rename from test/cases/41-StreamProcessing/03-TriggerMode/test_session_zlv.py rename to test/cases/41-StreamProcessing/03-TriggerMode/session_zlv.py index bb123de4705d..a8ea32c449f3 100644 --- a/test/cases/41-StreamProcessing/03-TriggerMode/test_session_zlv.py +++ b/test/cases/41-StreamProcessing/03-TriggerMode/session_zlv.py @@ -308,7 +308,7 @@ def test_stream_session_trigger(self): Since: v3.3.3.7 - Labels: common,ci + Labels: common,ci,skip Jira: None diff --git a/test/cases/41-StreamProcessing/03-TriggerMode/test_sliding_1.py b/test/cases/41-StreamProcessing/03-TriggerMode/sliding_1.py similarity index 99% rename from test/cases/41-StreamProcessing/03-TriggerMode/test_sliding_1.py rename to test/cases/41-StreamProcessing/03-TriggerMode/sliding_1.py index ac1dca83cde2..76d8539df22d 100644 --- a/test/cases/41-StreamProcessing/03-TriggerMode/test_sliding_1.py +++ b/test/cases/41-StreamProcessing/03-TriggerMode/sliding_1.py @@ -21,7 +21,7 @@ def test_stream_dev_basic(self): Since: v3.3.3.7 - Labels: common,ci + Labels: common,ci,skip Jira: None diff --git a/test/cases/41-StreamProcessing/03-TriggerMode/test_sliding_case1.py b/test/cases/41-StreamProcessing/03-TriggerMode/sliding_case1.py similarity index 99% 
rename from test/cases/41-StreamProcessing/03-TriggerMode/test_sliding_case1.py rename to test/cases/41-StreamProcessing/03-TriggerMode/sliding_case1.py index a9c360588be3..a42aebe9232a 100644 --- a/test/cases/41-StreamProcessing/03-TriggerMode/test_sliding_case1.py +++ b/test/cases/41-StreamProcessing/03-TriggerMode/sliding_case1.py @@ -31,7 +31,7 @@ def test_sliding_case1(self): Since: v3.3.3.7 - Labels: common,ci + Labels: common,ci,skip Jira: None diff --git a/test/cases/41-StreamProcessing/03-TriggerMode/test_sliding_case2.py b/test/cases/41-StreamProcessing/03-TriggerMode/sliding_case2.py similarity index 99% rename from test/cases/41-StreamProcessing/03-TriggerMode/test_sliding_case2.py rename to test/cases/41-StreamProcessing/03-TriggerMode/sliding_case2.py index 2191d964b324..4ac4e873b9dd 100644 --- a/test/cases/41-StreamProcessing/03-TriggerMode/test_sliding_case2.py +++ b/test/cases/41-StreamProcessing/03-TriggerMode/sliding_case2.py @@ -31,7 +31,7 @@ def test_sliding_case1(self): Since: v3.3.3.7 - Labels: common,ci + Labels: common,ci,skip Jira: None diff --git a/test/cases/41-StreamProcessing/03-TriggerMode/test_sliding_output_subtable_zlv.py b/test/cases/41-StreamProcessing/03-TriggerMode/sliding_output_subtable_zlv.py similarity index 99% rename from test/cases/41-StreamProcessing/03-TriggerMode/test_sliding_output_subtable_zlv.py rename to test/cases/41-StreamProcessing/03-TriggerMode/sliding_output_subtable_zlv.py index 8ef0b9fdd1df..e719586e7907 100644 --- a/test/cases/41-StreamProcessing/03-TriggerMode/test_sliding_output_subtable_zlv.py +++ b/test/cases/41-StreamProcessing/03-TriggerMode/sliding_output_subtable_zlv.py @@ -31,7 +31,7 @@ def test_snode_mgmt(self): Since: v3.3.3.7 - Labels: common,ci + Labels: common,ci,skip Jira: None diff --git a/test/cases/41-StreamProcessing/03-TriggerMode/test_sliding_output_subtable_zlv_2.py b/test/cases/41-StreamProcessing/03-TriggerMode/sliding_output_subtable_zlv_2.py similarity index 99% rename from 
test/cases/41-StreamProcessing/03-TriggerMode/test_sliding_output_subtable_zlv_2.py rename to test/cases/41-StreamProcessing/03-TriggerMode/sliding_output_subtable_zlv_2.py index 7a41460c6f41..0379a115fffd 100644 --- a/test/cases/41-StreamProcessing/03-TriggerMode/test_sliding_output_subtable_zlv_2.py +++ b/test/cases/41-StreamProcessing/03-TriggerMode/sliding_output_subtable_zlv_2.py @@ -31,7 +31,7 @@ def test_stream(self): Since: v3.3.3.7 - Labels: common,ci + Labels: common,ci,skip Jira: None diff --git a/test/cases/41-StreamProcessing/03-TriggerMode/test_state_2.py b/test/cases/41-StreamProcessing/03-TriggerMode/state_2.py similarity index 99% rename from test/cases/41-StreamProcessing/03-TriggerMode/test_state_2.py rename to test/cases/41-StreamProcessing/03-TriggerMode/state_2.py index 4ee7c7760d1e..bd9748c2adc7 100644 --- a/test/cases/41-StreamProcessing/03-TriggerMode/test_state_2.py +++ b/test/cases/41-StreamProcessing/03-TriggerMode/state_2.py @@ -18,7 +18,7 @@ def test_stream_state_trigger(self): Since: v3.3.3.7 - Labels: common,ci + Labels: common,ci,skip Jira: None diff --git a/test/cases/41-StreamProcessing/03-TriggerMode/test_state_addnewsub.py b/test/cases/41-StreamProcessing/03-TriggerMode/state_addnewsub.py similarity index 99% rename from test/cases/41-StreamProcessing/03-TriggerMode/test_state_addnewsub.py rename to test/cases/41-StreamProcessing/03-TriggerMode/state_addnewsub.py index 7b40a26b256b..8eb01a6e79e5 100644 --- a/test/cases/41-StreamProcessing/03-TriggerMode/test_state_addnewsub.py +++ b/test/cases/41-StreamProcessing/03-TriggerMode/state_addnewsub.py @@ -18,7 +18,7 @@ def test_stream_state_trigger(self): Since: v3.3.3.7 - Labels: common,ci + Labels: common,ci,skip Jira: None diff --git a/test/cases/41-StreamProcessing/03-TriggerMode/test_state_bak.py b/test/cases/41-StreamProcessing/03-TriggerMode/state_bak.py similarity index 99% rename from test/cases/41-StreamProcessing/03-TriggerMode/test_state_bak.py rename to 
test/cases/41-StreamProcessing/03-TriggerMode/state_bak.py index 1c49d2f8a86d..4ca4453befa8 100644 --- a/test/cases/41-StreamProcessing/03-TriggerMode/test_state_bak.py +++ b/test/cases/41-StreamProcessing/03-TriggerMode/state_bak.py @@ -161,7 +161,7 @@ def test_stream_state_trigger(self): Since: v3.3.3.7 - Labels: common,ci + Labels: common,ci,skip Jira: None diff --git a/test/cases/41-StreamProcessing/03-TriggerMode/test_state_disorderNupdate_new_dbg.py b/test/cases/41-StreamProcessing/03-TriggerMode/state_disorder_update_new_dbg.py similarity index 99% rename from test/cases/41-StreamProcessing/03-TriggerMode/test_state_disorderNupdate_new_dbg.py rename to test/cases/41-StreamProcessing/03-TriggerMode/state_disorder_update_new_dbg.py index f650f531c418..21252dcf1623 100644 --- a/test/cases/41-StreamProcessing/03-TriggerMode/test_state_disorderNupdate_new_dbg.py +++ b/test/cases/41-StreamProcessing/03-TriggerMode/state_disorder_update_new_dbg.py @@ -15,7 +15,7 @@ def test_stream_state_trigger(self): Since: xxx - Labels: xxx + Labels: skip Jira: xxx diff --git a/test/cases/41-StreamProcessing/03-TriggerMode/test_state_new_dbg.py b/test/cases/41-StreamProcessing/03-TriggerMode/state_new_dbg.py similarity index 99% rename from test/cases/41-StreamProcessing/03-TriggerMode/test_state_new_dbg.py rename to test/cases/41-StreamProcessing/03-TriggerMode/state_new_dbg.py index d1e8f2969521..ba28cb6e1e34 100644 --- a/test/cases/41-StreamProcessing/03-TriggerMode/test_state_new_dbg.py +++ b/test/cases/41-StreamProcessing/03-TriggerMode/state_new_dbg.py @@ -21,7 +21,7 @@ def test_stream_state_trigger(self): Since: v3.3.3.7 - Labels: common,ci + Labels: common,ci,skip Jira: None diff --git a/test/cases/41-StreamProcessing/03-TriggerMode/test_statewindow_output_subtable_zlv.py b/test/cases/41-StreamProcessing/03-TriggerMode/state_output_subtable_zlv.py similarity index 99% rename from test/cases/41-StreamProcessing/03-TriggerMode/test_statewindow_output_subtable_zlv.py rename 
to test/cases/41-StreamProcessing/03-TriggerMode/state_output_subtable_zlv.py index f1f6484f4a83..012dfdfccd6d 100644 --- a/test/cases/41-StreamProcessing/03-TriggerMode/test_statewindow_output_subtable_zlv.py +++ b/test/cases/41-StreamProcessing/03-TriggerMode/state_output_subtable_zlv.py @@ -31,7 +31,7 @@ def test_snode_mgmt(self): Since: v3.3.3.7 - Labels: common,ci + Labels: common,ci,skip Jira: None diff --git a/test/cases/41-StreamProcessing/03-TriggerMode/test_count.py b/test/cases/41-StreamProcessing/03-TriggerMode/test_count.py index 1f2258899d96..c1bca436de61 100644 --- a/test/cases/41-StreamProcessing/03-TriggerMode/test_count.py +++ b/test/cases/41-StreamProcessing/03-TriggerMode/test_count.py @@ -9,7 +9,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_count_trigger(self): - """basic qdb 2 + """Count: Verification testing during the development process. diff --git a/test/cases/41-StreamProcessing/03-TriggerMode/test_count_new.py b/test/cases/41-StreamProcessing/03-TriggerMode/test_count_new.py index 22f7315445e6..c89565a40c68 100644 --- a/test/cases/41-StreamProcessing/03-TriggerMode/test_count_new.py +++ b/test/cases/41-StreamProcessing/03-TriggerMode/test_count_new.py @@ -8,7 +8,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_count_trigger(self): - """basic test + """Count: Verification testing during the development process. 
@@ -21,7 +21,7 @@ def test_stream_count_trigger(self): Since: v3.3.3.7 - Labels: common,ci + Labels: common,ci,skip Jira: None diff --git a/test/cases/41-StreamProcessing/03-TriggerMode/test_event.py b/test/cases/41-StreamProcessing/03-TriggerMode/test_event.py index 2103e8235cd7..ee9f75e8d7ae 100644 --- a/test/cases/41-StreamProcessing/03-TriggerMode/test_event.py +++ b/test/cases/41-StreamProcessing/03-TriggerMode/test_event.py @@ -9,22 +9,26 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_event_trigger(self): - """summary: xxx + """Event: - description: xxx + Verification testing during the development process. - Since: xxx + Catalog: + - Streams: 03-TriggerMode + Description: + - create 14 streams, each stream has 1 source tables + - write data to source tables + - check stream results - Labels: xxx + Since: v3.3.3.7 - Jira: xxx + Labels: common,ci + + Jira: None - Catalog: - - xxx:xxx - History: - - xxx - - xxx + - 2025-07-22 + """ tdStream.dropAllStreamsAndDbs() diff --git a/test/cases/41-StreamProcessing/03-TriggerMode/test_event_new.py b/test/cases/41-StreamProcessing/03-TriggerMode/test_event_new.py index 216537203d5a..3c7bd152f01b 100644 --- a/test/cases/41-StreamProcessing/03-TriggerMode/test_event_new.py +++ b/test/cases/41-StreamProcessing/03-TriggerMode/test_event_new.py @@ -8,7 +8,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_state_trigger(self): - """basic test + """Event: Verification testing during the development process. 
diff --git a/test/cases/41-StreamProcessing/03-TriggerMode/test_fill_history.py b/test/cases/41-StreamProcessing/03-TriggerMode/test_fill_history.py index de69ef06fe53..bfa50d3892f7 100644 --- a/test/cases/41-StreamProcessing/03-TriggerMode/test_fill_history.py +++ b/test/cases/41-StreamProcessing/03-TriggerMode/test_fill_history.py @@ -9,7 +9,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_fill_history(self): - """basic qdb 2 + """Fill History Verification testing during the development process. diff --git a/test/cases/41-StreamProcessing/03-TriggerMode/test_period_1.py b/test/cases/41-StreamProcessing/03-TriggerMode/test_period_1.py index f55209a1d679..030659e67762 100644 --- a/test/cases/41-StreamProcessing/03-TriggerMode/test_period_1.py +++ b/test/cases/41-StreamProcessing/03-TriggerMode/test_period_1.py @@ -164,16 +164,14 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_dev_basic(self): - """basic test - - Verification testing during the development process. + """Period: + + 1. Create 10 streams, each stream has 10 source tables + 2. Write data to source tables + 3. Check stream resultsVerification testing during the development process. Catalog: - Streams: 03-TriggerMode - Description: - - create 10 streams, each stream has 10 source tables - - write data to source tables - - check stream results Since: v3.3.3.7 diff --git a/test/cases/41-StreamProcessing/03-TriggerMode/test_sliding.py b/test/cases/41-StreamProcessing/03-TriggerMode/test_sliding.py index 26fa4d183554..acc8f716b719 100644 --- a/test/cases/41-StreamProcessing/03-TriggerMode/test_sliding.py +++ b/test/cases/41-StreamProcessing/03-TriggerMode/test_sliding.py @@ -222,7 +222,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_sliding_trigger(self): - """Stream sliding trigger + """Sliding: 1. create table 2. 
insert data diff --git a/test/cases/41-StreamProcessing/03-TriggerMode/test_state.py b/test/cases/41-StreamProcessing/03-TriggerMode/test_state.py index 7ca5e1bf2a2a..cf214b4516a1 100644 --- a/test/cases/41-StreamProcessing/03-TriggerMode/test_state.py +++ b/test/cases/41-StreamProcessing/03-TriggerMode/test_state.py @@ -9,7 +9,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_state_trigger(self): - """basic qdb 2 + """State: Verification testing during the development process. diff --git a/test/cases/41-StreamProcessing/03-TriggerMode/test_state_disorderNupdate_new.py b/test/cases/41-StreamProcessing/03-TriggerMode/test_state_disorder_update_new.py similarity index 99% rename from test/cases/41-StreamProcessing/03-TriggerMode/test_state_disorderNupdate_new.py rename to test/cases/41-StreamProcessing/03-TriggerMode/test_state_disorder_update_new.py index 6a2a6941fc04..33744146f910 100644 --- a/test/cases/41-StreamProcessing/03-TriggerMode/test_state_disorderNupdate_new.py +++ b/test/cases/41-StreamProcessing/03-TriggerMode/test_state_disorder_update_new.py @@ -9,22 +9,22 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_state_trigger(self): - """summary: xxx + """State: - description: xxx + Verification testing during the development process. 
- Since: xxx + Catalog: + - Streams:Others - Labels: xxx + Since: v3.3.3.7 - Jira: xxx + Labels: common,ci + + Jira: None - Catalog: - - xxx:xxx - History: - - xxx - - xxx + - 2025-5-26 Guoxy Created + """ tdStream.createSnode() diff --git a/test/cases/41-StreamProcessing/03-TriggerMode/test_state_new.py b/test/cases/41-StreamProcessing/03-TriggerMode/test_state_new.py index bb22e264d1e4..b735604eec44 100644 --- a/test/cases/41-StreamProcessing/03-TriggerMode/test_state_new.py +++ b/test/cases/41-StreamProcessing/03-TriggerMode/test_state_new.py @@ -8,7 +8,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_state_trigger(self): - """basic test + """State: Verification testing during the development process. diff --git a/test/cases/41-StreamProcessing/03-TriggerMode/test_window_close_state_window.py b/test/cases/41-StreamProcessing/03-TriggerMode/test_state_window_close.py similarity index 95% rename from test/cases/41-StreamProcessing/03-TriggerMode/test_window_close_state_window.py rename to test/cases/41-StreamProcessing/03-TriggerMode/test_state_window_close.py index fa658b374921..f6c005654bfc 100644 --- a/test/cases/41-StreamProcessing/03-TriggerMode/test_window_close_state_window.py +++ b/test/cases/41-StreamProcessing/03-TriggerMode/test_state_window_close.py @@ -12,18 +12,15 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_window_close_state_window(self): - """Window close state window basic test - - Test window close trigger mode with state window windows. - - Verification testing during the development process. + """State: window close + Test window close trigger mode with state window windows + 1. create streams with state window windows + 2. write data to source tables with state window gaps + 3. 
check stream results + Catalog: - Streams: 03-TriggerMode - Description: - - create streams with state window windows - - write data to source tables with state window gaps - - check stream results Since: v3.3.3.7 diff --git a/test/cases/41-StreamProcessing/04-Options/test_disorderUpdateDelete.py b/test/cases/41-StreamProcessing/04-Options/test_abnormal_data_table.py similarity index 99% rename from test/cases/41-StreamProcessing/04-Options/test_disorderUpdateDelete.py rename to test/cases/41-StreamProcessing/04-Options/test_abnormal_data_table.py index f466e538719d..3386e95293e0 100644 --- a/test/cases/41-StreamProcessing/04-Options/test_disorderUpdateDelete.py +++ b/test/cases/41-StreamProcessing/04-Options/test_abnormal_data_table.py @@ -3,14 +3,14 @@ from new_test_framework.utils import (tdLog,tdSql,tdStream,StreamCheckItem,) -class TestStreamDisorderTrigger: +class TestStreamDisorderTable: precision = 'ms' def setup_class(cls): tdLog.debug(f"start to execute {__file__}") - def test_stream_state_trigger(self): - """stream options + def test_stream_disnorder_table(self): + """Abnormal Data: table test data disorder/update/delete change cases to stream @@ -50,7 +50,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamDisorderTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamDisorderTable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.db}.{self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -302,7 +302,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamDisorderTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamDisorderTable.precision}'") tdSql.execute(f"use {self.db}") 
tdSql.execute(f"create table if not exists {self.db}.{self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -462,7 +462,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamDisorderTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamDisorderTable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.db}.{self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -852,7 +852,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamDisorderTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamDisorderTable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.db}.{self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") diff --git a/test/cases/41-StreamProcessing/04-Options/test_disorderUpdateDelete_vtbl.py b/test/cases/41-StreamProcessing/04-Options/test_abnormal_data_vtable.py similarity index 99% rename from test/cases/41-StreamProcessing/04-Options/test_disorderUpdateDelete_vtbl.py rename to test/cases/41-StreamProcessing/04-Options/test_abnormal_data_vtable.py index d19c28b8a88f..b83e12e68911 100644 --- a/test/cases/41-StreamProcessing/04-Options/test_disorderUpdateDelete_vtbl.py +++ b/test/cases/41-StreamProcessing/04-Options/test_abnormal_data_vtable.py @@ -3,14 +3,14 @@ from new_test_framework.utils import (tdLog,tdSql,tdStream,StreamCheckItem,) -class TestStreamDisorderTrigger: +class TestStreamDisorderVtable: precision = 'ms' def setup_class(cls): tdLog.debug(f"start to execute {__file__}") - def test_stream_state_trigger(self): - """stream options + def test_stream_disnorder_vtable(self): + 
"""Abnormal Data: virtual table test data disorder/update/delete change cases to stream for virtual table @@ -45,7 +45,7 @@ def __init__(self): self.vstbName = "vstb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamDisorderTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamDisorderVtable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.db}.{self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.execute(f"create table if not exists {self.db}.{self.vstbName} (cts timestamp, cint int) tags (tint int) virtual 1") @@ -306,7 +306,7 @@ def __init__(self): self.vstbName = "vstb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamDisorderTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamDisorderVtable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.db}.{self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.execute(f"create table if not exists {self.db}.{self.vstbName} (cts timestamp, cint int) tags (tint int) virtual 1") @@ -473,7 +473,7 @@ def __init__(self): self.vstbName = "vstb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamDisorderTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamDisorderVtable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.db}.{self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.execute(f"create table if not exists {self.db}.{self.vstbName} (cts timestamp, cint int) tags (tint int) virtual 1") @@ -870,7 +870,7 @@ def __init__(self): self.vstbName = "vstb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision 
'{TestStreamDisorderTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamDisorderVtable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.db}.{self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.execute(f"create table if not exists {self.db}.{self.vstbName} (cts timestamp, cint int) tags (tint int) virtual 1") diff --git a/test/cases/41-StreamProcessing/04-Options/test_meta.py b/test/cases/41-StreamProcessing/04-Options/test_meta_change_table.py similarity index 99% rename from test/cases/41-StreamProcessing/04-Options/test_meta.py rename to test/cases/41-StreamProcessing/04-Options/test_meta_change_table.py index 301946098239..a4fd990a9d48 100644 --- a/test/cases/41-StreamProcessing/04-Options/test_meta.py +++ b/test/cases/41-StreamProcessing/04-Options/test_meta_change_table.py @@ -2,14 +2,14 @@ from new_test_framework.utils import (tdLog,tdSql,tdStream,StreamCheckItem,) -class TestStreamMetaTrigger: +class TestStreamMetaChangeTable: precision = 'ms' def setup_class(cls): tdLog.debug(f"start to execute {__file__}") - def test_stream_meta_trigger(self): - """stream options + def test_stream_meta_change_table(self): + """Meta Change: table test meta change (add/drop/modify) cases to stream @@ -53,7 +53,7 @@ def __init__(self): self.ntbName = 'ntb' def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamMetaTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamMetaChangeTable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.db}.{self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.execute(f"create table if not exists {self.db}.{self.ntbName} (cts timestamp, cint int)") @@ -295,7 +295,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 
buffer 8 precision '{TestStreamMetaTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamMetaChangeTable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") @@ -560,7 +560,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamMetaTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamMetaChangeTable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.db}.{self.stbName} (cts timestamp, cint int) tags (tint int, tbigint bigint)") @@ -856,7 +856,7 @@ def __init__(self): self.ntbName = 'ntb' def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamMetaTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamMetaChangeTable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.db}.{self.stbName} (cts timestamp, cint int, cbigint bigint, cfloat float) tags (tint int, tbigint bigint, tfloat float)") tdSql.execute(f"create table if not exists {self.db}.{self.ntbName} (cts timestamp, cint int, cbigint bigint, cfloat float)") @@ -1071,7 +1071,7 @@ def __init__(self): self.ntbName1 = 'ntb1' # source data normal table def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamMetaTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamMetaChangeTable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.db}.{self.stbName} (cts timestamp, cint int, cbigint bigint, cfloat float) tags (tint int, tbigint bigint, tfloat float)") tdSql.execute(f"create table if not exists 
{self.db}.{self.stbName1} (cts timestamp, cint int, cbigint bigint, cfloat float) tags (tint int, tbigint bigint, tfloat float)") @@ -1387,7 +1387,7 @@ def __init__(self): self.ntbName2 = 'ntb2' # source data normal table def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamMetaTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamMetaChangeTable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.db}.{self.ntbName} (cts timestamp, cint int, cbigint bigint, cfloat float)") @@ -1560,7 +1560,7 @@ def __init__(self): self.ntbName = 'ntb' def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamMetaTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamMetaChangeTable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.db}.{self.stbName} (cts timestamp, cint int, cbigint bigint, cfloat float) tags (tint int)") tdSql.execute(f"create table if not exists {self.db}.{self.ntbName} (cts timestamp, cint int, cbigint bigint, cfloat float)") @@ -1869,12 +1869,12 @@ def __init__(self): self.ntbName1 = 'ntb1' # source data normal table def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamMetaTrigger.precision}'") - tdSql.execute(f"create database {self.db1} vgroups 1 buffer 8 precision '{TestStreamMetaTrigger.precision}'") - tdSql.execute(f"create database {self.db2} vgroups 1 buffer 8 precision '{TestStreamMetaTrigger.precision}'") - tdSql.execute(f"create database {self.db3} vgroups 1 buffer 8 precision '{TestStreamMetaTrigger.precision}'") - tdSql.execute(f"create database {self.db4} vgroups 1 buffer 8 precision '{TestStreamMetaTrigger.precision}'") - tdSql.execute(f"create database {self.db5} vgroups 1 buffer 8 precision 
'{TestStreamMetaTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamMetaChangeTable.precision}'") + tdSql.execute(f"create database {self.db1} vgroups 1 buffer 8 precision '{TestStreamMetaChangeTable.precision}'") + tdSql.execute(f"create database {self.db2} vgroups 1 buffer 8 precision '{TestStreamMetaChangeTable.precision}'") + tdSql.execute(f"create database {self.db3} vgroups 1 buffer 8 precision '{TestStreamMetaChangeTable.precision}'") + tdSql.execute(f"create database {self.db4} vgroups 1 buffer 8 precision '{TestStreamMetaChangeTable.precision}'") + tdSql.execute(f"create database {self.db5} vgroups 1 buffer 8 precision '{TestStreamMetaChangeTable.precision}'") # db1 tdSql.execute(f"create table if not exists {self.db1}.{self.stbName} (cts timestamp, cint int, cbigint bigint, cfloat float) tags (tint int, tbigint bigint, tfloat float)") diff --git a/test/cases/41-StreamProcessing/04-Options/test_meta_vtbl.py b/test/cases/41-StreamProcessing/04-Options/test_meta_change_vtable.py similarity index 99% rename from test/cases/41-StreamProcessing/04-Options/test_meta_vtbl.py rename to test/cases/41-StreamProcessing/04-Options/test_meta_change_vtable.py index 54c9390fe054..510aed3ec445 100644 --- a/test/cases/41-StreamProcessing/04-Options/test_meta_vtbl.py +++ b/test/cases/41-StreamProcessing/04-Options/test_meta_change_vtable.py @@ -2,14 +2,14 @@ from new_test_framework.utils import (tdLog,tdSql,tdStream,StreamCheckItem,) -class TestStreamMetaTrigger: +class TestStreamMetaChangeVTable: precision = 'ms' def setup_class(cls): tdLog.debug(f"start to execute {__file__}") - def test_stream_meta_trigger(self): - """stream options + def test_stream_meta_change_vtable(self): + """Meta Change: virtual table test meta change (add/drop/modify) cases to stream for virtual table @@ -59,7 +59,7 @@ def __init__(self): self.vntbName = "vntb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 
precision '{TestStreamMetaTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamMetaChangeVTable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.db}.{self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.execute(f"create table if not exists {self.db}.{self.ntbName} (cts timestamp, cint int)") @@ -310,7 +310,7 @@ def __init__(self): self.vstbName = "vstb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamMetaTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamMetaChangeVTable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.execute(f"create table if not exists {self.db}.{self.vstbName} (cts timestamp, cint int) tags (tint int) virtual 1") @@ -584,7 +584,7 @@ def __init__(self): self.vstbName = "vstb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamMetaTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamMetaChangeVTable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.db}.{self.stbName} (cts timestamp, cint int) tags (tint int, tbigint bigint)") tdSql.execute(f"create table if not exists {self.db}.{self.vstbName} (cts timestamp, cint int) tags (tint int, tbigint bigint) virtual 1") @@ -893,7 +893,7 @@ def __init__(self): self.vntbName = "vntb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamMetaTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamMetaChangeVTable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.db}.{self.stbName} (cts timestamp, 
cint int, cbigint bigint, cfloat float) tags (tint int, tbigint bigint, tfloat float)") tdSql.execute(f"create table if not exists {self.db}.{self.ntbName} (cts timestamp, cint int, cbigint bigint, cfloat float)") @@ -1138,7 +1138,7 @@ def __init__(self): self.vntbName1 = "vntb1" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamMetaTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamMetaChangeVTable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.db}.{self.stbName} (cts timestamp, cint int, cbigint bigint, cfloat float) tags (tint int, tbigint bigint, tfloat float)") tdSql.execute(f"create table if not exists {self.db}.{self.stbName1} (cts timestamp, cint int, cbigint bigint, cfloat float) tags (tint int, tbigint bigint, tfloat float)") @@ -1484,7 +1484,7 @@ def __init__(self): self.vntbName2 = "vntb2" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamMetaTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamMetaChangeVTable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.db}.{self.ntbName} (cts timestamp, cint int, cbigint bigint, cfloat float)") @@ -1674,7 +1674,7 @@ def __init__(self): def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamMetaTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamMetaChangeVTable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.db}.{self.stbName} (cts timestamp, cint int, cbigint bigint, cfloat float) tags (tint int)") tdSql.execute(f"create table if not exists {self.db}.{self.ntbName} (cts timestamp, cint int, cbigint bigint, cfloat float)") @@ -1994,12 +1994,12 @@ def 
__init__(self): self.vntbName1 = "vntb1" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamMetaTrigger.precision}'") - tdSql.execute(f"create database {self.db1} vgroups 1 buffer 8 precision '{TestStreamMetaTrigger.precision}'") - tdSql.execute(f"create database {self.db2} vgroups 1 buffer 8 precision '{TestStreamMetaTrigger.precision}'") - tdSql.execute(f"create database {self.db3} vgroups 1 buffer 8 precision '{TestStreamMetaTrigger.precision}'") - tdSql.execute(f"create database {self.db4} vgroups 1 buffer 8 precision '{TestStreamMetaTrigger.precision}'") - tdSql.execute(f"create database {self.db5} vgroups 1 buffer 8 precision '{TestStreamMetaTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamMetaChangeVTable.precision}'") + tdSql.execute(f"create database {self.db1} vgroups 1 buffer 8 precision '{TestStreamMetaChangeVTable.precision}'") + tdSql.execute(f"create database {self.db2} vgroups 1 buffer 8 precision '{TestStreamMetaChangeVTable.precision}'") + tdSql.execute(f"create database {self.db3} vgroups 1 buffer 8 precision '{TestStreamMetaChangeVTable.precision}'") + tdSql.execute(f"create database {self.db4} vgroups 1 buffer 8 precision '{TestStreamMetaChangeVTable.precision}'") + tdSql.execute(f"create database {self.db5} vgroups 1 buffer 8 precision '{TestStreamMetaChangeVTable.precision}'") # db1 tdSql.execute(f"create table if not exists {self.db1}.{self.stbName} (cts timestamp, cint int, cbigint bigint, cfloat float) tags (tint int, tbigint bigint, tfloat float)") diff --git a/test/cases/41-StreamProcessing/04-Options/test_options_abnormal.py b/test/cases/41-StreamProcessing/04-Options/test_options_abnormal.py index c79d519194d0..0da7255fee1c 100644 --- a/test/cases/41-StreamProcessing/04-Options/test_options_abnormal.py +++ b/test/cases/41-StreamProcessing/04-Options/test_options_abnormal.py @@ -2,14 +2,14 @@ from new_test_framework.utils import 
(tdLog,tdSql,tdStream,StreamCheckItem,) -class TestStreamOptionsTrigger: +class TestStreamOptionsAbnormal: precision = 'ms' def setup_class(cls): tdLog.debug(f"start to execute {__file__}") - def test_stream_options_trigger(self): - """stream options + def test_stream_options_abnormal(self): + """Options: abnormal test abnormal cases to stream @@ -31,6 +31,7 @@ def test_stream_options_trigger(self): streams = [] streams.append(self.Basic0()) + streams.append(self.Basic1()) tdStream.checkAll(streams) @@ -44,7 +45,7 @@ def __init__(self): self.ntbName = "ntb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsAbnormal.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.execute(f"create table if not exists {self.stbName2} (cts timestamp, cint int, cdouble double, cvarchar varchar(16)) tags (tint int)") @@ -142,5 +143,104 @@ def create(self): def insert1(self): pass + def check1(self): + pass + + class Basic1(StreamCheckItem): + def __init__(self): + self.db = "sdb1" + self.stbName = "stb" + self.stbName2 = "stb2" + self.ntbName = "ntb" + + # vtable + self.vstbName = "vstb" + self.vstbName2 = "vstb2" + self.vntbName = "vntb" + + def create(self): + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsAbnormal.precision}'") + tdSql.execute(f"use {self.db}") + tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") + tdSql.execute(f"create table if not exists {self.stbName2} (cts timestamp, cint int, cdouble double, cvarchar varchar(16)) tags (tint int)") + tdSql.execute(f"create table if not exists {self.ntbName} (cts timestamp, cint int, cdouble double, cvarchar varchar(16))") + + tdSql.execute(f"create table if not 
exists {self.vstbName} (cts timestamp, cint int) tags (tint int) virtual 1") + tdSql.execute(f"create table if not exists {self.vstbName2} (cts timestamp, cint int, cdouble double, cvarchar varchar(16)) tags (tint int) virtual 1") + tdSql.execute(f"create vtable if not exists {self.vntbName} (cts timestamp, cint int from {self.ntbName}.cint, cdouble double from {self.ntbName}.cdouble, cvarchar varchar(16) from {self.ntbName}.cvarchar)") + + tdSql.execute(f"create table ct1 using {self.stbName} tags(1)") + tdSql.execute(f"create table ct2 using {self.stbName} tags(2)") + tdSql.execute(f"create table ct101 using {self.stbName2} tags(1)") + tdSql.execute(f"create table ct102 using {self.stbName2} tags(2)") + + # vtables + tdSql.execute(f"create vtable vct1 (cint from {self.db}.ct1.cint) using {self.db}.{self.vstbName} tags(1)") + tdSql.execute(f"create vtable vct2 (cint from {self.db}.ct2.cint) using {self.db}.{self.vstbName} tags(2)") + tdSql.execute(f"create vtable vct101 (cint from ct101.cint, cdouble from ct101.cdouble, cvarchar from ct101.cvarchar) using {self.db}.{self.vstbName2} tags(1)") + tdSql.execute(f"create vtable vct102 (cint from ct102.cint, cdouble from ct102.cdouble, cvarchar from ct102.cvarchar) using {self.db}.{self.vstbName2} tags(2)") + + tdSql.error( + f"create stream sn0 state_window(cint) from vct1 stream_options(watermark(10s) | expired_time(5s)) into res_ct1 (firstts, lastts, cnt_v, sum_v, avg_v) as select first(_c0), last_row(_c0), count(cint), sum(cint), avg(cint) from %%trows;" + ) + tdSql.error( + f"create stream sn0_g state_window(cint) from {self.vstbName} partition by tbname, tint stream_options(watermark(10s) | expired_time(5s)) into res_stb OUTPUT_SUBTABLE(CONCAT('res_stb_', tbname)) (firstts, lastts, cnt_v, sum_v, avg_v) as select first(_c0), last_row(_c0), count(cint), sum(cint), avg(cint) from %%trows;" + ) + + tdSql.error( + f"create stream sn1 state_window(cint) from vct1 stream_options(watermark(0.5s)) into res_ct1 (firstts, 
lastts, cnt_v, sum_v, avg_v) as select first(_c0), last_row(_c0), count(cint), sum(cint), avg(cint) from %%trows;" + ) + + tdSql.error( + f"create stream sn1x state_window(cint) from vct1 stream_options(watermark(0.1d)) into res_ct1 (firstts, lastts, cnt_v, sum_v, avg_v) as select first(_c0), last_row(_c0), count(cint), sum(cint), avg(cint) from %%trows;" + ) + + tdSql.error( + f"create stream sn2 state_window(cint) from vct1 stream_options(fill_history(1733368671))) into res_ct1 (firstts, lastts, cnt_v, sum_v, avg_v) as select first(_c0), last_row(_c0), count(cint), sum(cint), avg(cint) from %%trows;" + ) + + tdSql.error( + f"create stream sn3 state_window(cint) from vct1 stream_options(fill_history | fill_history_first) into res_ct1 (firstts, lastts, cnt_v, sum_v, avg_v) as select first(_c0), last_row(_c0), count(cint), sum(cint), avg(cint) from %%trows;" + ) + + tdSql.error( + f"create stream sn4 period(10s) from vct1 stream_options(fill_history) into res_ct1 (firstts, lastts, cnt_v, sum_v, avg_v) as select first(_c0), last_row(_c0), count(cint), sum(cint), avg(cint) from %%trows;" + ) + + tdSql.error( + f"create stream sn5 period(10s) from vct1 stream_options(fill_history_first) into res_ct1 (firstts, lastts, cnt_v, sum_v, avg_v) as select first(_c0), last_row(_c0), count(cint), sum(cint), avg(cint) from %%trows;" + ) + + tdSql.error( + f"create stream sn6 state_window(cint) from vct1 stream_options(pre_filter(cdouble < 5)) into res_ct1 (firstts, lastts, cnt_v, sum_v, avg_v) as select first(_c0), last_row(_c0), count(cint), sum(cint), avg(cint) from %%trows;" + ) + + tdSql.execute( + f"create stream sn7 state_window(cint) from vntb stream_options(pre_filter(cint < 5 and cvarchar like '%abc%')) into res_ntb (firstts, lastts, cnt_v, sum_v, avg_v) as select first(_c0), last_row(_c0), count(cint), sum(cint), avg(cint) from %%trows;" + ) + + tdSql.error( + f"create stream sn8 state_window(cint) from {self.vntbName} into res_ntb (firstts, lastts, cnt_v, sum_v, avg_v) 
as select first(_c0), last_row(_c0), count(cint), sum(cint), avg(cint) from %%trows where cbigint > 1;" + ) + + tdSql.error( + f"create stream sn9 state_window(cint) from vntb options(pre_filter(cint < 5 and cvarchar like '%abc%')) into res_ntb (firstts, lastts, cnt_v, sum_v, avg_v) as select first(_c0), last_row(_c0), count(cint), sum(cint), avg(cint) from %%trows;" + ) + + # %%trows must not use with WINDOW_OPEN in event_type + tdSql.error( + f"create stream sn10 state_window(cint) from vct1 stream_options(event_type(WINDOW_OPEN|WINDOW_CLOSE)) into res_ct1 (lastts, firstts, cnt_v, sum_v, avg_v) as select last_row(_c0), first(_c0), count(cint), sum(cint), avg(cint) from %%trows;" + ) + + tdSql.execute( + f"create stream sn11_g state_window(cint) from {self.vstbName} partition by tbname, tint stream_options(watermark(10s) | expired_time(500s)) into res_stb OUTPUT_SUBTABLE(CONCAT('res_stb_', tbname)) (firstts, lastts, cnt_v, sum_v, avg_v) as select first(_c0), last_row(_c0), count(cint), sum(cint), avg(cint) from %%trows;" + ) + tdSql.error( + f"alter table vct1 set tag tint = 999;" + ) + + def insert1(self): + pass + def check1(self): pass \ No newline at end of file diff --git a/test/cases/41-StreamProcessing/04-Options/test_options_abnormal_vtbl.py b/test/cases/41-StreamProcessing/04-Options/test_options_abnormal_vtbl.py deleted file mode 100644 index 75c6a1eba671..000000000000 --- a/test/cases/41-StreamProcessing/04-Options/test_options_abnormal_vtbl.py +++ /dev/null @@ -1,134 +0,0 @@ -import time -from new_test_framework.utils import (tdLog,tdSql,tdStream,StreamCheckItem,) - - -class TestStreamOptionsTrigger: - precision = 'ms' - - def setup_class(cls): - tdLog.debug(f"start to execute {__file__}") - - def test_stream_options_trigger(self): - """stream options - - test abnormal cases to stream for virtual table - - Catalog: - - Streams:UseCases - - Since: v3.3.3.7 - - Labels: common,ci - - Jira: None - - History: - - 2025-6-16 Lihui Created - - """ - - 
tdStream.createSnode() - - streams = [] - streams.append(self.Basic0()) - - tdStream.checkAll(streams) - - class Basic0(StreamCheckItem): - def __init__(self): - self.db = "sdb0" - self.stbName = "stb" - self.stbName2 = "stb2" - self.ntbName = "ntb" - - # vtable - self.vstbName = "vstb" - self.vstbName2 = "vstb2" - self.vntbName = "vntb" - - def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") - tdSql.execute(f"use {self.db}") - tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") - tdSql.execute(f"create table if not exists {self.stbName2} (cts timestamp, cint int, cdouble double, cvarchar varchar(16)) tags (tint int)") - tdSql.execute(f"create table if not exists {self.ntbName} (cts timestamp, cint int, cdouble double, cvarchar varchar(16))") - - tdSql.execute(f"create table if not exists {self.vstbName} (cts timestamp, cint int) tags (tint int) virtual 1") - tdSql.execute(f"create table if not exists {self.vstbName2} (cts timestamp, cint int, cdouble double, cvarchar varchar(16)) tags (tint int) virtual 1") - tdSql.execute(f"create vtable if not exists {self.vntbName} (cts timestamp, cint int from {self.ntbName}.cint, cdouble double from {self.ntbName}.cdouble, cvarchar varchar(16) from {self.ntbName}.cvarchar)") - - tdSql.execute(f"create table ct1 using {self.stbName} tags(1)") - tdSql.execute(f"create table ct2 using {self.stbName} tags(2)") - tdSql.execute(f"create table ct101 using {self.stbName2} tags(1)") - tdSql.execute(f"create table ct102 using {self.stbName2} tags(2)") - - # vtables - tdSql.execute(f"create vtable vct1 (cint from {self.db}.ct1.cint) using {self.db}.{self.vstbName} tags(1)") - tdSql.execute(f"create vtable vct2 (cint from {self.db}.ct2.cint) using {self.db}.{self.vstbName} tags(2)") - tdSql.execute(f"create vtable vct101 (cint from ct101.cint, cdouble from ct101.cdouble, cvarchar from ct101.cvarchar) using 
{self.db}.{self.vstbName2} tags(1)") - tdSql.execute(f"create vtable vct102 (cint from ct102.cint, cdouble from ct102.cdouble, cvarchar from ct102.cvarchar) using {self.db}.{self.vstbName2} tags(2)") - - tdSql.error( - f"create stream sn0 state_window(cint) from vct1 stream_options(watermark(10s) | expired_time(5s)) into res_ct1 (firstts, lastts, cnt_v, sum_v, avg_v) as select first(_c0), last_row(_c0), count(cint), sum(cint), avg(cint) from %%trows;" - ) - tdSql.error( - f"create stream sn0_g state_window(cint) from {self.vstbName} partition by tbname, tint stream_options(watermark(10s) | expired_time(5s)) into res_stb OUTPUT_SUBTABLE(CONCAT('res_stb_', tbname)) (firstts, lastts, cnt_v, sum_v, avg_v) as select first(_c0), last_row(_c0), count(cint), sum(cint), avg(cint) from %%trows;" - ) - - tdSql.error( - f"create stream sn1 state_window(cint) from vct1 stream_options(watermark(0.5s)) into res_ct1 (firstts, lastts, cnt_v, sum_v, avg_v) as select first(_c0), last_row(_c0), count(cint), sum(cint), avg(cint) from %%trows;" - ) - - tdSql.error( - f"create stream sn1x state_window(cint) from vct1 stream_options(watermark(0.1d)) into res_ct1 (firstts, lastts, cnt_v, sum_v, avg_v) as select first(_c0), last_row(_c0), count(cint), sum(cint), avg(cint) from %%trows;" - ) - - tdSql.error( - f"create stream sn2 state_window(cint) from vct1 stream_options(fill_history(1733368671))) into res_ct1 (firstts, lastts, cnt_v, sum_v, avg_v) as select first(_c0), last_row(_c0), count(cint), sum(cint), avg(cint) from %%trows;" - ) - - tdSql.error( - f"create stream sn3 state_window(cint) from vct1 stream_options(fill_history | fill_history_first) into res_ct1 (firstts, lastts, cnt_v, sum_v, avg_v) as select first(_c0), last_row(_c0), count(cint), sum(cint), avg(cint) from %%trows;" - ) - - tdSql.error( - f"create stream sn4 period(10s) from vct1 stream_options(fill_history) into res_ct1 (firstts, lastts, cnt_v, sum_v, avg_v) as select first(_c0), last_row(_c0), count(cint), 
sum(cint), avg(cint) from %%trows;" - ) - - tdSql.error( - f"create stream sn5 period(10s) from vct1 stream_options(fill_history_first) into res_ct1 (firstts, lastts, cnt_v, sum_v, avg_v) as select first(_c0), last_row(_c0), count(cint), sum(cint), avg(cint) from %%trows;" - ) - - tdSql.error( - f"create stream sn6 state_window(cint) from vct1 stream_options(pre_filter(cdouble < 5)) into res_ct1 (firstts, lastts, cnt_v, sum_v, avg_v) as select first(_c0), last_row(_c0), count(cint), sum(cint), avg(cint) from %%trows;" - ) - - tdSql.execute( - f"create stream sn7 state_window(cint) from vntb stream_options(pre_filter(cint < 5 and cvarchar like '%abc%')) into res_ntb (firstts, lastts, cnt_v, sum_v, avg_v) as select first(_c0), last_row(_c0), count(cint), sum(cint), avg(cint) from %%trows;" - ) - - tdSql.error( - f"create stream sn8 state_window(cint) from {self.vntbName} into res_ntb (firstts, lastts, cnt_v, sum_v, avg_v) as select first(_c0), last_row(_c0), count(cint), sum(cint), avg(cint) from %%trows where cbigint > 1;" - ) - - tdSql.error( - f"create stream sn9 state_window(cint) from vntb options(pre_filter(cint < 5 and cvarchar like '%abc%')) into res_ntb (firstts, lastts, cnt_v, sum_v, avg_v) as select first(_c0), last_row(_c0), count(cint), sum(cint), avg(cint) from %%trows;" - ) - - # %%trows must not use with WINDOW_OPEN in event_type - tdSql.error( - f"create stream sn10 state_window(cint) from vct1 stream_options(event_type(WINDOW_OPEN|WINDOW_CLOSE)) into res_ct1 (lastts, firstts, cnt_v, sum_v, avg_v) as select last_row(_c0), first(_c0), count(cint), sum(cint), avg(cint) from %%trows;" - ) - - tdSql.execute( - f"create stream sn11_g state_window(cint) from {self.vstbName} partition by tbname, tint stream_options(watermark(10s) | expired_time(500s)) into res_stb OUTPUT_SUBTABLE(CONCAT('res_stb_', tbname)) (firstts, lastts, cnt_v, sum_v, avg_v) as select first(_c0), last_row(_c0), count(cint), sum(cint), avg(cint) from %%trows;" - ) - tdSql.error( - 
f"alter table vct1 set tag tint = 999;" - ) - - def insert1(self): - pass - - def check1(self): - pass \ No newline at end of file diff --git a/test/cases/41-StreamProcessing/04-Options/test_options.py b/test/cases/41-StreamProcessing/04-Options/test_options_basic.py similarity index 99% rename from test/cases/41-StreamProcessing/04-Options/test_options.py rename to test/cases/41-StreamProcessing/04-Options/test_options_basic.py index d3a3367ddcb6..e3a20bad64f1 100644 --- a/test/cases/41-StreamProcessing/04-Options/test_options.py +++ b/test/cases/41-StreamProcessing/04-Options/test_options_basic.py @@ -2,19 +2,19 @@ from new_test_framework.utils import (tdLog,tdSql,tdStream,StreamCheckItem,) -class TestStreamOptionsTrigger: +class TestStreamOptionsBasic: precision = 'ms' def setup_class(cls): tdLog.debug(f"start to execute {__file__}") - def test_stream_options_trigger(self): - """stream options + def test_stream_options_basic(self): + """Options: basic test test options item of stream Catalog: - - Streams:UseCases + - Streams:Options Since: v3.3.3.7 @@ -63,7 +63,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsBasic.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -247,7 +247,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsBasic.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.db}.{self.stbName} (cts timestamp, cint int) tags (tint int)") 
tdSql.query(f"show stables") @@ -447,7 +447,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsBasic.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -611,7 +611,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsBasic.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -804,7 +804,7 @@ def __init__(self): self.vstbName = "vstb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsBasic.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.db}.{self.stbName} (cts timestamp, cint int) tags (tint int)") # tdSql.query(f"show stables") @@ -964,7 +964,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsBasic.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -1190,7 +1190,7 @@ def __init__(self): self.stbName = "stb" def create(self): - 
tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsBasic.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -1417,7 +1417,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsBasic.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.db}.{self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -1524,7 +1524,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsBasic.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -1663,7 +1663,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsBasic.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -1827,7 +1827,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision 
'{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsBasic.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int, ctiny tinyint) tags (tint int)") tdSql.query(f"show stables") @@ -2017,7 +2017,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsBasic.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int, cuint INT UNSIGNED) tags (tint int)") tdSql.query(f"show stables") @@ -2425,7 +2425,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsBasic.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -2574,7 +2574,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsBasic.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -2719,7 +2719,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} 
vgroups 1 buffer 8 precision '{TestStreamOptionsBasic.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") diff --git a/test/cases/41-StreamProcessing/04-Options/test_options_ns.py b/test/cases/41-StreamProcessing/04-Options/test_options_ns.py index 80ea3dccbf13..71fa9af8c812 100644 --- a/test/cases/41-StreamProcessing/04-Options/test_options_ns.py +++ b/test/cases/41-StreamProcessing/04-Options/test_options_ns.py @@ -2,19 +2,19 @@ from new_test_framework.utils import (tdLog,tdSql,tdStream,StreamCheckItem,) -class TestStreamOptionsTrigger: +class TestStreamOptionsNs: precision = 'ns' def setup_class(cls): tdLog.debug(f"start to execute {__file__}") - def test_stream_options_trigger(self): - """stream options + def test_stream_options_ns(self): + """Options: precision ns test options item of stream to precision ns Catalog: - - Streams:UseCases + - Streams:Options Since: v3.3.3.7 @@ -61,7 +61,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsNs.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -245,7 +245,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsNs.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.db}.{self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -445,7 +445,7 @@ def __init__(self): 
self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsNs.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -609,7 +609,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsNs.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -802,7 +802,7 @@ def __init__(self): self.vstbName = "vstb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsNs.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.db}.{self.stbName} (cts timestamp, cint int) tags (tint int)") # tdSql.query(f"show stables") @@ -962,7 +962,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsNs.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -1188,7 +1188,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision 
'{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsNs.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -1415,7 +1415,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsNs.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.db}.{self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -1522,7 +1522,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsNs.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -1661,7 +1661,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsNs.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -1825,7 +1825,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision 
'{TestStreamOptionsNs.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int, ctiny tinyint) tags (tint int)") tdSql.query(f"show stables") @@ -2015,7 +2015,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsNs.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int, cuint INT UNSIGNED) tags (tint int)") tdSql.query(f"show stables") @@ -2424,7 +2424,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsNs.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -2573,7 +2573,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsNs.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -2718,7 +2718,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsNs.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table 
if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") diff --git a/test/cases/41-StreamProcessing/04-Options/test_options_us.py b/test/cases/41-StreamProcessing/04-Options/test_options_us.py index f507b73648b7..6638e6fabf03 100644 --- a/test/cases/41-StreamProcessing/04-Options/test_options_us.py +++ b/test/cases/41-StreamProcessing/04-Options/test_options_us.py @@ -2,19 +2,19 @@ from new_test_framework.utils import (tdLog,tdSql,tdStream,StreamCheckItem,) -class TestStreamOptionsTrigger: +class TestStreamOptionsUs: precision = 'us' def setup_class(cls): tdLog.debug(f"start to execute {__file__}") - def test_stream_options_trigger(self): - """stream options + def test_stream_options_us(self): + """Options: precision us test options item of stream Catalog: - - Streams:UseCases + - Streams:Options Since: v3.3.3.7 @@ -63,7 +63,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsUs.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -247,7 +247,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsUs.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.db}.{self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -447,7 +447,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision 
'{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsUs.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -611,7 +611,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsUs.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -804,7 +804,7 @@ def __init__(self): self.vstbName = "vstb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsUs.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.db}.{self.stbName} (cts timestamp, cint int) tags (tint int)") # tdSql.query(f"show stables") @@ -964,7 +964,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsUs.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -1190,7 +1190,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision 
'{TestStreamOptionsUs.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -1417,7 +1417,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsUs.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.db}.{self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -1524,7 +1524,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsUs.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -1663,7 +1663,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsUs.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -1827,7 +1827,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsUs.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists 
{self.stbName} (cts timestamp, cint int, ctiny tinyint) tags (tint int)") tdSql.query(f"show stables") @@ -2017,7 +2017,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsUs.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int, cuint INT UNSIGNED) tags (tint int)") tdSql.query(f"show stables") @@ -2426,7 +2426,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsUs.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -2575,7 +2575,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsUs.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") @@ -2720,7 +2720,7 @@ def __init__(self): self.stbName = "stb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsUs.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.query(f"show stables") diff 
--git a/test/cases/41-StreamProcessing/04-Options/test_options_vtbl.py b/test/cases/41-StreamProcessing/04-Options/test_options_vtable.py similarity index 99% rename from test/cases/41-StreamProcessing/04-Options/test_options_vtbl.py rename to test/cases/41-StreamProcessing/04-Options/test_options_vtable.py index 0e03306dfaa0..7e9d5a791ef3 100644 --- a/test/cases/41-StreamProcessing/04-Options/test_options_vtbl.py +++ b/test/cases/41-StreamProcessing/04-Options/test_options_vtable.py @@ -2,19 +2,19 @@ from new_test_framework.utils import (tdLog,tdSql,tdStream,StreamCheckItem,) -class TestStreamOptionsTrigger: +class TestStreamOptionsVtable: precision = 'ms' def setup_class(cls): tdLog.debug(f"start to execute {__file__}") - def test_stream_options_trigger(self): - """stream options + def test_stream_options_vtable(self): + """Options: virtual table test options item of stream to virtual table Catalog: - - Streams:UseCases + - Streams:Options Since: v3.3.3.7 @@ -62,7 +62,7 @@ def __init__(self): self.vstbName = "vstb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsVtable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.execute(f"create table if not exists {self.vstbName} (cts timestamp, cint int) tags (tint int) virtual 1") @@ -253,7 +253,7 @@ def __init__(self): self.vstbName = "vstb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsVtable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.db}.{self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.execute(f"create 
table if not exists {self.db}.{self.vstbName} (cts timestamp, cint int) tags (tint int) virtual 1") @@ -462,7 +462,7 @@ def __init__(self): self.vstbName = "vstb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsVtable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.execute(f"create table if not exists {self.db}.{self.vstbName} (cts timestamp, cint int) tags (tint int) virtual 1") @@ -631,7 +631,7 @@ def __init__(self): self.vstbName = "vstb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsVtable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.execute(f"create table if not exists {self.db}.{self.vstbName} (cts timestamp, cint int) tags (tint int) virtual 1") @@ -828,7 +828,7 @@ def __init__(self): self.vstbName = "vstb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsVtable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.db}.{self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.execute(f"create table if not exists {self.db}.{self.vstbName} (cts timestamp, cint int) tags (tint int) virtual 1") @@ -995,7 +995,7 @@ def __init__(self): self.vstbName = "vstb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") 
+ tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsVtable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.execute(f"create table if not exists {self.db}.{self.vstbName} (cts timestamp, cint int) tags (tint int) virtual 1") @@ -1228,7 +1228,7 @@ def __init__(self): self.vstbName = "vstb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsVtable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.execute(f"create table if not exists {self.db}.{self.vstbName} (cts timestamp, cint int) tags (tint int) virtual 1") @@ -1462,7 +1462,7 @@ def __init__(self): self.vstbName = "vstb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsVtable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.db}.{self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.execute(f"create table if not exists {self.db}.{self.vstbName} (cts timestamp, cint int) tags (tint int) virtual 1") @@ -1576,7 +1576,7 @@ def __init__(self): self.vstbName = "vstb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsVtable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.execute(f"create table if not 
exists {self.db}.{self.vstbName} (cts timestamp, cint int) tags (tint int) virtual 1") @@ -1722,7 +1722,7 @@ def __init__(self): self.vstbName = "vstb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsVtable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.execute(f"create table if not exists {self.db}.{self.vstbName} (cts timestamp, cint int) tags (tint int) virtual 1") @@ -1895,7 +1895,7 @@ def __init__(self): self.vstbName = "vstb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsVtable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.db}.{self.stbName} (cts timestamp, cint int, ctiny tinyint) tags (tint int)") tdSql.execute(f"create table if not exists {self.db}.{self.vstbName} (cts timestamp, cint int, ctiny tinyint) tags (tint int) virtual 1") @@ -2092,7 +2092,7 @@ def __init__(self): self.vstbName = "vstb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsVtable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int, cuint INT UNSIGNED) tags (tint int)") tdSql.execute(f"create table if not exists {self.db}.{self.vstbName} (cts timestamp, cint int, cuint INT UNSIGNED) tags (tint int) virtual 1") @@ -2464,7 +2464,7 @@ def __init__(self): self.vstbName = "vstb" def create(self): - tdSql.execute(f"create database {self.db} 
vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsVtable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.db}.{self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.execute(f"create table if not exists {self.db}.{self.vstbName} (cts timestamp, cint int) tags (tint int) virtual 1") @@ -2645,7 +2645,7 @@ def __init__(self): self.vstbName = "vstb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsVtable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.execute(f"create table if not exists {self.db}.{self.vstbName} (cts timestamp, cint int) tags (tint int) virtual 1") @@ -2797,7 +2797,7 @@ def __init__(self): self.vstbName = "vstb" def create(self): - tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsTrigger.precision}'") + tdSql.execute(f"create database {self.db} vgroups 1 buffer 8 precision '{TestStreamOptionsVtable.precision}'") tdSql.execute(f"use {self.db}") tdSql.execute(f"create table if not exists {self.stbName} (cts timestamp, cint int) tags (tint int)") tdSql.execute(f"create table if not exists {self.db}.{self.vstbName} (cts timestamp, cint int) tags (tint int) virtual 1") diff --git a/test/cases/41-StreamProcessing/05-Notify/test_notify.py b/test/cases/41-StreamProcessing/05-Notify/test_notify.py index c36ad084e81e..1d2d0be7d5f6 100644 --- a/test/cases/41-StreamProcessing/05-Notify/test_notify.py +++ b/test/cases/41-StreamProcessing/05-Notify/test_notify.py @@ -25,7 +25,7 @@ def test_stream_notify_trigger(self): Jira: None History: - - 2025-5-26 Guoxy Created + - 2025-5-26 
HaojunLiao Created """ @@ -35,12 +35,12 @@ def test_stream_notify_trigger(self): streams = [] # streams.append(self.Basic1()) # OK - # streams.append(self.Basic2()) # OK + streams.append(self.Basic2()) # OK # streams.append(self.Basic3()) # failed - # streams.append(self.Basic4()) # OK - # streams.append(self.Basic5()) # OK + streams.append(self.Basic4()) # OK + streams.append(self.Basic5()) # OK # streams.append(self.Basic6()) # failed - # streams.append(self.Basic7()) # OK + streams.append(self.Basic7()) # OK # streams.append(self.Basic8()) # OK # streams.append(self.Basic9()) # OK # streams.append(self.Basic10()) # failed diff --git a/test/cases/41-StreamProcessing/06-ResultSaved/test_result_saved_bug_1.py b/test/cases/41-StreamProcessing/06-ResultSaved/result_saved_bug_1.py similarity index 99% rename from test/cases/41-StreamProcessing/06-ResultSaved/test_result_saved_bug_1.py rename to test/cases/41-StreamProcessing/06-ResultSaved/result_saved_bug_1.py index a09f9ce47ef7..36f8307ee758 100644 --- a/test/cases/41-StreamProcessing/06-ResultSaved/test_result_saved_bug_1.py +++ b/test/cases/41-StreamProcessing/06-ResultSaved/result_saved_bug_1.py @@ -69,7 +69,7 @@ def test_stream_result_saved_comprehensive(self): Since: v3.0.0.0 - Labels: common,ci + Labels: common,ci,skip Jira: None diff --git a/test/cases/41-StreamProcessing/06-ResultSaved/test_result_saved_bug_2.py b/test/cases/41-StreamProcessing/06-ResultSaved/result_saved_bug_2.py similarity index 99% rename from test/cases/41-StreamProcessing/06-ResultSaved/test_result_saved_bug_2.py rename to test/cases/41-StreamProcessing/06-ResultSaved/result_saved_bug_2.py index 426d50de29cd..ac817baafcbe 100644 --- a/test/cases/41-StreamProcessing/06-ResultSaved/test_result_saved_bug_2.py +++ b/test/cases/41-StreamProcessing/06-ResultSaved/result_saved_bug_2.py @@ -72,7 +72,7 @@ def test_stream_result_saved_comprehensive(self): 5.1.4 Verify error handling for missing target table Catalog: - - Streams:ResultSaved + - 
Streams:ResultSaved,skip Since: v3.0.0.0 diff --git a/test/cases/41-StreamProcessing/06-ResultSaved/test_result_saved_comprehensive.py b/test/cases/41-StreamProcessing/06-ResultSaved/test_result_saved_comprehensive.py index cde0377aa871..c20f039890af 100644 --- a/test/cases/41-StreamProcessing/06-ResultSaved/test_result_saved_comprehensive.py +++ b/test/cases/41-StreamProcessing/06-ResultSaved/test_result_saved_comprehensive.py @@ -8,7 +8,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_result_saved_comprehensive(self): - """Stream Result Saved Comprehensive + """ResultSaved: comprehensive 1. Test [INTO [db_name.]table_name] 1.1 Test whether this option exists diff --git a/test/cases/41-StreamProcessing/06-ResultSaved/test_result_saved_datatype_precision.py b/test/cases/41-StreamProcessing/06-ResultSaved/test_result_saved_datatype_precision.py index 683b72ca089e..4c9a8b599ec7 100644 --- a/test/cases/41-StreamProcessing/06-ResultSaved/test_result_saved_datatype_precision.py +++ b/test/cases/41-StreamProcessing/06-ResultSaved/test_result_saved_datatype_precision.py @@ -8,7 +8,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_result_saved_datatype_precision(self): - """Stream Result Saved Datatype Precision Tests + """ResultSaved: datatype precision This test focuses on precise datatype validation and edge cases: diff --git a/test/cases/41-StreamProcessing/06-ResultSaved/test_result_saved_errors.py b/test/cases/41-StreamProcessing/06-ResultSaved/test_result_saved_errors.py index acee30cb2be0..10190bd4f5c1 100644 --- a/test/cases/41-StreamProcessing/06-ResultSaved/test_result_saved_errors.py +++ b/test/cases/41-StreamProcessing/06-ResultSaved/test_result_saved_errors.py @@ -8,7 +8,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_result_saved_errors(self): - """Stream Result Saved Error Cases + """ResultSaved: error cases This test covers error scenarios and 
boundary conditions for stream result saving: diff --git a/test/cases/41-StreamProcessing/06-ResultSaved/test_result_saved_primary_key.py b/test/cases/41-StreamProcessing/06-ResultSaved/test_result_saved_primary_key.py index b9715ce9e4b4..1b0c65c70ae1 100644 --- a/test/cases/41-StreamProcessing/06-ResultSaved/test_result_saved_primary_key.py +++ b/test/cases/41-StreamProcessing/06-ResultSaved/test_result_saved_primary_key.py @@ -8,7 +8,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_result_saved_primary_key(self): - """Stream Result Saved Primary Key Tests + """ResultSaved: primary key tests This test focuses specifically on PRIMARY KEY functionality in stream result saving: diff --git a/test/cases/41-StreamProcessing/06-ResultSaved/test_result_saved_schema_validation.py b/test/cases/41-StreamProcessing/06-ResultSaved/test_result_saved_schema_validation.py index 6a119625842d..ebd79149fa7b 100644 --- a/test/cases/41-StreamProcessing/06-ResultSaved/test_result_saved_schema_validation.py +++ b/test/cases/41-StreamProcessing/06-ResultSaved/test_result_saved_schema_validation.py @@ -8,7 +8,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_result_saved_schema_validation(self): - """Stream Result Saved Schema Validation + """ResultSaved: schema validation This test focuses on precise validation of result table schemas: diff --git a/test/cases/41-StreamProcessing/08-Recalc/test_recalc_bug_10.py b/test/cases/41-StreamProcessing/08-Recalc/recalc_bug_10.py similarity index 100% rename from test/cases/41-StreamProcessing/08-Recalc/test_recalc_bug_10.py rename to test/cases/41-StreamProcessing/08-Recalc/recalc_bug_10.py diff --git a/test/cases/41-StreamProcessing/08-Recalc/test_recalc_bug_11.py b/test/cases/41-StreamProcessing/08-Recalc/recalc_bug_11.py similarity index 100% rename from test/cases/41-StreamProcessing/08-Recalc/test_recalc_bug_11.py rename to 
test/cases/41-StreamProcessing/08-Recalc/recalc_bug_11.py diff --git a/test/cases/41-StreamProcessing/08-Recalc/test_recalc_bug_12.py b/test/cases/41-StreamProcessing/08-Recalc/recalc_bug_12.py similarity index 100% rename from test/cases/41-StreamProcessing/08-Recalc/test_recalc_bug_12.py rename to test/cases/41-StreamProcessing/08-Recalc/recalc_bug_12.py diff --git a/test/cases/41-StreamProcessing/08-Recalc/test_recalc_bug_5.py b/test/cases/41-StreamProcessing/08-Recalc/recalc_bug_5.py similarity index 100% rename from test/cases/41-StreamProcessing/08-Recalc/test_recalc_bug_5.py rename to test/cases/41-StreamProcessing/08-Recalc/recalc_bug_5.py diff --git a/test/cases/41-StreamProcessing/08-Recalc/test_recalc_bug_6.py b/test/cases/41-StreamProcessing/08-Recalc/recalc_bug_6.py similarity index 100% rename from test/cases/41-StreamProcessing/08-Recalc/test_recalc_bug_6.py rename to test/cases/41-StreamProcessing/08-Recalc/recalc_bug_6.py diff --git a/test/cases/41-StreamProcessing/08-Recalc/test_recalc_bug_7.py b/test/cases/41-StreamProcessing/08-Recalc/recalc_bug_7.py similarity index 100% rename from test/cases/41-StreamProcessing/08-Recalc/test_recalc_bug_7.py rename to test/cases/41-StreamProcessing/08-Recalc/recalc_bug_7.py diff --git a/test/cases/41-StreamProcessing/08-Recalc/test_recalc_bug_8.py b/test/cases/41-StreamProcessing/08-Recalc/recalc_bug_8.py similarity index 100% rename from test/cases/41-StreamProcessing/08-Recalc/test_recalc_bug_8.py rename to test/cases/41-StreamProcessing/08-Recalc/recalc_bug_8.py diff --git a/test/cases/41-StreamProcessing/08-Recalc/test_recalc_bug_9.py b/test/cases/41-StreamProcessing/08-Recalc/recalc_bug_9.py similarity index 100% rename from test/cases/41-StreamProcessing/08-Recalc/test_recalc_bug_9.py rename to test/cases/41-StreamProcessing/08-Recalc/recalc_bug_9.py diff --git a/test/cases/41-StreamProcessing/08-Recalc/test_recalc_expired_bug_1.py b/test/cases/41-StreamProcessing/08-Recalc/recalc_expired_bug_1.py 
similarity index 100% rename from test/cases/41-StreamProcessing/08-Recalc/test_recalc_expired_bug_1.py rename to test/cases/41-StreamProcessing/08-Recalc/recalc_expired_bug_1.py diff --git a/test/cases/41-StreamProcessing/08-Recalc/test_recalc_expired_bug_2.py b/test/cases/41-StreamProcessing/08-Recalc/recalc_expired_bug_2.py similarity index 100% rename from test/cases/41-StreamProcessing/08-Recalc/test_recalc_expired_bug_2.py rename to test/cases/41-StreamProcessing/08-Recalc/recalc_expired_bug_2.py diff --git a/test/cases/41-StreamProcessing/08-Recalc/test_recalc_expired_bug_3.py b/test/cases/41-StreamProcessing/08-Recalc/recalc_expired_bug_3.py similarity index 100% rename from test/cases/41-StreamProcessing/08-Recalc/test_recalc_expired_bug_3.py rename to test/cases/41-StreamProcessing/08-Recalc/recalc_expired_bug_3.py diff --git a/test/cases/41-StreamProcessing/08-Recalc/test_recalc_expired_bug_4.py b/test/cases/41-StreamProcessing/08-Recalc/recalc_expired_bug_4.py similarity index 100% rename from test/cases/41-StreamProcessing/08-Recalc/test_recalc_expired_bug_4.py rename to test/cases/41-StreamProcessing/08-Recalc/recalc_expired_bug_4.py diff --git a/test/cases/41-StreamProcessing/08-Recalc/test_recalc_combined_options.py b/test/cases/41-StreamProcessing/08-Recalc/test_recalc_combined_options.py index d64a19d13689..6834b16ac91c 100644 --- a/test/cases/41-StreamProcessing/08-Recalc/test_recalc_combined_options.py +++ b/test/cases/41-StreamProcessing/08-Recalc/test_recalc_combined_options.py @@ -9,7 +9,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_recalc_combined_options(self): - """Stream Recalculation Combined Options Test + """Recalc: mixed options Test complex interactions between multiple stream recalculation options: diff --git a/test/cases/41-StreamProcessing/08-Recalc/test_recalc_delete_recalc.py b/test/cases/41-StreamProcessing/08-Recalc/test_recalc_delete_recalc.py index c57c0e5043d5..b169887e366b 100644 
--- a/test/cases/41-StreamProcessing/08-Recalc/test_recalc_delete_recalc.py +++ b/test/cases/41-StreamProcessing/08-Recalc/test_recalc_delete_recalc.py @@ -9,7 +9,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_recalc_delete_recalc(self): - """Stream Recalculation DELETE_RECALC Option Test + """Recalc: DELETE_RECALC Test DELETE_RECALC option behavior with various data deletion scenarios: diff --git a/test/cases/41-StreamProcessing/08-Recalc/test_recalc_expired_time.py b/test/cases/41-StreamProcessing/08-Recalc/test_recalc_expired_time.py index 7587067eb20a..f67d5fc50701 100644 --- a/test/cases/41-StreamProcessing/08-Recalc/test_recalc_expired_time.py +++ b/test/cases/41-StreamProcessing/08-Recalc/test_recalc_expired_time.py @@ -9,7 +9,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_recalc_expired_time(self): - """Stream Recalculation EXPIRED_TIME Option Test + """Recalc: EXPIRED_TIME Test EXPIRED_TIME(1h) option with 6 different window types and verify expired data handling: diff --git a/test/cases/41-StreamProcessing/08-Recalc/test_recalc_ignore_disorder.py b/test/cases/41-StreamProcessing/08-Recalc/test_recalc_ignore_disorder.py index f9f624f1ee15..ba37bec24e4e 100644 --- a/test/cases/41-StreamProcessing/08-Recalc/test_recalc_ignore_disorder.py +++ b/test/cases/41-StreamProcessing/08-Recalc/test_recalc_ignore_disorder.py @@ -9,7 +9,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_recalc_ignore_disorder(self): - """Stream Recalculation IGNORE_DISORDER Option Test + """Recalc: IGNORE_DISORDER Test IGNORE_DISORDER option behavior with six different window types to verify out-of-order data handling: diff --git a/test/cases/41-StreamProcessing/08-Recalc/test_recalc_manual.py b/test/cases/41-StreamProcessing/08-Recalc/test_recalc_manual_basic.py similarity index 99% rename from test/cases/41-StreamProcessing/08-Recalc/test_recalc_manual.py rename to 
test/cases/41-StreamProcessing/08-Recalc/test_recalc_manual_basic.py index 8a8f9dc3b74a..bf848b708a31 100644 --- a/test/cases/41-StreamProcessing/08-Recalc/test_recalc_manual.py +++ b/test/cases/41-StreamProcessing/08-Recalc/test_recalc_manual_basic.py @@ -9,7 +9,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_recalc_manual(self): - """Stream Manual Recalculation Test + """Recalc: Manual basic testing Test manual recalculation functionality for four different window types, verifying the recalculate stream command in various window scenarios: diff --git a/test/cases/41-StreamProcessing/08-Recalc/test_recalc_manual_with_options.py b/test/cases/41-StreamProcessing/08-Recalc/test_recalc_manual_with_options.py index 72b6053745b6..929b32aec675 100644 --- a/test/cases/41-StreamProcessing/08-Recalc/test_recalc_manual_with_options.py +++ b/test/cases/41-StreamProcessing/08-Recalc/test_recalc_manual_with_options.py @@ -9,7 +9,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_recalc_with_options(self): - """Stream Manual Recalculation with Options Test + """Recalc: Manual testing with options Test manual recalculation functionality combined with four different stream options: diff --git a/test/cases/41-StreamProcessing/08-Recalc/test_recalc_watermark.py b/test/cases/41-StreamProcessing/08-Recalc/test_recalc_watermark.py index 30262b14963d..b8997fe1e216 100644 --- a/test/cases/41-StreamProcessing/08-Recalc/test_recalc_watermark.py +++ b/test/cases/41-StreamProcessing/08-Recalc/test_recalc_watermark.py @@ -9,7 +9,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_recalc_watermark(self): - """Stream Recalculation WATERMARK Option Test + """Recalc: WATERMARK Test WATERMARK option behavior with six different window types and out-of-order data handling: diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_big_press.py 
b/test/cases/41-StreamProcessing/20-UseCase/big_press.py similarity index 99% rename from test/cases/41-StreamProcessing/20-UseCase/test_big_press.py rename to test/cases/41-StreamProcessing/20-UseCase/big_press.py index adffc0723031..dccc7ae8a46a 100644 --- a/test/cases/41-StreamProcessing/20-UseCase/test_big_press.py +++ b/test/cases/41-StreamProcessing/20-UseCase/big_press.py @@ -76,11 +76,11 @@ def prepare(self): tdSql.execute(f"create snode on dnode {i + 1}", show = True) # create meters db - etool.benchmark(f"-f cases/13-StreamProcessing/20-UseCase/json/idmp_meters.json") + etool.benchmark(f"-f cases/41-StreamProcessing/20-UseCase/json/idmp_meters.json") tdLog.info(f"import data to db: asset01 successfully.") # create vehicle db - etool.benchmark(f"-f cases/13-StreamProcessing/20-UseCase/json/idmp_vehicle.json") + etool.benchmark(f"-f cases/41-StreamProcessing/20-UseCase/json/idmp_vehicle.json") tdLog.info(f"import data to db: vehicle successfully.") # @@ -214,8 +214,8 @@ def startWriteJob(self): # meters nThreads = 1 jsons = [ - "cases/13-StreamProcessing/20-UseCase/json/exist_idmp_meters.json", - "cases/13-StreamProcessing/20-UseCase/json/exist_idmp_vehicle.json" + "cases/41-StreamProcessing/20-UseCase/json/exist_idmp_meters.json", + "cases/41-StreamProcessing/20-UseCase/json/exist_idmp_vehicle.json" ] for json in jsons: for i in range(nThreads): diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters_bug1.py b/test/cases/41-StreamProcessing/20-UseCase/idmp_meters_bug1.py similarity index 99% rename from test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters_bug1.py rename to test/cases/41-StreamProcessing/20-UseCase/idmp_meters_bug1.py index fc75384d1ccc..1971d27d9eba 100644 --- a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters_bug1.py +++ b/test/cases/41-StreamProcessing/20-UseCase/idmp_meters_bug1.py @@ -75,7 +75,7 @@ def prepare(self): self.start2 = 1752574200000 # import data - etool.taosdump(f"-i 
cases/13-StreamProcessing/20-UseCase/meters_data/data/") + etool.taosdump(f"-i cases/41-StreamProcessing/20-UseCase/meters_data/data/") tdLog.info(f"import data to db={self.db} successfully.") diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters_bug10.py b/test/cases/41-StreamProcessing/20-UseCase/idmp_meters_bug10.py similarity index 99% rename from test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters_bug10.py rename to test/cases/41-StreamProcessing/20-UseCase/idmp_meters_bug10.py index 5253cb6c2a1b..d7485364743f 100644 --- a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters_bug10.py +++ b/test/cases/41-StreamProcessing/20-UseCase/idmp_meters_bug10.py @@ -98,7 +98,7 @@ def prepare(self): self.start2 = 1752574200000 # import data - etool.taosdump(f"-i cases/13-StreamProcessing/20-UseCase/meters_data/data/") + etool.taosdump(f"-i cases/41-StreamProcessing/20-UseCase/meters_data/data/") tdLog.info(f"import data to db={self.db} successfully.") diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters_bug11.py b/test/cases/41-StreamProcessing/20-UseCase/idmp_meters_bug11.py similarity index 99% rename from test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters_bug11.py rename to test/cases/41-StreamProcessing/20-UseCase/idmp_meters_bug11.py index d9659018b483..c049723578aa 100644 --- a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters_bug11.py +++ b/test/cases/41-StreamProcessing/20-UseCase/idmp_meters_bug11.py @@ -98,7 +98,7 @@ def prepare(self): self.start2 = 1752574200000 # import data - etool.taosdump(f"-i cases/13-StreamProcessing/20-UseCase/meters_data/data/") + etool.taosdump(f"-i cases/41-StreamProcessing/20-UseCase/meters_data/data/") tdLog.info(f"import data to db={self.db} successfully.") diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters_bug2.py b/test/cases/41-StreamProcessing/20-UseCase/idmp_meters_bug2.py similarity index 99% rename from 
test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters_bug2.py rename to test/cases/41-StreamProcessing/20-UseCase/idmp_meters_bug2.py index 4d84eae9fbc7..f80df1c7fcd9 100644 --- a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters_bug2.py +++ b/test/cases/41-StreamProcessing/20-UseCase/idmp_meters_bug2.py @@ -75,7 +75,7 @@ def prepare(self): self.start2 = 1752574200000 # import data - etool.taosdump(f"-i cases/13-StreamProcessing/20-UseCase/meters_data/data/") + etool.taosdump(f"-i cases/41-StreamProcessing/20-UseCase/meters_data/data/") tdLog.info(f"import data to db={self.db} successfully.") diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters_bug3.py b/test/cases/41-StreamProcessing/20-UseCase/idmp_meters_bug3.py similarity index 99% rename from test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters_bug3.py rename to test/cases/41-StreamProcessing/20-UseCase/idmp_meters_bug3.py index 9968a23d4686..9e05580d8854 100644 --- a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters_bug3.py +++ b/test/cases/41-StreamProcessing/20-UseCase/idmp_meters_bug3.py @@ -75,7 +75,7 @@ def prepare(self): self.start2 = 1752574200000 # import data - etool.taosdump(f"-i cases/13-StreamProcessing/20-UseCase/meters_data/data/") + etool.taosdump(f"-i cases/41-StreamProcessing/20-UseCase/meters_data/data/") tdLog.info(f"import data to db={self.db} successfully.") diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters_bug4.py b/test/cases/41-StreamProcessing/20-UseCase/idmp_meters_bug4.py similarity index 99% rename from test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters_bug4.py rename to test/cases/41-StreamProcessing/20-UseCase/idmp_meters_bug4.py index 072f188ddcfa..3f6346c90dfd 100644 --- a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters_bug4.py +++ b/test/cases/41-StreamProcessing/20-UseCase/idmp_meters_bug4.py @@ -75,7 +75,7 @@ def prepare(self): self.start2 = 1752574200000 # import data - 
etool.taosdump(f"-i cases/13-StreamProcessing/20-UseCase/meters_data/data/") + etool.taosdump(f"-i cases/41-StreamProcessing/20-UseCase/meters_data/data/") tdLog.info(f"import data to db={self.db} successfully.") diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters_bug5.py b/test/cases/41-StreamProcessing/20-UseCase/idmp_meters_bug5.py similarity index 99% rename from test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters_bug5.py rename to test/cases/41-StreamProcessing/20-UseCase/idmp_meters_bug5.py index c54dd660ca77..d2861f737a6c 100644 --- a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters_bug5.py +++ b/test/cases/41-StreamProcessing/20-UseCase/idmp_meters_bug5.py @@ -93,7 +93,7 @@ def prepare(self): self.start2 = 1752574200000 # import data - etool.taosdump(f"-i cases/13-StreamProcessing/20-UseCase/meters_data/data/") + etool.taosdump(f"-i cases/41-StreamProcessing/20-UseCase/meters_data/data/") tdLog.info(f"import data to db={self.db} successfully.") diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters_bug6.py b/test/cases/41-StreamProcessing/20-UseCase/idmp_meters_bug6.py similarity index 99% rename from test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters_bug6.py rename to test/cases/41-StreamProcessing/20-UseCase/idmp_meters_bug6.py index 06e0c864978e..4bebcefae69e 100644 --- a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters_bug6.py +++ b/test/cases/41-StreamProcessing/20-UseCase/idmp_meters_bug6.py @@ -75,7 +75,7 @@ def prepare(self): self.start2 = 1752574200000 # import data - etool.taosdump(f"-i cases/13-StreamProcessing/20-UseCase/meters_data/data/") + etool.taosdump(f"-i cases/41-StreamProcessing/20-UseCase/meters_data/data/") tdLog.info(f"import data to db={self.db} successfully.") diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters_bug7.py b/test/cases/41-StreamProcessing/20-UseCase/idmp_meters_bug7.py similarity index 99% rename from 
test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters_bug7.py rename to test/cases/41-StreamProcessing/20-UseCase/idmp_meters_bug7.py index df5de6bd8895..8636b1aa5b6b 100644 --- a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters_bug7.py +++ b/test/cases/41-StreamProcessing/20-UseCase/idmp_meters_bug7.py @@ -75,7 +75,7 @@ def prepare(self): self.start2 = 1752574200000 # import data - etool.taosdump(f"-i cases/13-StreamProcessing/20-UseCase/meters_data/data/") + etool.taosdump(f"-i cases/41-StreamProcessing/20-UseCase/meters_data/data/") tdLog.info(f"import data to db={self.db} successfully.") diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters_bug9.py b/test/cases/41-StreamProcessing/20-UseCase/idmp_meters_bug9.py similarity index 99% rename from test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters_bug9.py rename to test/cases/41-StreamProcessing/20-UseCase/idmp_meters_bug9.py index a80079a7d707..b52a4a244c1f 100644 --- a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters_bug9.py +++ b/test/cases/41-StreamProcessing/20-UseCase/idmp_meters_bug9.py @@ -75,7 +75,7 @@ def prepare(self): self.start2 = 1752574200000 # import data - etool.taosdump(f"-i cases/13-StreamProcessing/20-UseCase/meters_data/data/") + etool.taosdump(f"-i cases/41-StreamProcessing/20-UseCase/meters_data/data/") tdLog.info(f"import data to db={self.db} successfully.") diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_tobacco_bug3.py b/test/cases/41-StreamProcessing/20-UseCase/idmp_tobacco_bug3.py similarity index 73% rename from test/cases/41-StreamProcessing/20-UseCase/test_idmp_tobacco_bug3.py rename to test/cases/41-StreamProcessing/20-UseCase/idmp_tobacco_bug3.py index 823f65dd33f0..841c3e1926ad 100644 --- a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_tobacco_bug3.py +++ b/test/cases/41-StreamProcessing/20-UseCase/idmp_tobacco_bug3.py @@ -26,10 +26,10 @@ def test_idmp_tobacco_bug3(self): "tobacco", "idmp_sample_tobacco", 
"idmp", - "cases/13-StreamProcessing/20-UseCase/tobacco_data/idmp_sample_tobacco", - "cases/13-StreamProcessing/20-UseCase/tobacco_data/idmp/vstb.sql", - "cases/13-StreamProcessing/20-UseCase/tobacco_data/idmp/vtb.sql", - "cases/13-StreamProcessing/20-UseCase/tobacco_data/idmp/stream.json", + "cases/41-StreamProcessing/20-UseCase/tobacco_data/idmp_sample_tobacco", + "cases/41-StreamProcessing/20-UseCase/tobacco_data/idmp/vstb.sql", + "cases/41-StreamProcessing/20-UseCase/tobacco_data/idmp/vtb.sql", + "cases/41-StreamProcessing/20-UseCase/tobacco_data/idmp/stream.json", ) tobac.stream_ids = [3] tobac.assert_retry = 60 diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_vehicle_bug11.py b/test/cases/41-StreamProcessing/20-UseCase/idmp_vehicle_bug11.py similarity index 99% rename from test/cases/41-StreamProcessing/20-UseCase/test_idmp_vehicle_bug11.py rename to test/cases/41-StreamProcessing/20-UseCase/idmp_vehicle_bug11.py index fdd01995aaa1..7f981e2e3dff 100644 --- a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_vehicle_bug11.py +++ b/test/cases/41-StreamProcessing/20-UseCase/idmp_vehicle_bug11.py @@ -93,7 +93,7 @@ def prepare(self): # import data - etool.taosdump(f"-i cases/13-StreamProcessing/20-UseCase/vehicle_data/") + etool.taosdump(f"-i cases/41-StreamProcessing/20-UseCase/vehicle_data/") tdLog.info(f"import data to db={self.db}. 
successfully.") diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_vehicle_bug12.py b/test/cases/41-StreamProcessing/20-UseCase/idmp_vehicle_bug12.py similarity index 99% rename from test/cases/41-StreamProcessing/20-UseCase/test_idmp_vehicle_bug12.py rename to test/cases/41-StreamProcessing/20-UseCase/idmp_vehicle_bug12.py index c0767907fb24..4b2126a29435 100644 --- a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_vehicle_bug12.py +++ b/test/cases/41-StreamProcessing/20-UseCase/idmp_vehicle_bug12.py @@ -86,7 +86,7 @@ def prepare(self): # import data - etool.taosdump(f"-i cases/13-StreamProcessing/20-UseCase/vehicle_data/") + etool.taosdump(f"-i cases/41-StreamProcessing/20-UseCase/vehicle_data/") tdLog.info(f"import data to db={self.db}. successfully.") diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_vehicle_bug13.py b/test/cases/41-StreamProcessing/20-UseCase/idmp_vehicle_bug13.py similarity index 99% rename from test/cases/41-StreamProcessing/20-UseCase/test_idmp_vehicle_bug13.py rename to test/cases/41-StreamProcessing/20-UseCase/idmp_vehicle_bug13.py index c7c25ee83a1d..b7f2a9ce24ad 100644 --- a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_vehicle_bug13.py +++ b/test/cases/41-StreamProcessing/20-UseCase/idmp_vehicle_bug13.py @@ -93,7 +93,7 @@ def prepare(self): # import data - etool.taosdump(f"-i cases/13-StreamProcessing/20-UseCase/vehicle_data/") + etool.taosdump(f"-i cases/41-StreamProcessing/20-UseCase/vehicle_data/") tdLog.info(f"import data to db={self.db}. 
successfully.") diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_nevados_dbg.py b/test/cases/41-StreamProcessing/20-UseCase/nevados_dbg.py similarity index 100% rename from test/cases/41-StreamProcessing/20-UseCase/test_nevados_dbg.py rename to test/cases/41-StreamProcessing/20-UseCase/nevados_dbg.py diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_period_long.py b/test/cases/41-StreamProcessing/20-UseCase/period_long.py similarity index 100% rename from test/cases/41-StreamProcessing/20-UseCase/test_period_long.py rename to test/cases/41-StreamProcessing/20-UseCase/period_long.py diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_sdny_2.py b/test/cases/41-StreamProcessing/20-UseCase/sdny_2.py similarity index 99% rename from test/cases/41-StreamProcessing/20-UseCase/test_sdny_2.py rename to test/cases/41-StreamProcessing/20-UseCase/sdny_2.py index 2939d73694fb..662c559ae62d 100644 --- a/test/cases/41-StreamProcessing/20-UseCase/test_sdny_2.py +++ b/test/cases/41-StreamProcessing/20-UseCase/sdny_2.py @@ -255,8 +255,8 @@ def sdnydata(self): tdSql.execute(stbsql) tdSql.execute(tb1sql) tdSql.execute(tb2sql) - tdSql.execute(f"insert into {self.dbname}.e010720169990001 file 'cases/13-StreamProcessing/20-UseCase/e010720169990001.csv';") - tdSql.execute(f"insert into {self.dbname}.e010720169990002 file 'cases/13-StreamProcessing/20-UseCase/e010720169990001.csv';") + tdSql.execute(f"insert into {self.dbname}.e010720169990001 file 'cases/41-StreamProcessing/20-UseCase/e010720169990001.csv';") + tdSql.execute(f"insert into {self.dbname}.e010720169990002 file 'cases/41-StreamProcessing/20-UseCase/e010720169990001.csv';") tdLog.info("load csv file success.") diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_sdny_bug1.py b/test/cases/41-StreamProcessing/20-UseCase/sdny_bug1.py similarity index 99% rename from test/cases/41-StreamProcessing/20-UseCase/test_sdny_bug1.py rename to test/cases/41-StreamProcessing/20-UseCase/sdny_bug1.py index 
27d761c9eee5..5e11ce16bf00 100644 --- a/test/cases/41-StreamProcessing/20-UseCase/test_sdny_bug1.py +++ b/test/cases/41-StreamProcessing/20-UseCase/sdny_bug1.py @@ -264,8 +264,8 @@ def sdnydata(self): tdSql.execute(stbsql) tdSql.execute(tb1sql) tdSql.execute(tb2sql) - tdSql.execute(f"insert into {self.dbname}.e010720169990001 file 'cases/13-StreamProcessing/20-UseCase/e010720169990001.csv';") - tdSql.execute(f"insert into {self.dbname}.e010720169990002 file 'cases/13-StreamProcessing/20-UseCase/e010720169990001.csv';") + tdSql.execute(f"insert into {self.dbname}.e010720169990001 file 'cases/41-StreamProcessing/20-UseCase/e010720169990001.csv';") + tdSql.execute(f"insert into {self.dbname}.e010720169990002 file 'cases/41-StreamProcessing/20-UseCase/e010720169990001.csv';") tdLog.info("load csv file success.") diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_sdny_bug2.py b/test/cases/41-StreamProcessing/20-UseCase/sdny_bug2.py similarity index 99% rename from test/cases/41-StreamProcessing/20-UseCase/test_sdny_bug2.py rename to test/cases/41-StreamProcessing/20-UseCase/sdny_bug2.py index 8408f924c715..53b419446057 100644 --- a/test/cases/41-StreamProcessing/20-UseCase/test_sdny_bug2.py +++ b/test/cases/41-StreamProcessing/20-UseCase/sdny_bug2.py @@ -264,8 +264,8 @@ def sdnydata(self): tdSql.execute(stbsql) tdSql.execute(tb1sql) tdSql.execute(tb2sql) - tdSql.execute(f"insert into {self.dbname}.e010720169990001 file 'cases/13-StreamProcessing/20-UseCase/e010720169990001.csv';") - tdSql.execute(f"insert into {self.dbname}.e010720169990002 file 'cases/13-StreamProcessing/20-UseCase/e010720169990001.csv';") + tdSql.execute(f"insert into {self.dbname}.e010720169990001 file 'cases/41-StreamProcessing/20-UseCase/e010720169990001.csv';") + tdSql.execute(f"insert into {self.dbname}.e010720169990002 file 'cases/41-StreamProcessing/20-UseCase/e010720169990001.csv';") tdLog.info("load csv file success.") diff --git 
a/test/cases/41-StreamProcessing/20-UseCase/test_sdny_bug3.py b/test/cases/41-StreamProcessing/20-UseCase/sdny_bug3.py similarity index 99% rename from test/cases/41-StreamProcessing/20-UseCase/test_sdny_bug3.py rename to test/cases/41-StreamProcessing/20-UseCase/sdny_bug3.py index 188fdd838f0d..e4c5870d0908 100644 --- a/test/cases/41-StreamProcessing/20-UseCase/test_sdny_bug3.py +++ b/test/cases/41-StreamProcessing/20-UseCase/sdny_bug3.py @@ -260,8 +260,8 @@ def sdnydata(self): tdSql.execute(stbsql) tdSql.execute(tb1sql) tdSql.execute(tb2sql) - tdSql.execute(f"insert into {self.dbname}.e010720169990001 file 'cases/13-StreamProcessing/20-UseCase/e010720169990001.csv';") - tdSql.execute(f"insert into {self.dbname}.e010720169990002 file 'cases/13-StreamProcessing/20-UseCase/e010720169990001.csv';") + tdSql.execute(f"insert into {self.dbname}.e010720169990001 file 'cases/41-StreamProcessing/20-UseCase/e010720169990001.csv';") + tdSql.execute(f"insert into {self.dbname}.e010720169990002 file 'cases/41-StreamProcessing/20-UseCase/e010720169990001.csv';") tdLog.info("load csv file success.") diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_sdny_case1_bug1.py b/test/cases/41-StreamProcessing/20-UseCase/sdny_case1_bug1.py similarity index 99% rename from test/cases/41-StreamProcessing/20-UseCase/test_sdny_case1_bug1.py rename to test/cases/41-StreamProcessing/20-UseCase/sdny_case1_bug1.py index 0c26dff3234d..a2cc5d8d77da 100644 --- a/test/cases/41-StreamProcessing/20-UseCase/test_sdny_case1_bug1.py +++ b/test/cases/41-StreamProcessing/20-UseCase/sdny_case1_bug1.py @@ -263,8 +263,8 @@ def sdnydata(self): tdSql.execute(stbsql) tdSql.execute(tb1sql) tdSql.execute(tb2sql) - tdSql.execute(f"insert into {self.dbname}.e010720169990001 file 'cases/13-StreamProcessing/20-UseCase/e010720169990001.csv';") - tdSql.execute(f"insert into {self.dbname}.e010720169990002 file 'cases/13-StreamProcessing/20-UseCase/e010720169990001.csv';") + tdSql.execute(f"insert into 
{self.dbname}.e010720169990001 file 'cases/41-StreamProcessing/20-UseCase/e010720169990001.csv';") + tdSql.execute(f"insert into {self.dbname}.e010720169990002 file 'cases/41-StreamProcessing/20-UseCase/e010720169990001.csv';") tdLog.info("load csv file success.") diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters.py b/test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters.py index 1ddec844d3f4..066f16cd45b1 100644 --- a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters.py +++ b/test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters.py @@ -13,7 +13,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_usecase_em(self): - """Nevados + """IDMP: meters scenario Refer: https://taosdata.feishu.cn/wiki/Zkb2wNkHDihARVkGHYEcbNhmnxb diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters_td36808.py b/test/cases/41-StreamProcessing/20-UseCase/test_idmp_public.py similarity index 99% rename from test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters_td36808.py rename to test/cases/41-StreamProcessing/20-UseCase/test_idmp_public.py index e22f666e2112..355bc95a4e2d 100644 --- a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_meters_td36808.py +++ b/test/cases/41-StreamProcessing/20-UseCase/test_idmp_public.py @@ -10,7 +10,7 @@ class Test_IDMP_Meters_TD36808: def test_td36808(self): - """公共事业场景测试 + """IDMP: public utility scenario 尝试复现 IDMP 会产生 core 的场景,建 2 个流, STOP STREAM, START STREAM, DROP STREAM diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_pv.py b/test/cases/41-StreamProcessing/20-UseCase/test_idmp_pv.py index eed122ed3d5c..15004c104838 100644 --- a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_pv.py +++ b/test/cases/41-StreamProcessing/20-UseCase/test_idmp_pv.py @@ -3,7 +3,7 @@ class TestIdmpPhotovoltaic: def test_pv(self): - """IDMP 光伏场景测试 + """IDMP: photovoltaic scenario Refer: 
https://taosdata.feishu.cn/wiki/Zkb2wNkHDihARVkGHYEcbNhmnxb#share-Ygqld907hoMESmx04GBcRlaVnZz 1. 测试 AI 推荐生成的分析, 创建 Stream, 验证流的正确性 diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_tobacco.py b/test/cases/41-StreamProcessing/20-UseCase/test_idmp_tobacco.py index fb44801c80b5..cdfee7314bcc 100644 --- a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_tobacco.py +++ b/test/cases/41-StreamProcessing/20-UseCase/test_idmp_tobacco.py @@ -8,7 +8,7 @@ class TestIdmpTobacco: def test_idmp_tobacco(self): - """IDMP 烟草场景测试 + """IDMP: tobacco scenario Refer: https://taosdata.feishu.cn/wiki/Zkb2wNkHDihARVkGHYEcbNhmnxb#share-I9GwdF26PoWk6uxx2zJcxZYrn1d 1. 测试 AI 推荐生成的分析,创建 Stream,验证流的正确性 diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_vehicle.py b/test/cases/41-StreamProcessing/20-UseCase/test_idmp_vehicle.py index 9abfd29db25a..716ab1a76d01 100644 --- a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_vehicle.py +++ b/test/cases/41-StreamProcessing/20-UseCase/test_idmp_vehicle.py @@ -13,7 +13,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_usecase_em(self): - """Nevados + """IDMP: vehicle scenario Refer: https://taosdata.feishu.cn/wiki/Zkb2wNkHDihARVkGHYEcbNhmnxb diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_sdny.py b/test/cases/41-StreamProcessing/20-UseCase/test_sdny.py index 67d90109fbc3..52b32bd45b54 100644 --- a/test/cases/41-StreamProcessing/20-UseCase/test_sdny.py +++ b/test/cases/41-StreamProcessing/20-UseCase/test_sdny.py @@ -31,7 +31,7 @@ def test_sdny_case1(self): Since: v3.3.3.7 - Labels: common,ci + Labels: common,ci,skip Jira: None @@ -264,8 +264,8 @@ def sdnydata(self): tdSql.execute(stbsql) tdSql.execute(tb1sql) tdSql.execute(tb2sql) - tdSql.execute(f"insert into {self.dbname}.e010720169990001 file 'cases/13-StreamProcessing/20-UseCase/e010720169990001.csv';") - tdSql.execute(f"insert into {self.dbname}.e010720169990002 file 
'cases/13-StreamProcessing/20-UseCase/e010720169990001.csv';") + tdSql.execute(f"insert into {self.dbname}.e010720169990001 file 'cases/41-StreamProcessing/20-UseCase/e010720169990001.csv';") + tdSql.execute(f"insert into {self.dbname}.e010720169990002 file 'cases/41-StreamProcessing/20-UseCase/e010720169990001.csv';") tdLog.info("load csv file success.") diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_case4.py b/test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_case4.py index b5073eb374fa..e66541d1333f 100644 --- a/test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_case4.py +++ b/test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_case4.py @@ -6,9 +6,12 @@ import os import subprocess import json +import random +import time +import datetime class Test_ThreeGorges: - caseName = "test_three_gorges_case4" + caseName = "test_three_gorges_case4_bug1" currentDir = os.path.dirname(os.path.abspath(__file__)) runAll = False dbname = "test1" @@ -27,7 +30,7 @@ class Test_ThreeGorges: def setup_class(cls): tdLog.debug(f"start to execute {__file__}") - def test_three_gorges_case4(self): + def test_three_gorges_case4_bug1(self): """test_three_gorges_case 1. 
create snode @@ -55,11 +58,38 @@ def test_three_gorges_case4(self): self.createSnodeTest() self.createStream() self.checkStreamRunning() - # tdSql.query(f"select * from {self.dbname}.str_cjdl_point_data_szls_jk_test") - # if tdSql.getRows() == 0: - # raise Exception("ERROR:no result!") - tdSql.checkRowsLoop(5,f"select val,senid,senid_name from {self.dbname}.{self.outTbname} order by _c0;",100,1) + tdSql.checkResultsByFunc( + f"select * from {self.dbname}.str_cjdl_point_data_szls_jk_test", + lambda: tdSql.getRows() > 0 + ) + + # insert expired time data (expired_time:3d) + today = datetime.date.today() + yesterday = today - datetime.timedelta(days=5) + base_ts = int(time.mktime(datetime.datetime.combine(yesterday, datetime.time.min).timetuple())) * 1000 + tdLog.info(f"write a data from 5 days ago") + tdSql.execute(f"insert into {self.dbname}.a1 values({base_ts},998,998) ;") #先写入一条当前时间5天前数据 (比如 7.14 号) + tdLog.info(f"Write the time data for the next 2 days") + tdSql.execute(f"insert into {self.dbname}.a1 values({base_ts+86400000*7},999,999) ;") #写入未来2天时间数据(7.21号) + + time.sleep(3) + tdLog.info(f"Write the time data for the next 1 days") + tdSql.execute(f"insert into {self.dbname}.a1 values({base_ts+86400000*6},997,997) ;")#写入未来1天时间数据(7.20 号) + # time.sleep(5) + # #检查过去 5 天数据是否写入 + # tdSql.query(f"select * from {self.dbname}.str_cjdl_point_data_szls_jk_test where _c0 <= today()-5d") + # tdLog.info(f"select * from {self.dbname}.str_cjdl_point_data_szls_jk_test where _c0 <= today()-5d") + # if tdSql.getRows() != 1: + # raise Exception("ERROR: result is now right!") + + # #检查未来 1 天数据是否写入 + # tdSql.query(f"select * from {self.dbname}.str_cjdl_point_data_szls_jk_test where _c0 >today()") + # tdLog.info(f"select * from {self.dbname}.str_cjdl_point_data_szls_jk_test where _c0 >today()") + # if tdSql.getRows() != 1: + # raise Exception("ERROR: result is now right!") + tdSql.checkRowsLoop(6,f"select val,senid,senid_name from {self.dbname}.{self.outTbname} order by _c0;",200,1) 
self.checkResultWithResultFile() + def createStream(self): tdLog.info(f"create stream :") @@ -81,9 +111,7 @@ def createStream(self): tdLog.info(f"create stream success!") def sxny_data1(self): - import random - import time - import datetime + random.seed(42) tdSql.execute("create database test1 vgroups 6;") @@ -114,94 +142,13 @@ def sxny_data1(self): sql = "INSERT INTO test1.%s VALUES (%d,%d,%d)" % (tb, ts, c1,c2) tdSql.execute(sql) - def checkResultWithResultFile(self): chkSql = f"select val,senid,senid_name from {self.dbname}.{self.outTbname} order by _c0;" tdLog.info(f"check result with sql: {chkSql}") if tdSql.getRows() >0: tdCom.generate_query_result_file(self.caseName, self.resultIdx, chkSql) tdCom.compare_query_with_result_file(self.resultIdx, chkSql, f"{self.currentDir}/ans/{self.caseName}.{self.resultIdx}.csv", self.caseName) - tdLog.info("check result with result file succeed") - - def dataIn(self): - tdLog.info(f"insert more data:") - config = { - "filetype": "insert", - "cfgdir": "/etc/taos", - "host": "localhost", - "port": 6030, - "user": "root", - "password": "taosdata", - "thread_count": 16, - "thread_count_create_tbl": 8, - "result_file": "./insert.txt", - "confirm_parameter_prompt": "no", - "insert_interval": 0, - "num_of_records_per_req": 1000, - "max_sql_len": 1048576, - "databases": [{ - "dbinfo": { - "name": "test1", - "drop": "no", - "replica": 3, - "days": 10, - "precision": "ms", - "keep": 36500, - "minRows": 100, - "maxRows": 4096 - }, - "super_tables": [{ - "name": "stba", - "child_table_exists": "no", - "childtable_count": 3, - "childtable_prefix": "a", - "auto_create_table": "no", - "batch_create_tbl_num": 10, - "data_source": "rand", - "insert_mode": "taosc", - "insert_rows": 5000, - "childtable_limit": 100000000, - "childtable_offset": 0, - "interlace_rows": 0, - "insert_interval": 0, - "max_sql_len": 1048576, - "disorder_ratio": 0, - "disorder_range": 1000, - "timestamp_step": 30000, - "start_timestamp": "2025-01-01 00:00:00.000", - 
"sample_format": "", - "sample_file": "", - "tags_file": "", - "columns": [ - {"type": "timestamp","name":"cts","count": 1,"start":"2025-02-01 00:00:00.000"}, - {"type": "int","name":"cint","max":100,"min":-1}, - {"type": "int","name":"i1","max":100,"min":-1} - ], - "tags": [ - {"type": "int","name":"tint","max":100,"min":-1}, - {"type": "double","name":"tdouble","max":100,"min":0}, - {"type": "varchar","name":"tvar","len":100,"count": 1}, - {"type": "nchar","name":"tnchar","len":100,"count": 1}, - {"type": "timestamp","name":"tts"}, - {"type": "bool","name":"tbool"} - ] - } - - ] - } - ] - } - - with open('insert_config.json','w') as f: - json.dump(config,f,indent=4) - tdLog.info('config file ready') - cmd = f"taosBenchmark -f insert_config.json " - # output = subprocess.check_output(cmd, shell=True).decode().strip() - ret = os.system(cmd) - if ret != 0: - raise Exception("taosBenchmark run failed") - time.sleep(5) - tdLog.info(f"Insert data:taosBenchmark -f insert_config.json") + tdLog.info("check result with result file succeed") def checkResultRows(self, expectedRows): diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_case4_bug1.py b/test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_case4_bug1.py deleted file mode 100644 index e66541d1333f..000000000000 --- a/test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_case4_bug1.py +++ /dev/null @@ -1,218 +0,0 @@ -import time -import math -import random -from new_test_framework.utils import tdLog, tdSql, tdStream, streamUtil,StreamTableType, StreamTable, cluster,tdCom -from random import randint -import os -import subprocess -import json -import random -import time -import datetime - -class Test_ThreeGorges: - caseName = "test_three_gorges_case4_bug1" - currentDir = os.path.dirname(os.path.abspath(__file__)) - runAll = False - dbname = "test1" - stbname= "stba" - stName = "" - resultIdx = "" - sliding = 1 - subTblNum = 3 - tblRowNum = 10 - tableList = [] - outTbname = 
"str_cjdl_point_data_szls_jk_test" - streamName = "str_cjdl_point_data_szls_jk_test" - tableList = [] - resultIdx = "1" - - def setup_class(cls): - tdLog.debug(f"start to execute {__file__}") - - def test_three_gorges_case4_bug1(self): - """test_three_gorges_case - - 1. create snode - 2. create stream - - - Catalog: - - Streams:str_cjdl_point_data_szls_jk_test - - Since: v3.3.3.7 - - Labels: common,ci - - Jira: None - - History: - - 2025-7-18 lvze Created - - """ - - - tdStream.dropAllStreamsAndDbs() - - self.sxny_data1() - self.createSnodeTest() - self.createStream() - self.checkStreamRunning() - tdSql.checkResultsByFunc( - f"select * from {self.dbname}.str_cjdl_point_data_szls_jk_test", - lambda: tdSql.getRows() > 0 - ) - - # insert expired time data (expired_time:3d) - today = datetime.date.today() - yesterday = today - datetime.timedelta(days=5) - base_ts = int(time.mktime(datetime.datetime.combine(yesterday, datetime.time.min).timetuple())) * 1000 - tdLog.info(f"write a data from 5 days ago") - tdSql.execute(f"insert into {self.dbname}.a1 values({base_ts},998,998) ;") #先写入一条当前时间5天前数据 (比如 7.14 号) - tdLog.info(f"Write the time data for the next 2 days") - tdSql.execute(f"insert into {self.dbname}.a1 values({base_ts+86400000*7},999,999) ;") #写入未来2天时间数据(7.21号) - - time.sleep(3) - tdLog.info(f"Write the time data for the next 1 days") - tdSql.execute(f"insert into {self.dbname}.a1 values({base_ts+86400000*6},997,997) ;")#写入未来1天时间数据(7.20 号) - # time.sleep(5) - # #检查过去 5 天数据是否写入 - # tdSql.query(f"select * from {self.dbname}.str_cjdl_point_data_szls_jk_test where _c0 <= today()-5d") - # tdLog.info(f"select * from {self.dbname}.str_cjdl_point_data_szls_jk_test where _c0 <= today()-5d") - # if tdSql.getRows() != 1: - # raise Exception("ERROR: result is now right!") - - # #检查未来 1 天数据是否写入 - # tdSql.query(f"select * from {self.dbname}.str_cjdl_point_data_szls_jk_test where _c0 >today()") - # tdLog.info(f"select * from {self.dbname}.str_cjdl_point_data_szls_jk_test where 
_c0 >today()") - # if tdSql.getRows() != 1: - # raise Exception("ERROR: result is now right!") - tdSql.checkRowsLoop(6,f"select val,senid,senid_name from {self.dbname}.{self.outTbname} order by _c0;",200,1) - self.checkResultWithResultFile() - - - def createStream(self): - tdLog.info(f"create stream :") - stream = ( - f"""create stream {self.dbname}.str_cjdl_point_data_szls_jk_test interval(1m) sliding(1m) from {self.dbname}.stb_cjdl_point_data - partition by tbname,senid,senid_name - stream_options(expired_time(3d)|low_latency_calc|fill_history|pre_filter(tag_temp='A001')) - into {self.dbname}.str_cjdl_point_data_szls_jk_test - output_subtable(concat_ws('_','cjdl_point_data_szls_jk_test',senid)) - tags(`tbname` varchar(255) as tbname,senid varchar(255) as senid,senid_name varchar(255) as senid_name) - as select - last(ts) as ts, - last(val) as val - from - %%trows t1 ; - """ - ) - tdSql.execute(stream,queryTimes=2) - tdLog.info(f"create stream success!") - - def sxny_data1(self): - - - random.seed(42) - tdSql.execute("create database test1 vgroups 6;") - tdSql.execute(f"use {self.dbname}") - tdSql.execute("""CREATE STABLE `stb_cjdl_point_data` (`ts` TIMESTAMP , `st` DOUBLE , `val` DOUBLE ) - TAGS (`id` VARCHAR(20), `senid` VARCHAR(255), `senid_name` VARCHAR(255), `tag_temp` VARCHAR(255)) ; - )""") - - tdSql.execute("CREATE TABLE test1.`a0` USING test1.`stb_cjdl_point_data` TAGS ('a0','sendid_a0','name_a0','A000')") - tdSql.execute("CREATE TABLE test1.`a1` USING test1.`stb_cjdl_point_data` TAGS ('a1','sendid_a1','name_a1','A001')") - tdSql.execute("CREATE TABLE test1.`a2` USING test1.`stb_cjdl_point_data` TAGS ('a2','sendid_a2','name_a2','A002')") - - tables = ['a0', 'a1', 'a2'] - - - today = datetime.date.today() - yesterday = today - datetime.timedelta(days=1) - base_ts = int(time.mktime(datetime.datetime.combine(yesterday, datetime.time.min).timetuple())) * 1000 - - interval_ms = 30 * 1000 - total_rows = 10 - - for i in range(total_rows): - ts = base_ts + i * 
interval_ms - c1 = random.randint(0, 1000) - c2 = random.randint(0, 1000) - for tb in tables: - sql = "INSERT INTO test1.%s VALUES (%d,%d,%d)" % (tb, ts, c1,c2) - tdSql.execute(sql) - - def checkResultWithResultFile(self): - chkSql = f"select val,senid,senid_name from {self.dbname}.{self.outTbname} order by _c0;" - tdLog.info(f"check result with sql: {chkSql}") - if tdSql.getRows() >0: - tdCom.generate_query_result_file(self.caseName, self.resultIdx, chkSql) - tdCom.compare_query_with_result_file(self.resultIdx, chkSql, f"{self.currentDir}/ans/{self.caseName}.{self.resultIdx}.csv", self.caseName) - tdLog.info("check result with result file succeed") - - - def checkResultRows(self, expectedRows): - tdSql.checkResultsByFunc( - f"select * from information_schema.ins_snodes order by id;", - lambda: tdSql.getRows() == expectedRows, - delay=0.5, retry=2 - ) - - - def get_pid_by_cmdline(self,pattern): - try: - cmd = "unset LD_PRELOAD;ps -eo pid,cmd | grep '{}' | grep -v grep | grep -v SCREEN".format(pattern) - output = subprocess.check_output(cmd, shell=True).decode().strip() - # 可多行,默认取第一行 - lines = output.split('\n') - if lines: - pid = int(lines[0].strip().split()[0]) - return pid - except subprocess.CalledProcessError: - return None - - - def createSnodeTest(self): - tdLog.info(f"create snode test") - tdSql.query("select * from information_schema.ins_dnodes order by id;") - numOfNodes=tdSql.getRows() - tdLog.info(f"numOfNodes: {numOfNodes}") - - for i in range(1, numOfNodes + 1): - tdSql.execute(f"create snode on dnode {i}") - tdLog.info(f"create snode on dnode {i} success") - self.checkResultRows(numOfNodes) - - tdSql.checkResultsByFunc( - f"show snodes;", - lambda: tdSql.getRows() == numOfNodes, - delay=0.5, retry=2 - ) - - - - - - def checkStreamRunning(self): - tdLog.info(f"check stream running status:") - - timeout = 60 - start_time = time.time() - - while True: - if time.time() - start_time > timeout: - tdLog.error("Timeout waiting for all streams to be 
running.") - tdLog.error(f"Final stream running status: {streamRunning}") - raise TimeoutError(f"Stream status did not reach 'Running' within {timeout}s timeout.") - - tdSql.query(f"select status from information_schema.ins_streams order by stream_name;") - streamRunning=tdSql.getColData(0) - - if all(status == "Running" for status in streamRunning): - tdLog.info("All Stream running!") - tdLog.info(f"stream running status: {streamRunning}") - return - else: - tdLog.info("Stream not running! Wait stream running ...") - tdLog.info(f"stream running status: {streamRunning}") - time.sleep(1) diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_second_case1_bug1.py b/test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_second_case1.py similarity index 100% rename from test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_second_case1_bug1.py rename to test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_second_case1.py diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_second_case12.py b/test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_second_case12.py index 2892cf5ad164..1a09a49e5589 100644 --- a/test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_second_case12.py +++ b/test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_second_case12.py @@ -42,7 +42,7 @@ def test_three_gorges_second_case12(self): Since: v3.3.3.7 - Labels: common,ci + Labels: common,ci,skip Jira: None diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_second_case1_twostream.py b/test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_second_case2.py similarity index 100% rename from test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_second_case1_twostream.py rename to test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_second_case2.py diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_second_case26.py 
b/test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_second_case26.py index 76bc4b8c7603..e4c9c4868bbb 100644 --- a/test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_second_case26.py +++ b/test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_second_case26.py @@ -42,7 +42,7 @@ def test_three_gorges_second_case26(self): Since: v3.3.3.7 - Labels: common,ci + Labels: common,ci,skip Jira: None diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_ts-7152.py b/test/cases/41-StreamProcessing/20-UseCase/test_yuxi_TS_7152.py similarity index 99% rename from test/cases/41-StreamProcessing/20-UseCase/test_idmp_ts-7152.py rename to test/cases/41-StreamProcessing/20-UseCase/test_yuxi_TS_7152.py index c93c82c1d131..655357a9a3ce 100644 --- a/test/cases/41-StreamProcessing/20-UseCase/test_idmp_ts-7152.py +++ b/test/cases/41-StreamProcessing/20-UseCase/test_yuxi_TS_7152.py @@ -12,7 +12,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_usecase_em(self): - """yuxi + """YuXi: TS-7142 Refer: https://taosdata.feishu.cn/wiki/G8mSwPK20iLpPrk9MmOc9g95nLe diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_case1.py b/test/cases/41-StreamProcessing/20-UseCase/three_gorges_case1.py similarity index 100% rename from test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_case1.py rename to test/cases/41-StreamProcessing/20-UseCase/three_gorges_case1.py diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_case2.py b/test/cases/41-StreamProcessing/20-UseCase/three_gorges_case2.py similarity index 100% rename from test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_case2.py rename to test/cases/41-StreamProcessing/20-UseCase/three_gorges_case2.py diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_case3.py b/test/cases/41-StreamProcessing/20-UseCase/three_gorges_case3.py similarity index 100% rename from 
test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_case3.py rename to test/cases/41-StreamProcessing/20-UseCase/three_gorges_case3.py diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_phase1_bug1.py b/test/cases/41-StreamProcessing/20-UseCase/three_gorges_phase1_bug1.py similarity index 100% rename from test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_phase1_bug1.py rename to test/cases/41-StreamProcessing/20-UseCase/three_gorges_phase1_bug1.py diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_phase2.py b/test/cases/41-StreamProcessing/20-UseCase/three_gorges_phase2.py similarity index 100% rename from test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_phase2.py rename to test/cases/41-StreamProcessing/20-UseCase/three_gorges_phase2.py diff --git a/test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_phase3.py b/test/cases/41-StreamProcessing/20-UseCase/three_gorges_phase3.py similarity index 100% rename from test/cases/41-StreamProcessing/20-UseCase/test_three_gorges_phase3.py rename to test/cases/41-StreamProcessing/20-UseCase/three_gorges_phase3.py diff --git a/test/cases/41-StreamProcessing/30-OldPyCases/test_compatibility.py b/test/cases/41-StreamProcessing/23-Compatibility/test_compatibility_backward_forward.py similarity index 99% rename from test/cases/41-StreamProcessing/30-OldPyCases/test_compatibility.py rename to test/cases/41-StreamProcessing/23-Compatibility/test_compatibility_backward_forward.py index 964847d9a8b3..f811100e60f6 100644 --- a/test/cases/41-StreamProcessing/30-OldPyCases/test_compatibility.py +++ b/test/cases/41-StreamProcessing/23-Compatibility/test_compatibility_backward_forward.py @@ -17,7 +17,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_compatibility(self): - """Stream Processing Backward and Forward Compatibility Test + """Comp: Stream Backward and Forward Test compatibility across 5 baseline versions with 
stream processing validation: diff --git a/test/cases/41-StreamProcessing/23-Compatibility/stream_compatibility.py b/test/cases/41-StreamProcessing/23-Compatibility/test_compatibility_cross_version.py similarity index 99% rename from test/cases/41-StreamProcessing/23-Compatibility/stream_compatibility.py rename to test/cases/41-StreamProcessing/23-Compatibility/test_compatibility_cross_version.py index a7260a6e7c10..2f277abc812c 100644 --- a/test/cases/41-StreamProcessing/23-Compatibility/stream_compatibility.py +++ b/test/cases/41-StreamProcessing/23-Compatibility/test_compatibility_cross_version.py @@ -47,7 +47,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_compatibility(self): - """Stream Processing Cross-Version Compatibility Test + """Comp: Stream Cross-Version Test stream processing and TSMA compatibility across 4 base versions with actual stream/TSMA creation and verification: diff --git a/test/cases/41-StreamProcessing/30-OldPyCases/test_compatibility_rolling_upgrade.py b/test/cases/41-StreamProcessing/23-Compatibility/test_compatibility_rolling_upgrade.py similarity index 99% rename from test/cases/41-StreamProcessing/30-OldPyCases/test_compatibility_rolling_upgrade.py rename to test/cases/41-StreamProcessing/23-Compatibility/test_compatibility_rolling_upgrade.py index 55d54c774d9a..5e4bb356897b 100644 --- a/test/cases/41-StreamProcessing/30-OldPyCases/test_compatibility_rolling_upgrade.py +++ b/test/cases/41-StreamProcessing/23-Compatibility/test_compatibility_rolling_upgrade.py @@ -17,7 +17,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_compatibility_rolling_upgrade(self): - """TDengine Rolling Upgrade Compatibility Test + """Comp: Rolling Upgrade Test incremental rolling upgrade of individual nodes with stream processing validation: diff --git a/test/cases/41-StreamProcessing/30-OldPyCases/test_compatibility_rolling_upgrade_all.py 
b/test/cases/41-StreamProcessing/23-Compatibility/test_compatibility_rolling_upgrade_all.py similarity index 99% rename from test/cases/41-StreamProcessing/30-OldPyCases/test_compatibility_rolling_upgrade_all.py rename to test/cases/41-StreamProcessing/23-Compatibility/test_compatibility_rolling_upgrade_all.py index 8af1c3479ccb..468a05a54376 100644 --- a/test/cases/41-StreamProcessing/30-OldPyCases/test_compatibility_rolling_upgrade_all.py +++ b/test/cases/41-StreamProcessing/23-Compatibility/test_compatibility_rolling_upgrade_all.py @@ -17,7 +17,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_compatibility_rolling_upgrade_all(self): - """TDengine Rolling Upgrade All Dnodes Compatibility Test + """Comp: Rolling Upgrade All Dnodes Test rolling upgrade of all cluster nodes simultaneously with stream processing validation: diff --git a/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_at_once.py b/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_at_once.py index 20bfbbbfedfd..2499e7e2302c 100644 --- a/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_at_once.py +++ b/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_at_once.py @@ -8,12 +8,12 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_others_oldcase_atonce(self): - """at once + """OldPy: at once test replace the at once in old cases with the count(1) window function Catalog: - - Streams:UseCases + - Streams:OldPyCases Since: v3.3.3.7 diff --git a/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_backquote_check.py b/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_backquote_check.py index 15253f10e8d0..7bf6f0daffae 100644 --- a/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_backquote_check.py +++ b/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_backquote_check.py @@ -1,4 +1,4 @@ -# pytest --clean --skip_stop cases/13-StreamProcessing/31-OldTsimCases/test_oldcase_backquote_check.py 
+# pytest --clean --skip_stop cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_backquote_check.py import time from datetime import datetime @@ -25,12 +25,12 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_others_oldcase_backquote_check(self): - """back quote + """OldPy: back quote test back quote check Catalog: - - Streams:UseCases + - Streams:OldPyCases Since: v3.3.3.7 diff --git a/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_checkpoint_info.py b/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_checkpoint_info.py index ebf872155859..190aa255e721 100644 --- a/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_checkpoint_info.py +++ b/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_checkpoint_info.py @@ -42,7 +42,7 @@ def setup_class(cls): def test_checkpoint_info(self): - """Stream basic test 1 + """OldPy: checkpoint 1. create snode 2. create stream and restart stream @@ -56,7 +56,7 @@ def test_checkpoint_info(self): 10. 
check checkpoint file Catalog: - - Streams:OldStreamCases + - Streams:OldPyCases Since: v3.0.0.0 diff --git a/test/cases/41-StreamProcessing/30-OldPyCases/test_drop.py b/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_drop.py similarity index 99% rename from test/cases/41-StreamProcessing/30-OldPyCases/test_drop.py rename to test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_drop.py index 1ac4cbf90feb..1733f1dce88c 100644 --- a/test/cases/41-StreamProcessing/30-OldPyCases/test_drop.py +++ b/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_drop.py @@ -64,7 +64,7 @@ def test_stream_drop(self): 4.2.4 Validate connection stability after errors Catalog: - - Streams:Operations:Drop + - Streams:OldPyCases Since: v3.3.7.0 diff --git a/test/cases/41-StreamProcessing/30-OldPyCases/test_empty_identifier.py b/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_empty_identifier.py similarity index 99% rename from test/cases/41-StreamProcessing/30-OldPyCases/test_empty_identifier.py rename to test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_empty_identifier.py index 851601ff76e4..8a21babb3063 100644 --- a/test/cases/41-StreamProcessing/30-OldPyCases/test_empty_identifier.py +++ b/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_empty_identifier.py @@ -76,7 +76,7 @@ def test_empty_identifier(self): 8.4 Validate connection stability after errors Catalog: - - SQL:SyntaxValidation:EmptyIdentifier + - Streams:OldPyCases Since: v3.3.7.0 diff --git a/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_forcewindowclose.py b/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_forcewindowclose.py index 5ccb894c2f97..e86137732948 100644 --- a/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_forcewindowclose.py +++ b/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_forcewindowclose.py @@ -11,38 +11,32 @@ def setup_class(cls): cls.tdCom = tdCom def get_source_firt_ts(self, table_name1): - tdSql.query( - f'select 
cast(first(ts) as bigint) from {table_name1} order by 1' - ) + tdSql.query(f"select cast(first(ts) as bigint) from {table_name1} order by 1") # getData don't support negative index res_ts = tdSql.getData(0, 0) return res_ts def get_source_last_ts(self, table_name1): - tdSql.query( - f'select cast(last(ts) as bigint) from {table_name1} order by 1' - ) + tdSql.query(f"select cast(last(ts) as bigint) from {table_name1} order by 1") # getData don't support negative index res_ts = tdSql.getData(0, 0) return res_ts - + def get_stream_first_win_ts(self, table_name1): tdSql.query( f'select _wstart, count(*) from {table_name1} interval({self.tdCom.dataDict["interval"]}s) order by 1' ) res_ts = tdSql.getData(0, 0) return res_ts - + def insert_data(self, custom_col_index, col_value_type): self.tdCom.date_time = self.tdCom.genTs(precision=self.tdCom.precision)[0] time.sleep(1) - + min_new_ts = 0 for i in range(self.tdCom.range_count): cur_time = str(self.tdCom.date_time + self.tdCom.dataDict["interval"]) - ts_value = ( - cur_time + f"+{i * 5 + 30}s" - ) + ts_value = cur_time + f"+{i * 5 + 30}s" if min_new_ts == 0: min_new_ts = ts_value @@ -84,18 +78,18 @@ def insert_data(self, custom_col_index, col_value_type): ) self.tdCom.date_time += 1 cur_time = str(self.tdCom.date_time + self.tdCom.dataDict["interval"]) - max_new_ts = (cur_time + f"+{self.tdCom.range_count * 10 + 30}s") + max_new_ts = cur_time + f"+{self.tdCom.range_count * 10 + 30}s" self.tdCom.sinsert_rows( - tbname=self.tdCom.ctb_name, - ts_value=max_new_ts, - custom_col_index=custom_col_index, - col_value_type=col_value_type, + tbname=self.tdCom.ctb_name, + ts_value=max_new_ts, + custom_col_index=custom_col_index, + col_value_type=col_value_type, ) self.tdCom.sinsert_rows( - tbname=self.tdCom.tb_name, - ts_value=max_new_ts, - custom_col_index=custom_col_index, - col_value_type=col_value_type, + tbname=self.tdCom.tb_name, + ts_value=max_new_ts, + custom_col_index=custom_col_index, + col_value_type=col_value_type, ) 
return (min_new_ts, max_new_ts) @@ -105,43 +99,62 @@ def insert_disorder_data(self, custom_col_index, col_value_type): min_ts_str = str(min_ts) + f"-10000s" max_ts_str = str(max_ts) + f"+10000s" self.tdCom.sinsert_rows( - tbname=self.tdCom.ctb_name, - ts_value=min_ts_str, - custom_col_index=custom_col_index, - col_value_type=col_value_type, + tbname=self.tdCom.ctb_name, + ts_value=min_ts_str, + custom_col_index=custom_col_index, + col_value_type=col_value_type, ) self.tdCom.sinsert_rows( - tbname=self.tdCom.tb_name, - ts_value=min_ts_str, - custom_col_index=custom_col_index, - col_value_type=col_value_type, + tbname=self.tdCom.tb_name, + ts_value=min_ts_str, + custom_col_index=custom_col_index, + col_value_type=col_value_type, ) self.tdCom.sinsert_rows( - tbname=self.tdCom.ctb_name, - ts_value=max_ts_str, - custom_col_index=custom_col_index, - col_value_type=col_value_type, + tbname=self.tdCom.ctb_name, + ts_value=max_ts_str, + custom_col_index=custom_col_index, + col_value_type=col_value_type, ) self.tdCom.sinsert_rows( - tbname=self.tdCom.tb_name, - ts_value=max_ts_str, - custom_col_index=custom_col_index, - col_value_type=col_value_type, + tbname=self.tdCom.tb_name, + ts_value=max_ts_str, + custom_col_index=custom_col_index, + col_value_type=col_value_type, ) - def do_exec(self, interval, partition="tbname", delete=False, fill_value=None, filter=None): + def do_exec( + self, interval, partition="tbname", delete=False, fill_value=None, filter=None + ): # partition must be tbname, and not NONE. tdLog.info( f"*** testing stream do_exec + interval + fill. 
partition: {partition}, interval: {interval}, fill: {fill_value}, delete: {delete} ***" ) - fwc_downsampling_function_list = ["min(c1)", "max(c2)", "sum(c3)", "twa(c7)", "count(c8)", "elapsed(ts)", "timediff(1, 0, 1h)", "timezone()","min(t1)", "max(t2)", "sum(t3)", - "twa(t7)", "count(t8)"] - - fwc_stb_output_select_str = ','.join(list(map(lambda x:f'`{x}`', fwc_downsampling_function_list))) - fwc_tb_output_select_str = ','.join(list(map(lambda x:f'`{x}`', fwc_downsampling_function_list[0:7]))) - fwc_stb_source_select_str = ','.join(fwc_downsampling_function_list) - fwc_tb_source_select_str = ','.join(fwc_downsampling_function_list[0:7]) + fwc_downsampling_function_list = [ + "min(c1)", + "max(c2)", + "sum(c3)", + "twa(c7)", + "count(c8)", + "elapsed(ts)", + "timediff(1, 0, 1h)", + "timezone()", + "min(t1)", + "max(t2)", + "sum(t3)", + "twa(t7)", + "count(t8)", + ] + + fwc_stb_output_select_str = ",".join( + list(map(lambda x: f"`{x}`", fwc_downsampling_function_list)) + ) + fwc_tb_output_select_str = ",".join( + list(map(lambda x: f"`{x}`", fwc_downsampling_function_list[0:7])) + ) + fwc_stb_source_select_str = ",".join(fwc_downsampling_function_list) + fwc_tb_source_select_str = ",".join(fwc_downsampling_function_list[0:7]) self.tdCom.subtable = False col_value_type = "Incremental" if partition == "c1" else "random" @@ -163,29 +176,29 @@ def do_exec(self, interval, partition="tbname", delete=False, fill_value=None, f self.ctb_stream_des_table = f"{self.ctb_name}{self.tdCom.des_table_suffix}" self.tb_stream_des_table = f"{self.tb_name}{self.tdCom.des_table_suffix}" - + if partition: partition_elm = f"partition by {partition}" else: partition_elm = "" - + query_partition_elm = partition_elm if fill_value: if "value" in fill_value.lower(): - stb_fill_value='VALUE,1,2,3,4,5,6,1,2,3,4,5' - tb_fill_value='VALUE,1,2,3,4,5,6' + stb_fill_value = "VALUE,1,2,3,4,5,6,1,2,3,4,5" + tb_fill_value = "VALUE,1,2,3,4,5,6" else: - stb_fill_value=fill_value - tb_fill_value=fill_value 
+ stb_fill_value = fill_value + tb_fill_value = fill_value query_stb_fill_elm = f"fill({stb_fill_value})" query_tb_fill_elm = f"fill({tb_fill_value})" else: query_stb_fill_elm = "" query_tb_fill_elm = "" stb_fill_value = None - tb_fill_value=None - + tb_fill_value = None + where_elm = "where 1=1" if filter: where_elm = f" and {filter}" @@ -206,35 +219,39 @@ def do_exec(self, interval, partition="tbname", delete=False, fill_value=None, f # ) self.tdCom.create_stream( - stream_name=f'{self.stb_name}{self.tdCom.stream_suffix}', + stream_name=f"{self.stb_name}{self.tdCom.stream_suffix}", des_table=self.stb_stream_des_table, - source_sql=f'select _twstart AS wstart, {fwc_stb_source_select_str} from ' - f'{self.stb_name} {where_elm} and _c0 >= _twstart and _c0 < _twend {partition_elm} ', - trigger_table=f'{self.stb_name}', + source_sql=f"select _twstart AS wstart, {fwc_stb_source_select_str} from " + f"{self.stb_name} {where_elm} and _c0 >= _twstart and _c0 < _twend {partition_elm} ", + trigger_table=f"{self.stb_name}", trigger_type=f'interval({self.tdCom.dataDict["interval"]}s) sliding({self.tdCom.dataDict["interval"]}s)', - partition_by="tbname" + partition_by="tbname", ) # and _c0 <= _tlocaltime/1000000 and _c0 >= _tprev_localtime/1000000 self.tdCom.create_stream( stream_name=f"{self.tb_name}{self.tdCom.stream_suffix}", des_table=self.tb_stream_des_table, - source_sql=f'select cast(_tlocaltime/1000000 as timestamp) AS wstart, {fwc_tb_source_select_str} from ' - f'{self.tb_name} {where_elm} {partition_elm} interval({self.tdCom.dataDict["interval"]}s)', + source_sql=f"select cast(_tlocaltime/1000000 as timestamp) AS wstart, {fwc_tb_source_select_str} from " + f'{self.tb_name} {where_elm} {partition_elm} interval({self.tdCom.dataDict["interval"]}s)', # fill_value=tb_fill_value, # fill_history_value=fill_history_value, - trigger_type = f'period({self.tdCom.dataDict["interval"]}s)' + trigger_type=f'period({self.tdCom.dataDict["interval"]}s)', ) # wait and check stream_task 
status is ready tdSql.query("show streams") - tdLog.info(f"tdSql.queryResult:{tdSql.queryResult},tdSql.queryRows:{tdSql.queryRows}") + tdLog.info( + f"tdSql.queryResult:{tdSql.queryResult},tdSql.queryRows:{tdSql.queryRows}" + ) time.sleep(10) localQueryResult = tdSql.queryResult while True: - tdSql.query(f"select status from information_schema.ins_streams where stream_name='{localQueryResult[0][0]}'") + tdSql.query( + f"select status from information_schema.ins_streams where stream_name='{localQueryResult[0][0]}'" + ) if tdSql.getData(0, 0) != "Running": print("stream not running, waiting....") time.sleep(10) @@ -242,7 +259,9 @@ def do_exec(self, interval, partition="tbname", delete=False, fill_value=None, f break while True: - tdSql.query(f"select status from information_schema.ins_streams where stream_name='{localQueryResult[1][0]}'") + tdSql.query( + f"select status from information_schema.ins_streams where stream_name='{localQueryResult[1][0]}'" + ) if tdSql.getData(0, 0) != "Running": print("stream not running, waiting....") time.sleep(10) @@ -265,19 +284,19 @@ def do_exec(self, interval, partition="tbname", delete=False, fill_value=None, f tdLog.info("insert data") temp, end_new_ts = self.insert_data(custom_col_index, col_value_type) - #history and future + # history and future self.insert_disorder_data(custom_col_index, col_value_type) time.sleep(self.tdCom.dataDict["interval"] * 6 * 2) tdLog.info("check data") # check the data - where_elm = f'{where_elm} and _c0 >= {start_new_ts} and _c0 <= {end_new_ts}' + where_elm = f"{where_elm} and _c0 >= {start_new_ts} and _c0 <= {end_new_ts}" for tbname in [self.stb_name, self.tb_name]: if fill_value: query_first_win_ts = self.get_stream_first_win_ts(tbname) query_where_elm = f'where wstart >= "{query_first_win_ts}"' - stream_where_elm = f'where wstart <= {end_new_ts}' + stream_where_elm = f"where wstart <= {end_new_ts}" else: query_where_elm = "" stream_where_elm = "" @@ -285,52 +304,57 @@ def do_exec(self, interval, 
partition="tbname", delete=False, fill_value=None, f # check data tdLog.info(f"check data for table {tbname}") if tbname == self.stb_name: - sql = f'select * from (select _wstart AS wstart, {fwc_stb_source_select_str} from {tbname} {where_elm} {query_partition_elm} interval({self.tdCom.dataDict["interval"]}s) {query_stb_fill_elm} order by wstart) {query_where_elm}', - print('--------------', sql) + sql = ( + f'select * from (select _wstart AS wstart, {fwc_stb_source_select_str} from {tbname} {where_elm} {query_partition_elm} interval({self.tdCom.dataDict["interval"]}s) {query_stb_fill_elm} order by wstart) {query_where_elm}', + ) + print("--------------", sql) - sql1 = f'select wstart, {fwc_stb_output_select_str} from {tbname}{self.tdCom.des_table_suffix} {stream_where_elm} order by wstart', - print('================', sql1) + sql1 = ( + f"select wstart, {fwc_stb_output_select_str} from {tbname}{self.tdCom.des_table_suffix} {stream_where_elm} order by wstart", + ) + print("================", sql1) self.tdCom.check_query_data( - f'select wstart, {fwc_stb_output_select_str} from {tbname}{self.tdCom.des_table_suffix} {where_elm} order by wstart', + f"select wstart, {fwc_stb_output_select_str} from {tbname}{self.tdCom.des_table_suffix} {where_elm} order by wstart", f'select * from (select _wstart AS wstart, {fwc_stb_source_select_str} from {tbname} {where_elm} {query_partition_elm} interval({self.tdCom.dataDict["interval"]}s) {query_stb_fill_elm} order by wstart) {query_where_elm}', - sorted=True + sorted=True, ) else: self.tdCom.check_query_data( - f'select wstart, {fwc_tb_output_select_str} from {tbname}{self.tdCom.des_table_suffix} {stream_where_elm} order by wstart', + f"select wstart, {fwc_tb_output_select_str} from {tbname}{self.tdCom.des_table_suffix} {stream_where_elm} order by wstart", f'select * from (select _wstart AS wstart, {fwc_tb_source_select_str} from {tbname} {where_elm} {query_partition_elm} interval({self.tdCom.dataDict["interval"]}s) 
{query_tb_fill_elm} order by wstart) {query_where_elm}', - sorted=True + sorted=True, ) def test_period_interval(self): - """basic test + """OldPy: force window close + + interval + sliding simulates the force window close trigger model. - interval + sliding simulates the force window close trigger model. + Catalog: + - Streams:OldPyCases - Catalog: - - Streams: 30-OldPyCases - Description: - - create 4 streams, each stream has 2 source tables - - write data to source tables - - check stream results - - fill(prev) and fill(Value) NOT support yet! + Description: + - create 4 streams, each stream has 2 source tables + - write data to source tables + - check stream results + - fill(prev) and fill(Value) NOT support yet! - Since: v3.3.3.7 + Since: v3.3.3.7 - Labels: common,ci + Labels: common,ci - Jira: None + Jira: None - History: - - 2025-07-22 + History: + - 2025-07-22 """ tdStream.createSnode() # self.do_exec(interval = 5, partition = "tbname", delete = True, fill_value = "NULL") - self.do_exec(interval = 5, partition = "tbname", delete = True, fill_value = None) + self.do_exec(interval=5, partition="tbname", delete=True, fill_value=None) # not support yet # self.do_exec(interval = 5, partition = "tbname", delete = True, fill_value = "VALUE") diff --git a/test/cases/41-StreamProcessing/30-OldPyCases/test_interval.py b/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_interval.py similarity index 100% rename from test/cases/41-StreamProcessing/30-OldPyCases/test_interval.py rename to test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_interval.py diff --git a/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_interval_partition.py b/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_interval_partition.py index b5bb47e74ea1..fb1edc60e53f 100644 --- a/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_interval_partition.py +++ b/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_interval_partition.py @@ -11,7 +11,7 @@ class 
TestIntervalPartition: [(10, "tbname"), (10, "t1"), (10, "t2"), (10, "t1,t2")], ) def test_interval_partition(self, interval, partition_by): - """迁移老用例 + """OldPy: partitionby 老用例 tests/system-test/8-stream/partition_interval.py 老的建流语句 @@ -19,6 +19,9 @@ def test_interval_partition(self, interval, partition_by): 新的建流语句 CREATE STREAM xxx INTERVAL(10s) SLIDING(10s) FROM stb PARTITON BY tbname INTO xxx AS SELECT _tcurrent_ts as ts,count(val) FROM %%trows; + Catalog: + - Streams:OldPyCases + Since: v3.3.7.0 Labels: common,ci diff --git a/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_math_func.py b/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_math_func.py index e7e7e9c122fa..49da1e6db9b4 100644 --- a/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_math_func.py +++ b/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_math_func.py @@ -26,7 +26,7 @@ class TestMathFunctionInStream: ], ) def test_math_function(self, math_func): - """迁移旧的测试用例 + """OldPy: math function 旧用例 tests/system-test/8-stream/scalar_function.py 测试在流计算中使用数学函数 @@ -35,6 +35,9 @@ def test_math_function(self, math_func): 新的建流语句: CREATE STREAM XXX SLIDING(10s) FROM tb INTO XXX AS SELECT ts, log(val, 2) as val FROM %%trows; + Catalog: + - Streams:OldPyCases + Since: v3.3.7.0 Labels: common,ci diff --git a/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_sliding_partition.py b/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_sliding_partition.py index bd1b9b2004dd..02eaa5ce28a2 100644 --- a/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_sliding_partition.py +++ b/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_sliding_partition.py @@ -11,7 +11,7 @@ class TestSlindingPartition: [(10, "tbname"), (10, "t1"), (10, "t2"), (10, "t1,t2")], ) def test_sliding_partition(self, sliding, partition_by): - """迁移老用例 + """OldPy: sliding 老用例 tests/system-test/8-stream/partition_interval.py 老的建流语句 @@ -19,6 +19,9 @@ def test_sliding_partition(self, 
sliding, partition_by): 新的建流语句 CREATE STREAM xxx SLINDING(10s) FROM stb PARTITON BY tbname INTO xxx AS SELECT _tcurrent_ts as ts,count(val) FROM %%trows; + Catalog: + - Streams:OldPyCases + Since: v3.3.7.0 Labels: common,ci diff --git a/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_snode_restart_with_checkpoint.py b/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_snode_restart_with_checkpoint.py index faf4a7784e46..08dc69711323 100644 --- a/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_snode_restart_with_checkpoint.py +++ b/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_snode_restart_with_checkpoint.py @@ -25,12 +25,12 @@ def setup_class(cls): def test_case1(self): - """Stream basic test 1 + """OldPy: snode 1. - Catalog: - - Streams:OldStreamCases + - Streams:OldPyCases Since: v3.0.0.0 diff --git a/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_state_window.py b/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_state_window.py index c249f54301c6..8e3cdf4ddd51 100644 --- a/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_state_window.py +++ b/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_state_window.py @@ -7,10 +7,13 @@ class TestStateWindow: @pytest.mark.ci def test_state_window(self): - """测试流计算的状态窗口STATE_WINDOW + """OldPy: state window 迁移自老用例: tests/system-test/8-stream/state_window_case.py + Catalog: + - Streams:OldPyCases + Since: v3.3.6.0 Labels: common,ci diff --git a/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_stream_basic.py b/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_stream_basic.py index 759ec3359298..c1ec14262088 100644 --- a/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_stream_basic.py +++ b/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_stream_basic.py @@ -45,12 +45,12 @@ def setup_class(cls): # run def test_stream_basic(self): - """Stream basic test 1 + """OldPy: basic test 1 1. 
test stream basic Catalog: - - Streams:OldStreamCases + - Streams:OldPyCases Since: v3.0.0.0 diff --git a/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_stream_multi_agg.py b/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_stream_multi_agg.py index acdd90f18fe6..a69dd6712c87 100644 --- a/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_stream_multi_agg.py +++ b/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_stream_multi_agg.py @@ -39,12 +39,12 @@ def setup_class(cls): tdLog.info(f"start to excute {__file__}") def test_steram_multi_agg(self): - """Stream basic test 1 + """OldPy: aggregation func 1. test_Stream_Multi_Agg Catalog: - - Streams:OldStreamCases + - Streams:OldPyCases Since: v3.0.0.0 diff --git a/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_string_func.py b/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_string_func.py index 4f9cf411a427..32fcdea2e27b 100644 --- a/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_string_func.py +++ b/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_string_func.py @@ -23,7 +23,7 @@ class TestStringFunctionInStream: ], ) def test_string_function(self, string_func): - """迁移旧的测试用例 + """OldPy: string function 旧用例 tests/system-test/8-stream/scalar_function.py 测试在流计算中使用字符串函数 @@ -32,6 +32,9 @@ def test_string_function(self, string_func): 新的建流语句: CREATE STREAM XXX SLIDING(10s) FROM tb INTO XXX AS SELECT ts, char_length(val) as val FROM %%trows; + Catalog: + - Streams:OldPyCases + Since: v3.3.7.0 Labels: common,ci diff --git a/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_taosdShell.py b/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_taosdShell.py index 72253ea2bb1b..c57ddaf2055d 100644 --- a/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_taosdShell.py +++ b/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_taosdShell.py @@ -1,4 +1,4 @@ -# pytest --clean --skip_stop 
cases/13-StreamProcessing/31-OldTsimCases/test_oldcase_lihui_taosdShell_new.py -N 5 +# pytest --clean --skip_stop cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_lihui_taosdShell_new.py -N 5 import time from datetime import datetime @@ -25,12 +25,12 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_others_oldcase_taosdShell(self): - """taosd shell + """OldPy: shell create stream test taosd shell command Catalog: - - Streams:UseCases + - Streams:OldPyCases Since: v3.3.3.7 diff --git a/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_window_true_for.py b/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_window_true_for.py index 0cc53dfa14c1..145af9fb8a79 100644 --- a/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_window_true_for.py +++ b/test/cases/41-StreamProcessing/30-OldPyCases/test_oldcase_window_true_for.py @@ -6,10 +6,13 @@ class TestWindowTrueFor: def test_window_true_for(self): - """迁移自老用例 + """OldPy: true for tests/system-test/2-query/test_window_true_for.py + Catalog: + - Streams:OldPyCases + Since: v3.3.7.0 Labels: common,ci diff --git a/test/cases/41-StreamProcessing/30-OldPyCases/test_stream_basic_bug1.py b/test/cases/41-StreamProcessing/30-OldPyCases/test_stream_basic_bug1.py deleted file mode 100644 index 2022cd7f0c38..000000000000 --- a/test/cases/41-StreamProcessing/30-OldPyCases/test_stream_basic_bug1.py +++ /dev/null @@ -1,132 +0,0 @@ -################################################################### -# Copyright (c) 2016 by TAOS Technologies, Inc. -# All rights reserved. -# -# This file is proprietary and confidential to TAOS Technologies. 
-# No part of this file may be reproduced, stored, transmitted, -# disclosed or used in any form or by any means other than as -# expressly provided by the written permission from Jianhui Tao -# -################################################################### - -# -*- coding: utf-8 -*- - - -from new_test_framework.utils import tdLog, tdSql, tdStream, streamUtil,StreamTableType, StreamTable, cluster - - -import random -import time -import traceback -import os -from os import path -import psutil - - -class TestStreamBasicCase: - caseName = "TestStreamBasicCase" - currentDir = os.path.dirname(os.path.abspath(__file__)) - runAll = False - dbname = "test" - trigTbname = "" - calcTbname = "" - outTbname = "" - stName = "" - resultIdx = "" - sliding = 1 - subTblNum = 3 - tblRowNum = 10 - tableList = [] - - def setup_class(cls): - tdLog.info(f"start to excute {__file__}") - - - - # run - def test_stream_basic(self): - """Stream basic test 1 - - 1. test stream basic - - Catalog: - - Streams:OldStreamCases - - Since: v3.0.0.0 - - Labels: common, ci - - Jira: None - - History: - - 2025-7-21 lvze Migrated from community/tests/system-test/8-stream/stream_basic.py - - """ - tdSql.execute("create snode on dnode 1") - self.case1() - - - - def case1(self): - - tdSql.execute(f'create database if not exists d1 vgroups 1') - tdSql.execute(f'use d1') - tdSql.execute(f'create table st(ts timestamp, i int) tags(t int)') - tdSql.execute(f'insert into t1 using st tags(1) values(now, 1) (now+1s, 2)') - tdSql.execute(f'insert into t2 using st tags(2) values(now, 1) (now+1s, 2)') - tdSql.execute(f'insert into t3 using st tags(3) values(now, 1) (now+1s, 2)') - - tdSql.execute("""create stream d1.stream1 interval(1m) sliding(1m) from d1.st partition by tbname - stream_options(fill_history) - into d1.sta output_subtable(concat('nee.w-', tbname)) - tags(tname varchar(100) as tbname) - as select - _twstart, - count(*), - avg(i) - from - %%tbname;""", queryTimes=2,show=True) - - 
self.checkStreamRunning() - - sql= "select * from sta" - tdSql.checkRowsLoop(3, sql, loopCount=100, waitTime=0.5) - tdSql.query("select tbname from sta order by tbname") - if '.' in tdSql.getData(0,0): - raise Exception("ERROR :table name have '.'") - if not tdSql.getData(0, 0).startswith('nee.w-t1'): - tdLog.exit("error1") - - - if not tdSql.getData(1, 0).startswith('nee.w-t2'): - tdLog.exit("error2") - - if not tdSql.getData(2, 0).startswith('nee.w-t3'): - tdLog.exit("error3") - - - def checkStreamRunning(self): - tdLog.info(f"check stream running status:") - - timeout = 60 - start_time = time.time() - - while True: - if time.time() - start_time > timeout: - tdLog.error("Timeout waiting for all streams to be running.") - tdLog.error(f"Final stream running status: {streamRunning}") - raise TimeoutError(f"Stream status did not reach 'Running' within {timeout}s timeout.") - - tdSql.query(f"select status from information_schema.ins_streams order by stream_name;") - streamRunning=tdSql.getColData(0) - - if all(status == "Running" for status in streamRunning): - tdLog.info("All Stream running!") - tdLog.info(f"stream running status: {streamRunning}") - return - else: - tdLog.info("Stream not running! 
Wait stream running ...") - tdLog.info(f"stream running status: {streamRunning}") - time.sleep(1) - - diff --git a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_basic1.py b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_basic1.py index 092a446c5a87..a2e10acdba1e 100644 --- a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_basic1.py +++ b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_basic1.py @@ -13,7 +13,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_oldcase_basic1(self): - """Stream basic test 1 + """OldTsim: basic 1 Basic test cases for streaming, part 1 diff --git a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_basic2.py b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_basic2.py index f3b886b7c721..daf9f5d9c36c 100644 --- a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_basic2.py +++ b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_basic2.py @@ -13,7 +13,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_oldcase_basic2(self): - """Stream basic test 2 + """OldTsim: basic 2 Basic test cases for streaming, part 2 diff --git a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_check.py b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_check.py index fe372eae2e2a..d0aaa4d8095d 100644 --- a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_check.py +++ b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_check.py @@ -13,7 +13,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_oldcase_check(self): - """Stream check stable + """OldTsim: check stable Verify the computation results of streams when triggered by different windows. 
diff --git a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_checkpoint.py b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_checkpoint.py index 4f53a1d0e61e..a71f83fb5dcc 100644 --- a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_checkpoint.py +++ b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_checkpoint.py @@ -15,7 +15,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_oldcase_checkpoint(self): - """Stream checkpoint + """OldTsim: checkpoint Test if the stream continues to run after a restart. diff --git a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_concat.py b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_concat.py index 7676cff3ffb0..a6b25bd692ed 100644 --- a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_concat.py +++ b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_concat.py @@ -13,7 +13,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_oldcase_concat(self): - """Stream concat + """OldTsim: concat Test the use of the concat function in output_subtable and tags statements. 
diff --git a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_continuewindowclose.py b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_continuewindowclose.py index d6deb430bacd..f788b657317b 100644 --- a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_continuewindowclose.py +++ b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_continuewindowclose.py @@ -13,7 +13,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_oldcase_continue_window_close(self): - """Stream continue window close + """OldTsim: continue window close Verify the alternative approach to the original continuous window close trigger mode in the new streaming computation diff --git a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_count.py b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_count.py index 678016303f80..98ed5a3e1d51 100644 --- a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_count.py +++ b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_count.py @@ -13,7 +13,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_oldcase_count(self): - """Stream count window + """OldTsim: count window Basic use cases of count window, include expired-data, out-of-order data, and data-deletion diff --git a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_delete.py b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_delete.py index c023e01b321a..6592671fcf7a 100644 --- a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_delete.py +++ b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_delete.py @@ -13,7 +13,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_oldcase_delete(self): - """Stream delete + """OldTsim: delete data Test the correctness of results when deleting data in various trigger windows diff --git 
a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_distribute.py b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_distribute.py index 89d339b6c2df..d3f48bece1f7 100644 --- a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_distribute.py +++ b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_distribute.py @@ -13,7 +13,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_oldcase_distribute(self): - """Stream distribute + """OldTsim: distribute Perform multiple write triggers to verify the correctness of the calculation results diff --git a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_event.py b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_event.py index face3c7e63cb..ad5656370359 100644 --- a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_event.py +++ b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_event.py @@ -13,7 +13,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_oldcase_event(self): - """Stream event window + """OldTsim: event window Test event window deletion and update diff --git a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_fillhistory.py b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_fillhistory.py index e87ed25b3bf8..69cff0b10704 100644 --- a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_fillhistory.py +++ b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_fillhistory.py @@ -13,7 +13,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_oldcase_fillhistory(self): - """Stream fill history + """OldTsim: fill history Verify the correctness of historical data calculation results, as well as the calculation results at the boundary between historical and real-time computation. 
diff --git a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_fillinternal.py b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_fillinternal.py index 7817540defc0..b213c57d7608 100644 --- a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_fillinternal.py +++ b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_fillinternal.py @@ -11,7 +11,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_oldcase_fill_interval(self): - """Stream fill interval + """OldTsim: fill interval Test the results of various numerical fillings in the interval window diff --git a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_forcewindowclose.py b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_forcewindowclose.py index c562b2a27d8f..b3cbcccfa7ea 100644 --- a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_forcewindowclose.py +++ b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_forcewindowclose.py @@ -13,7 +13,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_oldcase_force_window_close(self): - """Stream force window close + """OldTsim: force window close Verify the alternative approach to the original force window close trigger mode in the new streaming computation diff --git a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_interp_delete.py b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_interp_delete.py index a1fbe9c1105b..e11b4a572395 100644 --- a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_interp_delete.py +++ b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_interp_delete.py @@ -13,7 +13,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_oldcase_interp_delete(self): - """Stream interp delete + """OldTsim: interp delete Verify the calculation results of the interp function when deleting data diff --git 
a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_interp_fill.py b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_interp_fill.py index 8fe2a15dfcdc..f0d3c2f9721e 100644 --- a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_interp_fill.py +++ b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_interp_fill.py @@ -13,7 +13,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_oldcase_interp_fill(self): - """Stream interp fill + """OldTsim: interp fill Validate the calculation results of the interp function when filling data diff --git a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_interp_history.py b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_interp_history.py index 9984dea292e5..6a3c7f18aacf 100644 --- a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_interp_history.py +++ b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_interp_history.py @@ -13,7 +13,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_oldcase_interp_history(self): - """Stream interp history + """OldTsim: interp history Validate the calculation results of the interp function when processing historical data diff --git a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_interp_partitionby.py b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_interp_partitionby.py index aa77fa3b394e..8910a6ce0c58 100644 --- a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_interp_partitionby.py +++ b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_interp_partitionby.py @@ -13,7 +13,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_oldcase_interp_partitionby(self): - """Stream interp partition by + """OldTsim: interp partition by Validate the calculation results of the ​​interp​​ function under ​​PARTITION BY​​ clauses diff --git 
a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_interp_primary.py b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_interp_primary.py index 75b5098f95e9..043b8cb01aa5 100644 --- a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_interp_primary.py +++ b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_interp_primary.py @@ -13,7 +13,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_oldcase_interp_primary(self): - """Stream interp compisite key + """OldTsim: interp compisite key Validate the calculation results of the interp function with cmposite keys diff --git a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_interp_update.py b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_interp_update.py index dfce0418d905..bf1b0c48eab7 100644 --- a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_interp_update.py +++ b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_interp_update.py @@ -13,7 +13,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_oldcase_interp_update(self): - """Stream interp update + """OldTsim: interp update Validate the calculation results of the interp function during data updates diff --git a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_options.py b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_options.py index 4ae576d62655..77f208f0555a 100644 --- a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_options.py +++ b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_options.py @@ -13,7 +13,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_oldcase_options(self): - """Stream options + """OldTsim: options Validate the calculation results when ignore update and ignore delete are applied diff --git a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_partitionby.py 
b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_partitionby.py index 13726abab9d7..7d03070f2462 100644 --- a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_partitionby.py +++ b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_partitionby.py @@ -12,7 +12,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_oldcase_partitionby(self): - """Stream partition by + """OldTsim: partition by Validate the calculation results under PARTITION BY clauses diff --git a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_primary.py b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_primary.py index 2db7aa86b1db..fb296364778e 100644 --- a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_primary.py +++ b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_primary.py @@ -13,7 +13,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_oldcase_interp_primary(self): - """Stream composite key + """OldTsim: composite key Validate the calculation results with composite keys diff --git a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_session.py b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_session.py index 3033b8f93fdb..ada2ec51dd1b 100644 --- a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_session.py +++ b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_session.py @@ -13,7 +13,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_oldcase_session(self): - """Stream session window + """OldTsim: session window Test the correctness of session windows diff --git a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_snode.py b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_snode.py index ff8c12faf46b..877877d4334f 100644 --- a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_snode.py +++ 
b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_snode.py @@ -13,7 +13,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_oldcase_snode(self): - """Stream snode + """OldTsim: snode Test basic operations of snode diff --git a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_state.py b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_state.py index 9f9d23d2bcfc..b16026115fe1 100644 --- a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_state.py +++ b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_state.py @@ -13,7 +13,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_oldcase_state(self): - """Stream state window + """OldTsim: state window Test the correctness of state windows diff --git a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_twa.py b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_twa.py index 8871242c3ced..30ec908e9d61 100644 --- a/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_twa.py +++ b/test/cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_twa.py @@ -13,7 +13,7 @@ def setup_class(cls): tdLog.debug(f"start to execute {__file__}") def test_stream_oldcase_twa(self): - """Stream twa + """OldTsim: twa Verify the behavior of the legacy TWA function in the new streaming computation system diff --git a/test/cases/41-StreamProcessing/99-Others/random_stream.py b/test/cases/41-StreamProcessing/99-Others/random_stream.py index c5a7dbbf5776..a56094c4d6e7 100644 --- a/test/cases/41-StreamProcessing/99-Others/random_stream.py +++ b/test/cases/41-StreamProcessing/99-Others/random_stream.py @@ -1739,7 +1739,7 @@ def test_stream_trigger_type1(self): 可以通过环境变量STREAM_COUNT设置要生成的stream数量 """ # 从环境变量获取stream数量,如果没有设置则使用默认值 - # eg:STREAM_COUNT=10 pytest --clean cases/13-StreamProcessing/99-Others/random_stream.py --skip_stop + # eg:STREAM_COUNT=10 pytest --clean 
cases/41-StreamProcessing/99-Others/random_stream.py --skip_stop import os stream_count = int(os.getenv('STREAM_COUNT', self.stream_count)) tdLog.debug(f"使用的stream数量: {stream_count}") diff --git a/test/ci/cases.task b/test/ci/cases.task index beb8e8ba97dc..6ca3f2d81af9 100644 --- a/test/ci/cases.task +++ b/test/ci/cases.task @@ -25,7 +25,7 @@ ,,y,.,./ci/pytest.sh pytest cases/01-DataTypes/test_null_tag.py # ​​​Special gap before priority support -,,n,.,pytest cases/41-StreamProcessing/30-OldPyCases/test_compatibility_rolling_upgrade.py -N 3 +,,n,.,pytest cases/41-StreamProcessing/23-Compatibility/test_compatibility_rolling_upgrade.py -N 3 # 02-Databases ## 01-Create @@ -330,47 +330,37 @@ ,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/01-Snode/test_snode_privileges_stream.py ,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/01-Snode/test_snode_privileges_systable.py ,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/01-Snode/test_snode_privileges_twodb.py - ## 02-Stream -,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/02-Stream/stream_nosnode.py -,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/02-Stream/stream_checkname.py -,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/02-Stream/stream_long_name.py -,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/02-Stream/stream_samename.py - +,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/02-Stream/test_stream_check_name.py +,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/02-Stream/test_stream_long_name.py +,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/02-Stream/test_stream_no_snode.py +,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/02-Stream/test_stream_same_name.py ## 03-TriggerMode -,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/03-TriggerMode/test_state.py +#,,n,.,pytest cases/41-StreamProcessing/03-TriggerMode/test_count_new.py ,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/03-TriggerMode/test_count.py +,,n,.,pytest 
cases/41-StreamProcessing/03-TriggerMode/test_event_new.py ,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/03-TriggerMode/test_event.py -#,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/03-TriggerMode/test_notify.py ,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/03-TriggerMode/test_fill_history.py -#,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/03-TriggerMode/test_sliding.py -,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/03-TriggerMode/test_window_close_state_window.py - +,,n,.,pytest cases/41-StreamProcessing/03-TriggerMode/test_period_1.py ,,n,.,pytest cases/41-StreamProcessing/03-TriggerMode/test_sliding.py ,,n,.,pytest cases/41-StreamProcessing/03-TriggerMode/test_state_new.py -#,,n,.,pytest cases/41-StreamProcessing/03-TriggerMode/test_state_disorderNupdate_new.py -#,,n,.,pytest cases/41-StreamProcessing/03-TriggerMode/test_count_new.py -,,n,.,pytest cases/41-StreamProcessing/03-TriggerMode/test_event_new.py -,,n,.,pytest cases/41-StreamProcessing/03-TriggerMode/test_period_1.py - +#,,n,.,pytest cases/41-StreamProcessing/03-TriggerMode/test_state_disorder_update_new.py +,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/03-TriggerMode/test_state_window_close.py +,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/03-TriggerMode/test_state.py ## 04-Options -,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/04-Options/test_options.py -,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/04-Options/test_options_vtbl.py +,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/04-Options/test_abnormal_data_table.py +,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/04-Options/test_abnormal_data_vtable.py +,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/04-Options/test_meta_change_table.py +,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/04-Options/test_meta_change_vtable.py ,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/04-Options/test_options_abnormal.py -,,y,.,./ci/pytest.sh pytest 
cases/41-StreamProcessing/04-Options/test_options_abnormal_vtbl.py -,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/04-Options/test_meta.py -,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/04-Options/test_meta_vtbl.py -,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/04-Options/test_disorderUpdateDelete.py -,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/04-Options/test_disorderUpdateDelete_vtbl.py - -,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/04-Options/test_options_us.py +,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/04-Options/test_options_basic.py ,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/04-Options/test_options_ns.py - +,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/04-Options/test_options_us.py +,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/04-Options/test_options_vtable.py ## 05-Notify - +,,n,.,pytest cases/41-StreamProcessing/05-Notify/test_notify.py ## 06-ResultSaved ,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/06-ResultSaved/test_result_saved_comprehensive.py - ## 07-SubQuery ,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/07-SubQuery/test_subquery_basic.py ,,n,.,pytest cases/41-StreamProcessing/07-SubQuery/test_subquery_count_1.py @@ -380,30 +370,26 @@ ,,n,.,pytest cases/41-StreamProcessing/07-SubQuery/test_subquery_session.py ,,n,.,pytest cases/41-StreamProcessing/07-SubQuery/test_subquery_state.py ,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/07-SubQuery/test_subquery_usage_restrict.py - ## 08-Recalc +,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/08-Recalc/test_recalc_combined_options.py +,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/08-Recalc/test_recalc_delete_recalc.py ,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/08-Recalc/test_recalc_expired_time.py ,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/08-Recalc/test_recalc_ignore_disorder.py -,,y,.,./ci/pytest.sh pytest 
cases/41-StreamProcessing/08-Recalc/test_recalc_delete_recalc.py -,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/08-Recalc/test_recalc_watermark.py -,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/08-Recalc/test_recalc_combined_options.py -#,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/08-Recalc/test_recalc_manual.py +#,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/08-Recalc/test_recalc_manual_basic.py #,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/08-Recalc/test_recalc_manual_with_options.py - +,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/08-Recalc/test_recalc_watermark.py ## 20-UseCase ,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/20-UseCase/test_idmp_meters.py -,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/20-UseCase/test_idmp_vehicle.py -,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/20-UseCase/test_idmp_ts-7152.py -,,n,.,pytest cases/41-StreamProcessing/20-UseCase/test_idmp_meters_td36808.py -,,n,.,pytest cases/41-StreamProcessing/20-UseCase/test_idmp_tobacco.py +,,n,.,pytest cases/41-StreamProcessing/20-UseCase/test_idmp_public.py ,,n,.,pytest cases/41-StreamProcessing/20-UseCase/test_idmp_pv.py +,,n,.,pytest cases/41-StreamProcessing/20-UseCase/test_idmp_tobacco.py +,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/20-UseCase/test_idmp_vehicle.py ,,n,.,pytest cases/41-StreamProcessing/20-UseCase/test_nevados.py -,,n,.,pytest cases/41-StreamProcessing/20-UseCase/test_three_gorges_phase1.py ,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/20-UseCase/test_three_gorges_case4.py -,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/20-UseCase/test_three_gorges_case4_bug1.py ,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/20-UseCase/test_three_gorges_case5.py -,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/20-UseCase/test_three_gorges_second_case1_bug1.py -,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/20-UseCase/test_three_gorges_second_case1_twostream.py 
+,,n,.,pytest cases/41-StreamProcessing/20-UseCase/test_three_gorges_phase1.py +,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/20-UseCase/test_three_gorges_second_case1.py +,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/20-UseCase/test_three_gorges_second_case2.py ,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/20-UseCase/test_three_gorges_second_case3.py ,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/20-UseCase/test_three_gorges_second_case4.py ,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/20-UseCase/test_three_gorges_second_case6.py @@ -412,29 +398,25 @@ ,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/20-UseCase/test_three_gorges_second_case19.py ,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/20-UseCase/test_three_gorges_second_case19_bug1.py ,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/20-UseCase/test_three_gorges_second_case22.py - +,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/20-UseCase/test_yuxi_TS_7152.py ## 21-Stability - ## 22-Performance - ## 23-Compatibility -,,n,.,pytest cases/41-StreamProcessing/23-Compatibility/stream_compatibility.py - +,,n,.,pytest cases/41-StreamProcessing/23-Compatibility/test_compatibility_cross_version.py ## 30-OldPyCases -,,n,.,pytest cases/41-StreamProcessing/30-OldPyCases/test_oldcase_state_window.py -,,n,.,pytest cases/41-StreamProcessing/30-OldPyCases/test_oldcase_window_true_for.py -,,n,.,pytest cases/41-StreamProcessing/30-OldPyCases/test_oldcase_math_func.py -,,n,.,pytest cases/41-StreamProcessing/30-OldPyCases/test_oldcase_string_func.py +,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/30-OldPyCases/test_oldcase_at_once.py ,,n,.,pytest cases/41-StreamProcessing/30-OldPyCases/test_oldcase_backquote_check.py -,,n,.,pytest cases/41-StreamProcessing/30-OldPyCases/test_oldcase_taosdShell.py ,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/30-OldPyCases/test_oldcase_checkpoint_info.py -N 4 +,,y,.,./ci/pytest.sh pytest 
cases/41-StreamProcessing/30-OldPyCases/test_oldcase_drop.py +,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/30-OldPyCases/test_oldcase_empty_identifier.py +,,n,.,pytest cases/41-StreamProcessing/30-OldPyCases/test_oldcase_math_func.py ,,n,.,pytest cases/41-StreamProcessing/30-OldPyCases/test_oldcase_snode_restart_with_checkpoint.py -N 4 -,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/30-OldPyCases/test_oldcase_stream_multi_agg.py +,,n,.,pytest cases/41-StreamProcessing/30-OldPyCases/test_oldcase_state_window.py ,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/30-OldPyCases/test_oldcase_stream_basic.py -,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/30-OldPyCases/test_drop.py -,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/30-OldPyCases/test_empty_identifier.py -,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/30-OldPyCases/test_oldcase_at_once.py - +,,y,.,./ci/pytest.sh pytest cases/41-StreamProcessing/30-OldPyCases/test_oldcase_stream_multi_agg.py +,,n,.,pytest cases/41-StreamProcessing/30-OldPyCases/test_oldcase_string_func.py +,,n,.,pytest cases/41-StreamProcessing/30-OldPyCases/test_oldcase_taosdShell.py +,,n,.,pytest cases/41-StreamProcessing/30-OldPyCases/test_oldcase_window_true_for.py ## 31-OldCases ,,n,.,pytest cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_basic1.py ,,n,.,pytest cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_basic2.py @@ -808,7 +790,7 @@ ,,y,.,./ci/pytest.sh pytest cases/uncatalog/system-test/2-query/test_stbJoin.py -Q 4 ,,y,.,./ci/pytest.sh pytest cases/uncatalog/system-test/2-query/test_hint.py # ​​​Special gap before priority support -,,n,.,pytest cases/41-StreamProcessing/30-OldPyCases/test_compatibility_rolling_upgrade_all.py -N 3 +,,n,.,pytest cases/41-StreamProcessing/23-Compatibility/test_compatibility_rolling_upgrade_all.py -N 3 ,,y,.,./ci/pytest.sh pytest cases/uncatalog/system-test/2-query/test_hint.py -Q 2 ,,y,.,./ci/pytest.sh pytest 
cases/uncatalog/system-test/2-query/test_hint.py -Q 3 ,,y,.,./ci/pytest.sh pytest cases/uncatalog/system-test/2-query/test_hint.py -Q 4 @@ -1348,7 +1330,7 @@ ,,y,.,./ci/pytest.sh pytest cases/uncatalog/system-test/2-query/test_projectionDesc.py -Q 2 # ​​​Special gap before priority support -,,n,.,pytest cases/41-StreamProcessing/30-OldPyCases/test_compatibility.py +,,n,.,pytest cases/41-StreamProcessing/23-Compatibility/test_compatibility_backward_forward.py ,,y,.,./ci/pytest.sh pytest cases/uncatalog/system-test/2-query/test_between.py -Q 3 ,,y,.,./ci/pytest.sh pytest cases/uncatalog/system-test/2-query/test_distinct.py -Q 3 diff --git a/test/new_test_framework/utils/compatibilityUtil.py b/test/new_test_framework/utils/compatibilityUtil.py index 97b415b75d01..2342b9b8ec3d 100644 --- a/test/new_test_framework/utils/compatibilityUtil.py +++ b/test/new_test_framework/utils/compatibilityUtil.py @@ -216,7 +216,7 @@ def prepareDataOnOldVersion(self, base_version, bPath,corss_major_version): os.system("LD_LIBRARY_PATH=/usr/lib taos -s 'flush database test '") os.system("LD_LIBRARY_PATH=/usr/lib taos -s \"insert into test.d1 values (now+11s, 11, 190, 0.21), (now+12s, 11, 190, 0.21), (now+13s, 11, 190, 0.21), (now+14s, 11, 190, 0.21), (now+15s, 11, 190, 0.21) test.d3 values (now+16s, 11, 190, 0.21), (now+17s, 11, 190, 0.21), (now+18s, 11, 190, 0.21), (now+19s, 119, 191, 0.25) test.d3 (ts) values (now+20s);\"") - os.system("LD_LIBRARY_PATH=/usr/lib taosBenchmark -f cases/13-StreamProcessing/30-OldPyCases/json/com_alltypedata.json -y") + os.system("LD_LIBRARY_PATH=/usr/lib taosBenchmark -f cases/41-StreamProcessing/30-OldPyCases/json/com_alltypedata.json -y") os.system("LD_LIBRARY_PATH=/usr/lib taos -s 'flush database curdb '") os.system("LD_LIBRARY_PATH=/usr/lib taos -s 'alter database curdb cachemodel \"both\" '") os.system("LD_LIBRARY_PATH=/usr/lib taos -s 'select count(*) from curdb.meters '") @@ -267,12 +267,12 @@ def prepareDataOnOldVersion(self, base_version, 
bPath,corss_major_version): consumer.close() - tdLog.info(" LD_LIBRARY_PATH=/usr/lib taosBenchmark -f cases/13-StreamProcessing/30-OldPyCases/json/compa4096.json -y ") - os.system("LD_LIBRARY_PATH=/usr/lib taosBenchmark -f cases/13-StreamProcessing/30-OldPyCases/json/compa4096.json -y") - os.system("LD_LIBRARY_PATH=/usr/lib taosBenchmark -f cases/13-StreamProcessing/30-OldPyCases/json/all_insertmode_alltypes.json -y") + tdLog.info(" LD_LIBRARY_PATH=/usr/lib taosBenchmark -f cases/41-StreamProcessing/30-OldPyCases/json/compa4096.json -y ") + os.system("LD_LIBRARY_PATH=/usr/lib taosBenchmark -f cases/41-StreamProcessing/30-OldPyCases/json/compa4096.json -y") + os.system("LD_LIBRARY_PATH=/usr/lib taosBenchmark -f cases/41-StreamProcessing/30-OldPyCases/json/all_insertmode_alltypes.json -y") # os.system("LD_LIBRARY_PATH=/usr/lib taos -s 'flush database db4096 '") - os.system("LD_LIBRARY_PATH=/usr/lib taos -f cases/13-StreamProcessing/30-OldPyCases/json/TS-3131.tsql") + os.system("LD_LIBRARY_PATH=/usr/lib taos -f cases/41-StreamProcessing/30-OldPyCases/json/TS-3131.tsql") # add deleted data os.system(f'LD_LIBRARY_PATH=/usr/lib taos -s "{deletedDataSql}" ') diff --git a/test/new_test_framework/utils/streamUtil.py b/test/new_test_framework/utils/streamUtil.py index 58ee74c049dd..fcce87fbbecc 100644 --- a/test/new_test_framework/utils/streamUtil.py +++ b/test/new_test_framework/utils/streamUtil.py @@ -993,7 +993,7 @@ def prepareViews( sql = f"create view view{v} as select cts, cint, cuint, cbigint, cubigint, cfloat, cdouble, cvarchar, csmallint, cusmallint, ctinyint, cutinyint, cbool, cnchar, cvarbinary, cgeometry from qdb.t{v}" tdSql.execute(sql) - # for StreamCheckItem, see cases/13-StreamProcessing/31-OldTsimCases/test_oldcase_twa.py + # for StreamCheckItem, see cases/41-StreamProcessing/31-OldTsimCases/test_oldcase_twa.py def checkAll(self, streams): for stream in streams: tdLog.info(f"stream:{stream.db} - create database, table, stream", color='blue')