Add search_session unit tests for the search option from the client cmd
This patch adds search_session unit tests that exercise the search option as it is sent from the client cmd. Change-Id: I7286610a652cc9552c26fb8b956846fc135f793f
This commit is contained in:
parent
8d7fe211fa
commit
1cc79e1567
|
@ -303,3 +303,111 @@ class DbSessionTestCase(base.DbTestCase):
|
|||
self.assertEqual(100, sessionmap['hold_off'])
|
||||
self.assertEqual('2018-12-12T00:00:00',
|
||||
sessionmap['schedule']['schedule_date'])
|
||||
|
||||
def test_session_list_with_search_with_all_opt_one_match(self):
|
||||
count = 0
|
||||
sessionids = []
|
||||
while (count < 20):
|
||||
doc = copy.deepcopy(self.fake_session_3)
|
||||
if count in [0, 4, 8, 12, 16]:
|
||||
doc['hold_off'] = 100
|
||||
session_id = self.dbapi.add_session(project_id=self.fake_session_3.
|
||||
get('project_id'),
|
||||
user_id=self.fake_session_3.
|
||||
get('user_id'),
|
||||
doc=doc)
|
||||
self.assertIsNotNone(session_id)
|
||||
sessionids.append(session_id)
|
||||
count += 1
|
||||
|
||||
search_opt = {'match': [{'_all': '[{"hold_off": 100}]'}]}
|
||||
result = self.dbapi.search_session(user_id=self.fake_session_3.
|
||||
get('user_id'),
|
||||
project_id=self.fake_session_3.
|
||||
get('project_id'),
|
||||
offset=0,
|
||||
limit=20,
|
||||
search=search_opt)
|
||||
|
||||
self.assertIsNotNone(result)
|
||||
self.assertEqual(len(result), 5)
|
||||
for index in range(len(result)):
|
||||
sessionmap = result[index]
|
||||
self.assertEqual(100, sessionmap['hold_off'])
|
||||
|
||||
def test_session_list_with_search_with_all_opt_two_matches(self):
|
||||
count = 0
|
||||
sessionids = []
|
||||
while (count < 20):
|
||||
doc = copy.deepcopy(self.fake_session_3)
|
||||
if count in [0, 4, 8, 12, 16]:
|
||||
doc['hold_off'] = 100
|
||||
if count in [4, 12]:
|
||||
doc['schedule']['schedule_date'] = '2018-12-12T00:00:00'
|
||||
session_id = self.dbapi.add_session(project_id=self.fake_session_3.
|
||||
get('project_id'),
|
||||
user_id=self.fake_session_3.
|
||||
get('user_id'),
|
||||
doc=doc)
|
||||
self.assertIsNotNone(session_id)
|
||||
sessionids.append(session_id)
|
||||
count += 1
|
||||
|
||||
search_opt = {'match': [{'_all': '[{"hold_off": 100},'
|
||||
'{"schedule_date": '
|
||||
'"2018-12-12T00:00:00"}]'}]}
|
||||
result = self.dbapi.search_session(user_id=self.fake_session_3.
|
||||
get('user_id'),
|
||||
project_id=self.fake_session_3.
|
||||
get('project_id'),
|
||||
offset=0,
|
||||
limit=20,
|
||||
search=search_opt)
|
||||
|
||||
self.assertIsNotNone(result)
|
||||
self.assertEqual(len(result), 2)
|
||||
for index in range(len(result)):
|
||||
sessionmap = result[index]
|
||||
self.assertEqual(100, sessionmap['hold_off'])
|
||||
self.assertEqual('2018-12-12T00:00:00',
|
||||
sessionmap['schedule']['schedule_date'])
|
||||
|
||||
def test_session_list_with_search_error_all_opt_return_alltuples(self):
|
||||
count = 0
|
||||
sessionids = []
|
||||
while (count < 20):
|
||||
doc = copy.deepcopy(self.fake_session_3)
|
||||
if count in [0, 4, 8, 12, 16]:
|
||||
doc['hold_off'] = 100
|
||||
session_id = self.dbapi.add_session(project_id=self.fake_session_3.
|
||||
get('project_id'),
|
||||
user_id=self.fake_session_3.
|
||||
get('user_id'),
|
||||
doc=doc)
|
||||
self.assertIsNotNone(session_id)
|
||||
sessionids.append(session_id)
|
||||
count += 1
|
||||
|
||||
search_opt = {'match': [{'_all': '{"hold_off": 100}'}]}
|
||||
result = self.dbapi.search_session(user_id=self.fake_session_3.
|
||||
get('user_id'),
|
||||
project_id=self.fake_session_3.
|
||||
get('project_id'),
|
||||
offset=0,
|
||||
limit=20,
|
||||
search=search_opt)
|
||||
|
||||
self.assertIsNotNone(result)
|
||||
self.assertEqual(len(result), 20)
|
||||
|
||||
search_opt = {'match': [{'_all': 'hold_off=100'}]}
|
||||
result = self.dbapi.search_session(user_id=self.fake_session_3.
|
||||
get('user_id'),
|
||||
project_id=self.fake_session_3.
|
||||
get('project_id'),
|
||||
offset=0,
|
||||
limit=20,
|
||||
search=search_opt)
|
||||
|
||||
self.assertIsNotNone(result)
|
||||
self.assertEqual(len(result), 20)
|
||||
|
|
Loading…
Reference in New Issue