changeset 3769:a75874df92b8

tests (e2e/cli/cache_search): make the test more resilient: caching is triggered by `li.blog.get`, but it runs in parallel without blocking the command, so the data may not be in the cache yet when the following check runs. A retry loop with a delay is used to make sure the cache is filled before doing the assertion.
author Goffi <goffi@goffi.org>
date Fri, 13 May 2022 19:27:21 +0200
parents b443821399a3
children f31113777881
files tests/e2e/libervia-cli/test_libervia-cli.py
diffstat 1 files changed, 22 insertions(+), 5 deletions(-)
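The change in the diff below applies a simple poll-and-retry pattern: query the cache, and if it is still empty, wait briefly and query again. A minimal generic sketch of that pattern follows; the `wait_for` helper name is hypothetical and not part of this changeset, while the attempt count and 0.5 s delay mirror the test code.

from time import sleep

def wait_for(fetch, attempts=5, delay=0.5):
    """Call ``fetch`` until it returns a non-empty result or attempts run out."""
    result = fetch()
    for __ in range(attempts - 1):
        if result:
            break
        # the cache is filled in parallel, so give the backend some time
        sleep(delay)
        result = fetch()
    return result

# usage (inside the test, hypothetical refactoring):
# found = wait_for(
#     lambda: li_json.pubsub.cache.search(type="blog", fts='Slovakia OR "New Caledonia"')
# )
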
--- a/tests/e2e/libervia-cli/test_libervia-cli.py	Fri May 13 19:24:38 2022 +0200
+++ b/tests/e2e/libervia-cli/test_libervia-cli.py	Fri May 13 19:27:21 2022 +0200
@@ -21,6 +21,7 @@
 import pytest
 import sh
 from sh import li
+from time import sleep
 from sat.tools.common import uri
 
 
@@ -168,19 +169,35 @@
             title=au_txt,
             syntax="markdown"
         )
-        # we get the blog to be activate the cache for it
+        # we get the blog to activate the cache for it
         li.blog.get(max_items=1)
         # FTS
-        found = li_json.pubsub.cache.search(type="blog", fts='Slovakia OR "New Caledonia"')
+        found = []
+        for __ in range(5):
+            found = li_json.pubsub.cache.search(
+                type="blog", fts='Slovakia OR "New Caledonia"'
+            )
+            if found:
+                break
+            else:
+                # retrieving blog triggers the caching, but it's done in parallel
+                # thus we may have nothing in cache yet
+                sleep(0.5)
         assert len(found) == 2
         assert all(i["content"] in (sk_txt, nc_txt) for i in found)
         # search by field
-        found = li_json.pubsub.cache.search("-F", "tags", "overlap", "travel", type="blog")
+        found = li_json.pubsub.cache.search(
+            "-F", "tags", "overlap", "travel", type="blog"
+        )
         assert len(found) == 4
-        found = li_json.pubsub.cache.search("-F", "tags", "overlap", "europe", type="blog")
+        found = li_json.pubsub.cache.search(
+            "-F", "tags", "overlap", "europe", type="blog"
+        )
         assert len(found) == 2
         assert all(i["content"] in (sk_txt, fr_txt) for i in found)
-        found = li_json.pubsub.cache.search("-F", "tags", "ioverlap", "SOUTH PACIFIC", type="blog")
+        found = li_json.pubsub.cache.search(
+            "-F", "tags", "ioverlap", "SOUTH PACIFIC", type="blog"
+        )
         assert all(i["content"] in (nc_txt, au_txt) for i in found)