Consume fetch stream output in tests
GregoryTravis committed Dec 16, 2024
1 parent 63ed629 commit 94287ad
Showing 1 changed file with 20 additions and 20 deletions.
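The change applied throughout the diff below is mechanical: each bare HTTP.fetch call in the cache tests gains a trailing `. decode_as_text`, so the response stream is actually consumed inside the test. A minimal sketch of the before/after pattern, assuming (as the commit title suggests, not something stated in the diff itself) that the request-count and cache-entry assertions are only reliable once the fetched stream has been read; `url0` and `get_num_response_cache_entries` are names taken from the tests below:

    # Before: the response is fetched but its body is never read.
    HTTP.fetch url0
    get_num_response_cache_entries . should_equal 1

    # After: decoding the body as text consumes the stream before the assertion runs.
    HTTP.fetch url0 . decode_as_text
    get_num_response_cache_entries . should_equal 1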
test/Table_Tests/src/IO/Fetch_Spec.enso (40 changes: 20 additions & 20 deletions)
@@ -228,16 +228,16 @@ add_specs suite_builder =
         group_builder.specify "Cache policy should work for HTTP.fetch" pending=pending_has_url <| Test.with_retries <|
             with_default_cache <|
                 expect_counts [0, 0] <|
-                    HTTP.fetch url0 cache_policy=Cache_Policy.No_Cache
-                    HTTP.fetch url1 cache_policy=Cache_Policy.No_Cache
+                    HTTP.fetch url0 cache_policy=Cache_Policy.No_Cache . decode_as_text
+                    HTTP.fetch url1 cache_policy=Cache_Policy.No_Cache . decode_as_text
                 expect_counts [0, 2] <|
-                    HTTP.fetch url0 cache_policy=Cache_Policy.Use_Cache
-                    HTTP.fetch url1 cache_policy=Cache_Policy.Use_Cache
+                    HTTP.fetch url0 cache_policy=Cache_Policy.Use_Cache . decode_as_text
+                    HTTP.fetch url1 cache_policy=Cache_Policy.Use_Cache . decode_as_text

             with_default_cache <|
                 expect_counts [0, 2] <|
-                    HTTP.fetch url0
-                    HTTP.fetch url1
+                    HTTP.fetch url0 . decode_as_text
+                    HTTP.fetch url1 . decode_as_text

         group_builder.specify "Cache policy should work for Data.fetch" pending=pending_has_url <| Test.with_retries <|
             with_default_cache <|
@@ -412,11 +412,11 @@ add_specs suite_builder =

         group_builder.specify "Should not cache if the request fails" pending=pending_has_url <| Test.with_retries <|
             with_default_cache <|
-                HTTP.fetch url0
+                HTTP.fetch url0 . decode_as_text
                 get_num_response_cache_entries . should_equal 1
-                HTTP.fetch base_url_with_slash+'crash'
+                HTTP.fetch base_url_with_slash+'crash' . decode_as_text
                 get_num_response_cache_entries . should_equal 1
-                HTTP.fetch base_url_with_slash+'nonexistent_endpoint'
+                HTTP.fetch base_url_with_slash+'nonexistent_endpoint' . decode_as_text
                 get_num_response_cache_entries . should_equal 1

         cloud_setup = Cloud_Tests_Setup.prepare
@@ -437,9 +437,9 @@ add_specs suite_builder =
                 . add_query_argument "arg1" secret2
                 . add_query_argument "arg2" "plain value"

-            HTTP.fetch url1
+            HTTP.fetch url1 . decode_as_text
             get_num_response_cache_entries . should_equal 1
-            HTTP.fetch uri2
+            HTTP.fetch uri2 . decode_as_text
             get_num_response_cache_entries . should_equal 2

         group_builder.specify "Should work with secrets in the headers" pending=pending_has_url <| Test.with_retries <|
@@ -455,9 +455,9 @@ add_specs suite_builder =
             headers1 = [Header.new "A-Header" secret1]
             headers2 = [Header.new "A-Header" secret2]

-            HTTP.fetch headers=headers1 uri
+            HTTP.fetch headers=headers1 uri . decode_as_text
             get_num_response_cache_entries . should_equal 1
-            HTTP.fetch headers=headers2 uri
+            HTTP.fetch headers=headers2 uri . decode_as_text
             get_num_response_cache_entries . should_equal 2

         group_builder.specify "Does not attempt to make room for the maximum file size when that is larger than the total cache size" pending=pending_has_url <| Test.with_retries <|
@@ -545,24 +545,24 @@ add_specs suite_builder =
             LRUCache.new . getSettings . getTotalCacheLimit . should_equal (TotalCacheLimit.Percentage.new 0.2)

         group_builder.specify "Cache should be cleared when a reload is detected" <|
-            HTTP.fetch base_url_with_slash+'test_download?length=10'
-            HTTP.fetch base_url_with_slash+'test_download?length=11'
-            HTTP.fetch base_url_with_slash+'test_download?length=12'
+            HTTP.fetch base_url_with_slash+'test_download?length=10' . decode_as_text
+            HTTP.fetch base_url_with_slash+'test_download?length=11' . decode_as_text
+            HTTP.fetch base_url_with_slash+'test_download?length=12' . decode_as_text
             get_num_response_cache_entries . should_equal 3

             fake_reload

             get_num_response_cache_entries . should_equal 3 # Cleaning is not triggered until the next request
-            HTTP.fetch base_url_with_slash+'test_download?length=10'
+            HTTP.fetch base_url_with_slash+'test_download?length=10' . decode_as_text
             get_num_response_cache_entries . should_equal 1
-            HTTP.fetch base_url_with_slash+'test_download?length=14'
-            HTTP.fetch base_url_with_slash+'test_download?length=15'
+            HTTP.fetch base_url_with_slash+'test_download?length=14' . decode_as_text
+            HTTP.fetch base_url_with_slash+'test_download?length=15' . decode_as_text
             get_num_response_cache_entries . should_equal 3

             fake_reload

             get_num_response_cache_entries . should_equal 3 # Cleaning is not triggered until the next request
-            HTTP.fetch base_url_with_slash+'test_download?length=16'
+            HTTP.fetch base_url_with_slash+'test_download?length=16' . decode_as_text
             get_num_response_cache_entries . should_equal 1

         group_builder.specify "Reissues the request if the cache file disappears" pending=pending_has_url <| Test.with_retries <|