diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Reload_Detector.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Reload_Detector.enso index 966975d0ecf7..3669c7eaacfe 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Reload_Detector.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Reload_Detector.enso @@ -18,7 +18,7 @@ type Reload_Detector private Value mr:Managed_Resource new -> Reload_Detector = - mr = Managed_Resource.register (Ref.new 1) (x-> Nothing) True + mr = Managed_Resource.register (Ref.new 1) (_-> Nothing) True Reload_Detector.Value mr has_reload_occurred self = diff --git a/std-bits/base/src/main/java/org/enso/base/cache/LRUCache.java b/std-bits/base/src/main/java/org/enso/base/cache/LRUCache.java index eba05b6c3345..50641c3b63cf 100644 --- a/std-bits/base/src/main/java/org/enso/base/cache/LRUCache.java +++ b/std-bits/base/src/main/java/org/enso/base/cache/LRUCache.java @@ -10,9 +10,11 @@ import java.util.ArrayList; import java.util.Comparator; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; import java.util.function.Predicate; @@ -61,6 +63,7 @@ public class LRUCache { /** Used to override cache parameters for testing. */ private final Map> cache = new HashMap<>(); + private final Map> opened = new HashMap<>(); private final Map lastUsed = new HashMap<>(); @@ -109,7 +112,7 @@ public CacheResult getResult(ItemBuilder itemBuilder) // a rare case so it seems unnecessary. 
logger.log( Level.WARNING, - "Error in cache file handling; will re-execute without caching: {}", + "Error in cache file handling; will re-execute without caching: {0}", e.getMessage()); Item rerequested = itemBuilder.buildItem(); return new CacheResult<>(rerequested.stream(), rerequested.metadata()); @@ -176,7 +179,23 @@ private CacheResult getResultForCacheEntry(String cacheKey) } markCacheEntryUsed(cacheKey); - return new CacheResult<>(new FileInputStream(cacheFile), cache.get(cacheKey).metadata()); + var fis = new FileInputStream(cacheFile) { + static int serial = 0; + int s; + { + s = serial; + serial++; + opened.computeIfAbsent(cacheKey, (k) -> new HashSet<>()).add(s); + System.out.println("AAA Opening " + cacheKey + " " + s); + } + public void close() throws IOException { + super.close(); + assert opened.containsKey(cacheKey) && opened.get(cacheKey).contains(s); + opened.get(cacheKey).remove(s); + System.out.println("AAA Closing " + cacheKey + " " + s); + } + }; + return new CacheResult<>(fis, cache.get(cacheKey).metadata()); } /** @@ -263,6 +282,7 @@ private void removeCacheEntry(String key, CacheEntry value) { /** Remove a cache file. 
*/ private void removeCacheFile(String key, CacheEntry cacheEntry) { boolean removed = cacheEntry.responseData.delete(); + System.out.println("AAA Deleting " + key + " " + opened.getOrDefault(key, Set.of()).isEmpty() + " " + opened.getOrDefault(key, Set.of())); if (!removed) { logger.log(Level.WARNING, "Unable to delete cache file for key {0}", key); } diff --git a/std-bits/base/src/main/java/org/enso/base/enso_cloud/EnsoSecretHelper.java b/std-bits/base/src/main/java/org/enso/base/enso_cloud/EnsoSecretHelper.java index 2e7b622e6848..16a0df9c1b65 100644 --- a/std-bits/base/src/main/java/org/enso/base/enso_cloud/EnsoSecretHelper.java +++ b/std-bits/base/src/main/java/org/enso/base/enso_cloud/EnsoSecretHelper.java @@ -163,7 +163,9 @@ public String hashKey() { keyStrings.add(resolvedHeader.getRight()); } - return Integer.toString(Arrays.deepHashCode(keyStrings.toArray())); + var cacheKey = Integer.toHexString(Arrays.deepHashCode(keyStrings.toArray())); + System.out.println("AAA ck " + cacheKey + " " + keyStrings); + return cacheKey; } @Override diff --git a/test/Table_Tests/src/IO/Fetch_Spec.enso b/test/Table_Tests/src/IO/Fetch_Spec.enso index b1c33445065f..a738be1efd20 100644 --- a/test/Table_Tests/src/IO/Fetch_Spec.enso +++ b/test/Table_Tests/src/IO/Fetch_Spec.enso @@ -228,16 +228,16 @@ add_specs suite_builder = group_builder.specify "Cache policy should work for HTTP.fetch" pending=pending_has_url <| Test.with_retries <| with_default_cache <| expect_counts [0, 0] <| - HTTP.fetch url0 cache_policy=Cache_Policy.No_Cache - HTTP.fetch url1 cache_policy=Cache_Policy.No_Cache + HTTP.fetch url0 cache_policy=Cache_Policy.No_Cache . decode_as_text + HTTP.fetch url1 cache_policy=Cache_Policy.No_Cache . decode_as_text expect_counts [0, 2] <| - HTTP.fetch url0 cache_policy=Cache_Policy.Use_Cache - HTTP.fetch url1 cache_policy=Cache_Policy.Use_Cache + HTTP.fetch url0 cache_policy=Cache_Policy.Use_Cache . decode_as_text + HTTP.fetch url1 cache_policy=Cache_Policy.Use_Cache . 
decode_as_text with_default_cache <| expect_counts [0, 2] <| - HTTP.fetch url0 - HTTP.fetch url1 + HTTP.fetch url0 . decode_as_text + HTTP.fetch url1 . decode_as_text group_builder.specify "Cache policy should work for Data.fetch" pending=pending_has_url <| Test.with_retries <| with_default_cache <| @@ -412,16 +412,16 @@ add_specs suite_builder = group_builder.specify "Should not cache if the request fails" pending=pending_has_url <| Test.with_retries <| with_default_cache <| - HTTP.fetch url0 + HTTP.fetch url0 . decode_as_text get_num_response_cache_entries . should_equal 1 - HTTP.fetch base_url_with_slash+'crash' + HTTP.fetch base_url_with_slash+'crash' . decode_as_text get_num_response_cache_entries . should_equal 1 - HTTP.fetch base_url_with_slash+'nonexistent_endpoint' + HTTP.fetch base_url_with_slash+'nonexistent_endpoint' . decode_as_text get_num_response_cache_entries . should_equal 1 cloud_setup = Cloud_Tests_Setup.prepare - group_builder.specify "Should work with secrets in the URI" pending=pending_has_url <| Test.with_retries <| + group_builder.specify "Should work with secrets in the URI" pending=pending_has_url <| Test.with_retries <| cloud_setup.with_prepared_environment <| secret1 = Enso_Secret.create "http-cache-secret-1-"+Random.uuid "My Value" secret2 = Enso_Secret.create "http-cache-secret-2-"+Random.uuid "Some Value" @@ -437,12 +437,12 @@ add_specs suite_builder = . add_query_argument "arg1" secret2 . add_query_argument "arg2" "plain value" - HTTP.fetch url1 + HTTP.fetch url1 . decode_as_text get_num_response_cache_entries . should_equal 1 - HTTP.fetch uri2 + HTTP.fetch uri2 . decode_as_text get_num_response_cache_entries . 
should_equal 2 - group_builder.specify "Should work with secrets in the headers" pending=pending_has_url <| Test.with_retries <| + group_builder.specify "Should work with secrets in the headers" pending=pending_has_url <| Test.with_retries <| cloud_setup.with_prepared_environment <| secret1 = Enso_Secret.create "http-cache-secret-1-"+Random.uuid "My Value" secret2 = Enso_Secret.create "http-cache-secret-2-"+Random.uuid "Some Value" @@ -455,9 +455,9 @@ add_specs suite_builder = headers1 = [Header.new "A-Header" secret1] headers2 = [Header.new "A-Header" secret2] - HTTP.fetch headers=headers1 uri + HTTP.fetch headers=headers1 uri . decode_as_text get_num_response_cache_entries . should_equal 1 - HTTP.fetch headers=headers2 uri + HTTP.fetch headers=headers2 uri . decode_as_text get_num_response_cache_entries . should_equal 2 group_builder.specify "Does not attempt to make room for the maximum file size when that is larger than the total cache size" pending=pending_has_url <| Test.with_retries <| @@ -545,24 +545,24 @@ add_specs suite_builder = LRUCache.new . getSettings . getTotalCacheLimit . should_equal (TotalCacheLimit.Percentage.new 0.2) group_builder.specify "Cache should be cleared when a reload is detected" <| - HTTP.fetch base_url_with_slash+'test_download?length=10' - HTTP.fetch base_url_with_slash+'test_download?length=11' - HTTP.fetch base_url_with_slash+'test_download?length=12' + HTTP.fetch base_url_with_slash+'test_download?length=10' . decode_as_text + HTTP.fetch base_url_with_slash+'test_download?length=11' . decode_as_text + HTTP.fetch base_url_with_slash+'test_download?length=12' . decode_as_text get_num_response_cache_entries . should_equal 3 fake_reload get_num_response_cache_entries . should_equal 3 # Cleaning is not triggered until the next request - HTTP.fetch base_url_with_slash+'test_download?length=10' + HTTP.fetch base_url_with_slash+'test_download?length=10' . decode_as_text get_num_response_cache_entries . 
should_equal 1 - HTTP.fetch base_url_with_slash+'test_download?length=14' - HTTP.fetch base_url_with_slash+'test_download?length=15' + HTTP.fetch base_url_with_slash+'test_download?length=14' . decode_as_text + HTTP.fetch base_url_with_slash+'test_download?length=15' . decode_as_text get_num_response_cache_entries . should_equal 3 fake_reload get_num_response_cache_entries . should_equal 3 # Cleaning is not triggered until the next request - HTTP.fetch base_url_with_slash+'test_download?length=16' + HTTP.fetch base_url_with_slash+'test_download?length=16' . decode_as_text get_num_response_cache_entries . should_equal 1 group_builder.specify "Reissues the request if the cache file disappears" pending=pending_has_url <| Test.with_retries <|