From 3afcb42557b8b22886cc17caa05d242f049e9dbe Mon Sep 17 00:00:00 2001 From: Sagar Upadhyaya Date: Fri, 12 Jan 2024 10:31:07 -0800 Subject: [PATCH 01/22] [Tiered caching] Integrating ehcache disk cache Signed-off-by: Sagar Upadhyaya --- buildSrc/version.properties | 1 + .../licenses/slf4j-api-1.7.36.jar.sha1 | 1 - .../licenses/slf4j-api-LICENSE.txt | 21 - .../licenses/slf4j-api-NOTICE.txt | 0 .../licenses/slf4j-api-1.7.36.jar.sha1 | 1 - .../licenses/slf4j-api-LICENSE.txt | 21 - .../licenses/slf4j-api-NOTICE.txt | 0 .../licenses/slf4j-api-1.7.36.jar.sha1 | 1 - .../licenses/slf4j-api-LICENSE.txt | 21 - .../licenses/slf4j-api-NOTICE.txt | 0 .../licenses/slf4j-api-1.7.36.jar.sha1 | 1 - .../licenses/slf4j-api-LICENSE.txt | 21 - .../licenses/slf4j-api-NOTICE.txt | 0 .../licenses/slf4j-api-1.7.36.jar.sha1 | 1 - .../licenses/slf4j-api-LICENSE.txt | 21 - .../licenses/slf4j-api-NOTICE.txt | 0 .../licenses/slf4j-api-1.7.36.jar.sha1 | 1 - .../licenses/slf4j-api-LICENSE.txt | 21 - .../licenses/slf4j-api-NOTICE.txt | 0 server/build.gradle | 3 + server/licenses/ehcache-3.10.8.jar.sha1 | 1 + server/licenses/ehcache-LICENSE.txt | 201 +++++++ server/licenses/ehcache-NOTICE.txt | 5 + .../licenses/slf4j-api-1.7.36.jar.sha1 | 0 .../licenses/slf4j-api-LICENSE.txt | 0 .../licenses/slf4j-api-NOTICE.txt | 0 .../org/opensearch/common/cache/ICache.java | 6 + .../common/cache/stats/CacheStats.java | 18 + .../common/cache/stats/package-info.java | 9 + .../common/cache/store/EhCacheDiskCache.java | 541 ++++++++++++++++++ .../cache/store/OpenSearchOnHeapCache.java | 23 +- .../cache/tier/TieredSpilloverCache.java | 35 +- .../org/opensearch/bootstrap/security.policy | 4 + .../cache/store/EhCacheDiskCacheTests.java | 469 +++++++++++++++ .../cache/tier/TieredSpilloverCacheTests.java | 11 + 35 files changed, 1321 insertions(+), 138 deletions(-) delete mode 100644 plugins/discovery-ec2/licenses/slf4j-api-1.7.36.jar.sha1 delete mode 100644 plugins/discovery-ec2/licenses/slf4j-api-LICENSE.txt delete 
mode 100644 plugins/discovery-ec2/licenses/slf4j-api-NOTICE.txt delete mode 100644 plugins/identity-shiro/licenses/slf4j-api-1.7.36.jar.sha1 delete mode 100644 plugins/identity-shiro/licenses/slf4j-api-LICENSE.txt delete mode 100644 plugins/identity-shiro/licenses/slf4j-api-NOTICE.txt delete mode 100644 plugins/ingest-attachment/licenses/slf4j-api-1.7.36.jar.sha1 delete mode 100644 plugins/ingest-attachment/licenses/slf4j-api-LICENSE.txt delete mode 100644 plugins/ingest-attachment/licenses/slf4j-api-NOTICE.txt delete mode 100644 plugins/repository-azure/licenses/slf4j-api-1.7.36.jar.sha1 delete mode 100644 plugins/repository-azure/licenses/slf4j-api-LICENSE.txt delete mode 100644 plugins/repository-azure/licenses/slf4j-api-NOTICE.txt delete mode 100644 plugins/repository-hdfs/licenses/slf4j-api-1.7.36.jar.sha1 delete mode 100644 plugins/repository-hdfs/licenses/slf4j-api-LICENSE.txt delete mode 100644 plugins/repository-hdfs/licenses/slf4j-api-NOTICE.txt delete mode 100644 plugins/repository-s3/licenses/slf4j-api-1.7.36.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/slf4j-api-LICENSE.txt delete mode 100644 plugins/repository-s3/licenses/slf4j-api-NOTICE.txt create mode 100644 server/licenses/ehcache-3.10.8.jar.sha1 create mode 100644 server/licenses/ehcache-LICENSE.txt create mode 100644 server/licenses/ehcache-NOTICE.txt rename {plugins/crypto-kms => server}/licenses/slf4j-api-1.7.36.jar.sha1 (100%) rename {plugins/crypto-kms => server}/licenses/slf4j-api-LICENSE.txt (100%) rename {plugins/crypto-kms => server}/licenses/slf4j-api-NOTICE.txt (100%) create mode 100644 server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java create mode 100644 server/src/main/java/org/opensearch/common/cache/stats/package-info.java create mode 100644 server/src/main/java/org/opensearch/common/cache/store/EhCacheDiskCache.java create mode 100644 server/src/test/java/org/opensearch/common/cache/store/EhCacheDiskCacheTests.java diff --git 
a/buildSrc/version.properties b/buildSrc/version.properties index 3813750507f18..63b74d5f0200d 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -72,3 +72,4 @@ resteasy = 6.2.4.Final # opentelemetry dependencies opentelemetry = 1.32.0 opentelemetrysemconv = 1.23.1-alpha +ehcache = 3.10.8 diff --git a/plugins/discovery-ec2/licenses/slf4j-api-1.7.36.jar.sha1 b/plugins/discovery-ec2/licenses/slf4j-api-1.7.36.jar.sha1 deleted file mode 100644 index 77b9917528382..0000000000000 --- a/plugins/discovery-ec2/licenses/slf4j-api-1.7.36.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6c62681a2f655b49963a5983b8b0950a6120ae14 \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/slf4j-api-LICENSE.txt b/plugins/discovery-ec2/licenses/slf4j-api-LICENSE.txt deleted file mode 100644 index 2be7689435062..0000000000000 --- a/plugins/discovery-ec2/licenses/slf4j-api-LICENSE.txt +++ /dev/null @@ -1,21 +0,0 @@ -Copyright (c) 2004-2022 QOS.ch -All rights reserved. - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/slf4j-api-NOTICE.txt b/plugins/discovery-ec2/licenses/slf4j-api-NOTICE.txt deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/plugins/identity-shiro/licenses/slf4j-api-1.7.36.jar.sha1 b/plugins/identity-shiro/licenses/slf4j-api-1.7.36.jar.sha1 deleted file mode 100644 index 77b9917528382..0000000000000 --- a/plugins/identity-shiro/licenses/slf4j-api-1.7.36.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6c62681a2f655b49963a5983b8b0950a6120ae14 \ No newline at end of file diff --git a/plugins/identity-shiro/licenses/slf4j-api-LICENSE.txt b/plugins/identity-shiro/licenses/slf4j-api-LICENSE.txt deleted file mode 100644 index 8fda22f4d72f6..0000000000000 --- a/plugins/identity-shiro/licenses/slf4j-api-LICENSE.txt +++ /dev/null @@ -1,21 +0,0 @@ -Copyright (c) 2004-2014 QOS.ch -All rights reserved. - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/plugins/identity-shiro/licenses/slf4j-api-NOTICE.txt b/plugins/identity-shiro/licenses/slf4j-api-NOTICE.txt deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/plugins/ingest-attachment/licenses/slf4j-api-1.7.36.jar.sha1 b/plugins/ingest-attachment/licenses/slf4j-api-1.7.36.jar.sha1 deleted file mode 100644 index 77b9917528382..0000000000000 --- a/plugins/ingest-attachment/licenses/slf4j-api-1.7.36.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6c62681a2f655b49963a5983b8b0950a6120ae14 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/slf4j-api-LICENSE.txt b/plugins/ingest-attachment/licenses/slf4j-api-LICENSE.txt deleted file mode 100644 index 8fda22f4d72f6..0000000000000 --- a/plugins/ingest-attachment/licenses/slf4j-api-LICENSE.txt +++ /dev/null @@ -1,21 +0,0 @@ -Copyright (c) 2004-2014 QOS.ch -All rights reserved. - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/plugins/ingest-attachment/licenses/slf4j-api-NOTICE.txt b/plugins/ingest-attachment/licenses/slf4j-api-NOTICE.txt deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/plugins/repository-azure/licenses/slf4j-api-1.7.36.jar.sha1 b/plugins/repository-azure/licenses/slf4j-api-1.7.36.jar.sha1 deleted file mode 100644 index 77b9917528382..0000000000000 --- a/plugins/repository-azure/licenses/slf4j-api-1.7.36.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6c62681a2f655b49963a5983b8b0950a6120ae14 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/slf4j-api-LICENSE.txt b/plugins/repository-azure/licenses/slf4j-api-LICENSE.txt deleted file mode 100644 index 8fda22f4d72f6..0000000000000 --- a/plugins/repository-azure/licenses/slf4j-api-LICENSE.txt +++ /dev/null @@ -1,21 +0,0 @@ -Copyright (c) 2004-2014 QOS.ch -All rights reserved. - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/plugins/repository-azure/licenses/slf4j-api-NOTICE.txt b/plugins/repository-azure/licenses/slf4j-api-NOTICE.txt deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/plugins/repository-hdfs/licenses/slf4j-api-1.7.36.jar.sha1 b/plugins/repository-hdfs/licenses/slf4j-api-1.7.36.jar.sha1 deleted file mode 100644 index 77b9917528382..0000000000000 --- a/plugins/repository-hdfs/licenses/slf4j-api-1.7.36.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6c62681a2f655b49963a5983b8b0950a6120ae14 \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/slf4j-api-LICENSE.txt b/plugins/repository-hdfs/licenses/slf4j-api-LICENSE.txt deleted file mode 100644 index 8fda22f4d72f6..0000000000000 --- a/plugins/repository-hdfs/licenses/slf4j-api-LICENSE.txt +++ /dev/null @@ -1,21 +0,0 @@ -Copyright (c) 2004-2014 QOS.ch -All rights reserved. - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/plugins/repository-hdfs/licenses/slf4j-api-NOTICE.txt b/plugins/repository-hdfs/licenses/slf4j-api-NOTICE.txt deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/plugins/repository-s3/licenses/slf4j-api-1.7.36.jar.sha1 b/plugins/repository-s3/licenses/slf4j-api-1.7.36.jar.sha1 deleted file mode 100644 index 77b9917528382..0000000000000 --- a/plugins/repository-s3/licenses/slf4j-api-1.7.36.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6c62681a2f655b49963a5983b8b0950a6120ae14 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/slf4j-api-LICENSE.txt b/plugins/repository-s3/licenses/slf4j-api-LICENSE.txt deleted file mode 100644 index 8fda22f4d72f6..0000000000000 --- a/plugins/repository-s3/licenses/slf4j-api-LICENSE.txt +++ /dev/null @@ -1,21 +0,0 @@ -Copyright (c) 2004-2014 QOS.ch -All rights reserved. - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/plugins/repository-s3/licenses/slf4j-api-NOTICE.txt b/plugins/repository-s3/licenses/slf4j-api-NOTICE.txt deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/server/build.gradle b/server/build.gradle index e36498bf1038b..85f16cee83249 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -124,6 +124,9 @@ dependencies { api "com.google.protobuf:protobuf-java:${versions.protobuf}" api "jakarta.annotation:jakarta.annotation-api:${versions.jakarta_annotation}" + api "org.ehcache:ehcache:${versions.ehcache}" + api "org.slf4j:slf4j-api:${versions.slf4j}" + testImplementation(project(":test:framework")) { // tests use the locally compiled version of server exclude group: 'org.opensearch', module: 'server' diff --git a/server/licenses/ehcache-3.10.8.jar.sha1 b/server/licenses/ehcache-3.10.8.jar.sha1 new file mode 100644 index 0000000000000..dee07e9238ebf --- /dev/null +++ b/server/licenses/ehcache-3.10.8.jar.sha1 @@ -0,0 +1 @@ +f0d50ede46609db78413ca7f4250d348a597b101 \ No newline at end of file diff --git a/server/licenses/ehcache-LICENSE.txt b/server/licenses/ehcache-LICENSE.txt new file mode 100644 index 0000000000000..8dada3edaf50d --- /dev/null +++ b/server/licenses/ehcache-LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/server/licenses/ehcache-NOTICE.txt b/server/licenses/ehcache-NOTICE.txt new file mode 100644 index 0000000000000..1dbd38242cc98 --- /dev/null +++ b/server/licenses/ehcache-NOTICE.txt @@ -0,0 +1,5 @@ +Ehcache V3 +Copyright 2014-2023 Terracotta, Inc. 
+ +The product includes software from the Apache Commons Lang project, +under the Apache License 2.0 (see: org.ehcache.impl.internal.classes.commonslang) diff --git a/plugins/crypto-kms/licenses/slf4j-api-1.7.36.jar.sha1 b/server/licenses/slf4j-api-1.7.36.jar.sha1 similarity index 100% rename from plugins/crypto-kms/licenses/slf4j-api-1.7.36.jar.sha1 rename to server/licenses/slf4j-api-1.7.36.jar.sha1 diff --git a/plugins/crypto-kms/licenses/slf4j-api-LICENSE.txt b/server/licenses/slf4j-api-LICENSE.txt similarity index 100% rename from plugins/crypto-kms/licenses/slf4j-api-LICENSE.txt rename to server/licenses/slf4j-api-LICENSE.txt diff --git a/plugins/crypto-kms/licenses/slf4j-api-NOTICE.txt b/server/licenses/slf4j-api-NOTICE.txt similarity index 100% rename from plugins/crypto-kms/licenses/slf4j-api-NOTICE.txt rename to server/licenses/slf4j-api-NOTICE.txt diff --git a/server/src/main/java/org/opensearch/common/cache/ICache.java b/server/src/main/java/org/opensearch/common/cache/ICache.java index c6ea5fca1a8fe..0eb778034e417 100644 --- a/server/src/main/java/org/opensearch/common/cache/ICache.java +++ b/server/src/main/java/org/opensearch/common/cache/ICache.java @@ -8,6 +8,8 @@ package org.opensearch.common.cache; +import org.opensearch.common.cache.stats.CacheStats; + /** * Represents a cache interface. * @param Type of key. @@ -31,4 +33,8 @@ public interface ICache { long count(); void refresh(); + + void close(); + + CacheStats stats(); } diff --git a/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java b/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java new file mode 100644 index 0000000000000..a952a2485ed23 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java @@ -0,0 +1,18 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.common.cache.stats; + +/** + * Interface for any cache specific stats. + * TODO: Add rest of stats like hits/misses. + */ +public interface CacheStats { + // Provides the number of entries in cache. + long count(); +} diff --git a/server/src/main/java/org/opensearch/common/cache/stats/package-info.java b/server/src/main/java/org/opensearch/common/cache/stats/package-info.java new file mode 100644 index 0000000000000..08aef5a9b3e88 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/stats/package-info.java @@ -0,0 +1,9 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ +/** Base package for stats related classes */ +package org.opensearch.common.cache.stats; diff --git a/server/src/main/java/org/opensearch/common/cache/store/EhCacheDiskCache.java b/server/src/main/java/org/opensearch/common/cache/store/EhCacheDiskCache.java new file mode 100644 index 0000000000000..f6315dc78eadf --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/store/EhCacheDiskCache.java @@ -0,0 +1,541 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.common.cache.store; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.OpenSearchException; +import org.opensearch.common.cache.LoadAwareCacheLoader; +import org.opensearch.common.cache.RemovalReason; +import org.opensearch.common.cache.stats.CacheStats; +import org.opensearch.common.cache.store.builders.StoreAwareCacheBuilder; +import org.opensearch.common.cache.store.enums.CacheStoreType; +import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; +import org.opensearch.common.collect.Tuple; +import org.opensearch.common.metrics.CounterMetric; +import org.opensearch.common.settings.Setting; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.unit.TimeValue; + +import java.io.File; +import java.time.Duration; +import java.util.Iterator; +import java.util.Map; +import java.util.NoSuchElementException; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ExecutionException; +import java.util.function.BiFunction; +import java.util.function.Supplier; + +import org.ehcache.Cache; +import org.ehcache.CachePersistenceException; +import org.ehcache.PersistentCacheManager; +import org.ehcache.config.builders.CacheConfigurationBuilder; +import org.ehcache.config.builders.CacheEventListenerConfigurationBuilder; +import org.ehcache.config.builders.CacheManagerBuilder; +import org.ehcache.config.builders.PooledExecutionServiceConfigurationBuilder; +import org.ehcache.config.builders.ResourcePoolsBuilder; +import org.ehcache.config.units.MemoryUnit; +import org.ehcache.event.CacheEvent; +import org.ehcache.event.CacheEventListener; +import org.ehcache.event.EventType; +import org.ehcache.expiry.ExpiryPolicy; +import org.ehcache.impl.config.store.disk.OffHeapDiskStoreConfiguration; +import org.ehcache.spi.loaderwriter.CacheLoadingException; 
+import org.ehcache.spi.loaderwriter.CacheWritingException; + +/** + * This variant of disk cache uses Ehcache underneath. + * @param Type of key. + * @param Type of value. + * + * @opensearch.experimental + * + */ +public class EhCacheDiskCache implements StoreAwareCache { + + private static final Logger logger = LogManager.getLogger(EhCacheDiskCache.class); + + // A Cache manager can create many caches. + private final PersistentCacheManager cacheManager; + + // Disk cache + private Cache cache; + private final long maxWeightInBytes; + private final String storagePath; + + private final Class keyType; + + private final Class valueType; + + private final TimeValue expireAfterAccess; + + private final DiskCacheStats stats = new DiskCacheStats(); + + private final EhCacheEventListener ehCacheEventListener; + + private final String threadPoolAlias; + + private final Settings settings; + + private final static String DISK_CACHE_ALIAS = "ehDiskCache"; + + private final static String THREAD_POOL_ALIAS_PREFIX = "ehcachePool"; + + private final static int MINIMUM_MAX_SIZE_IN_BYTES = 1024 * 100; // 100KB + + // Ehcache disk write minimum threads for its pool + public final Setting DISK_WRITE_MINIMUM_THREADS; + + // Ehcache disk write maximum threads for its pool + public final Setting DISK_WRITE_MAXIMUM_THREADS; + + // Not be to confused with number of disk segments, this is different. Defines + // distinct write queues created for disk store where a group of segments share a write queue. This is + // implemented with ehcache using a partitioned thread pool exectutor By default all segments share a single write + // queue ie write concurrency is 1. Check OffHeapDiskStoreConfiguration and DiskWriteThreadPool. + public final Setting DISK_WRITE_CONCURRENCY; + + // Defines how many segments the disk cache is separated into. Higher number achieves greater concurrency but + // will hold that many file pointers. Default is 16. 
+ public final Setting DISK_SEGMENTS; + + private final StoreAwareCacheEventListener eventListener; + + /** + * Used in computeIfAbsent to synchronize loading of a given key. This is needed as ehcache doesn't provide a + * computeIfAbsent method. + */ + Map>> completableFutureMap = new ConcurrentHashMap<>(); + + private EhCacheDiskCache(Builder builder) { + this.keyType = Objects.requireNonNull(builder.keyType, "Key type shouldn't be null"); + this.valueType = Objects.requireNonNull(builder.valueType, "Value type shouldn't be null"); + this.expireAfterAccess = Objects.requireNonNull(builder.getExpireAfterAcess(), "ExpireAfterAccess value shouldn't " + "be null"); + this.maxWeightInBytes = builder.getMaxWeightInBytes(); + if (this.maxWeightInBytes <= MINIMUM_MAX_SIZE_IN_BYTES) { + throw new IllegalArgumentException("Ehcache Disk tier cache size should be greater than " + MINIMUM_MAX_SIZE_IN_BYTES); + } + this.storagePath = Objects.requireNonNull(builder.storagePath, "Storage path shouldn't be null"); + if (builder.threadPoolAlias == null || builder.threadPoolAlias.isBlank()) { + this.threadPoolAlias = THREAD_POOL_ALIAS_PREFIX + "DiskWrite"; + } else { + this.threadPoolAlias = builder.threadPoolAlias; + } + this.settings = Objects.requireNonNull(builder.settings, "Settings objects shouldn't be null"); + Objects.requireNonNull(builder.settingPrefix, "Setting prefix shouldn't be null"); + this.DISK_WRITE_MINIMUM_THREADS = Setting.intSetting(builder.settingPrefix + ".tier.disk.ehcache.min_threads", 2, 1, 5); + this.DISK_WRITE_MAXIMUM_THREADS = Setting.intSetting(builder.settingPrefix + ".tier.disk.ehcache.max_threads", 2, 1, 20); + // Default value is 1 within EhCache. + this.DISK_WRITE_CONCURRENCY = Setting.intSetting(builder.settingPrefix + ".tier.disk.ehcache.concurrency", 2, 1, 3); + // Default value is 16 within Ehcache. 
+ this.DISK_SEGMENTS = Setting.intSetting(builder.settingPrefix + "tier.disk.ehcache.segments", 16, 1, 32); + this.cacheManager = buildCacheManager(); + Objects.requireNonNull(builder.getEventListener(), "Listener can't be null"); + this.eventListener = builder.getEventListener(); + this.ehCacheEventListener = new EhCacheEventListener(builder.getEventListener()); + this.cache = buildCache(Duration.ofMillis(expireAfterAccess.getMillis()), builder); + } + + private PersistentCacheManager buildCacheManager() { + // In case we use multiple ehCaches, we can define this cache manager at a global level. + return CacheManagerBuilder.newCacheManagerBuilder() + .with(CacheManagerBuilder.persistence(new File(storagePath))) + .using( + PooledExecutionServiceConfigurationBuilder.newPooledExecutionServiceConfigurationBuilder() + .defaultPool(THREAD_POOL_ALIAS_PREFIX + "Default", 1, 3) // Default pool used for other tasks like + // event listeners + .pool(this.threadPoolAlias, DISK_WRITE_MINIMUM_THREADS.get(settings), DISK_WRITE_MAXIMUM_THREADS.get(settings)) + .build() + ) + .build(true); + } + + private Cache buildCache(Duration expireAfterAccess, Builder builder) { + return this.cacheManager.createCache( + DISK_CACHE_ALIAS, + CacheConfigurationBuilder.newCacheConfigurationBuilder( + this.keyType, + this.valueType, + ResourcePoolsBuilder.newResourcePoolsBuilder().disk(maxWeightInBytes, MemoryUnit.B) + ).withExpiry(new ExpiryPolicy<>() { + @Override + public Duration getExpiryForCreation(K key, V value) { + return INFINITE; + } + + @Override + public Duration getExpiryForAccess(K key, Supplier value) { + return expireAfterAccess; + } + + @Override + public Duration getExpiryForUpdate(K key, Supplier oldValue, V newValue) { + return INFINITE; + } + }) + .withService(getListenerConfiguration(builder)) + .withService( + new OffHeapDiskStoreConfiguration( + this.threadPoolAlias, + DISK_WRITE_CONCURRENCY.get(settings), + DISK_SEGMENTS.get(settings) + ) + ) + ); + } + + private 
CacheEventListenerConfigurationBuilder getListenerConfiguration(Builder builder) { + CacheEventListenerConfigurationBuilder configurationBuilder = CacheEventListenerConfigurationBuilder.newEventListenerConfiguration( + this.ehCacheEventListener, + EventType.EVICTED, + EventType.EXPIRED, + EventType.REMOVED, + EventType.UPDATED, + EventType.CREATED + ).unordered(); + if (builder.isEventListenerModeSync) { + return configurationBuilder.synchronous(); + } else { + return configurationBuilder.asynchronous(); + } + } + + // Package private for testing + Map>> getCompletableFutureMap() { + return completableFutureMap; + } + + @Override + public V get(K key) { + if (key == null) { + throw new IllegalArgumentException("Key passed to ehcache disk cache was null."); + } + // Optimize it by adding key store. + V value; + try { + value = cache.get(key); + } catch (CacheLoadingException ex) { + throw new OpenSearchException("Exception occurred while trying to fetch item from ehcache disk cache"); + } + if (value != null) { + eventListener.onHit(key, value, CacheStoreType.DISK); + } else { + eventListener.onMiss(key, CacheStoreType.DISK); + } + return value; + } + + @Override + public void put(K key, V value) { + try { + cache.put(key, value); + } catch (CacheWritingException ex) { + throw new OpenSearchException("Exception occurred while put item to ehcache disk cache"); + } + } + + @Override + public V computeIfAbsent(K key, LoadAwareCacheLoader loader) throws Exception { + // Ehache doesn't provide any computeIfAbsent function. Exposes putIfAbsent but that works differently and is + // not performant in case there are multiple concurrent request for same key. Below is our own custom + // implementation of computeIfAbsent on top of ehcache. Inspired by OpenSearch Cache implementation. 
+ V value = cache.get(key); + if (value == null) { + value = compute(key, loader); + } + if (!loader.isLoaded()) { + eventListener.onHit(key, value, CacheStoreType.DISK); + } else { + eventListener.onMiss(key, CacheStoreType.DISK); + } + return value; + } + + private V compute(K key, LoadAwareCacheLoader loader) throws Exception { + // A future that returns a pair of key/value. + CompletableFuture> completableFuture = new CompletableFuture<>(); + // Only one of the threads will succeed putting a future into map for the same key. + // Rest will fetch existing future. + CompletableFuture> future = completableFutureMap.putIfAbsent(key, completableFuture); + // Handler to handle results post processing. Takes a tuple or exception as an input and returns + // the value. Also before returning value, puts the value in cache. + BiFunction, Throwable, V> handler = (pair, ex) -> { + V value = null; + if (pair != null) { + cache.put(pair.v1(), pair.v2()); + value = pair.v2(); // Returning a value itself assuming that a next get should return the same. Should + // be safe to assume if we got no exception and reached here. + } + completableFutureMap.remove(key); // Remove key from map as not needed anymore. 
+ return value; + }; + CompletableFuture completableValue; + if (future == null) { + future = completableFuture; + completableValue = future.handle(handler); + V value; + try { + value = loader.load(key); + } catch (Exception ex) { + future.completeExceptionally(ex); + throw new ExecutionException(ex); + } + if (value == null) { + NullPointerException npe = new NullPointerException("loader returned a null value"); + future.completeExceptionally(npe); + throw new ExecutionException(npe); + } else { + future.complete(new Tuple<>(key, value)); + } + + } else { + completableValue = future.handle(handler); + } + V value; + try { + value = completableValue.get(); + if (future.isCompletedExceptionally()) { + future.get(); // call get to force the exception to be thrown for other concurrent callers + throw new IllegalStateException("Future completed exceptionally but no error thrown"); + } + } catch (InterruptedException ex) { + throw new IllegalStateException(ex); + } + return value; + } + + @Override + public void invalidate(K key) { + // There seems to be an thread leak issue while calling this and then closing cache. + try { + cache.remove(key); + } catch (CacheWritingException ex) { + // Handle + throw new RuntimeException(ex); + } + } + + @Override + public void invalidateAll() { + // TODO + } + + @Override + public Iterable keys() { + return () -> new EhCacheKeyIterator<>(cache.iterator()); + } + + @Override + public long count() { + return stats.count(); + } + + @Override + public void refresh() { + // TODO: ehcache doesn't provide a way to refresh a cache. 
+ } + + @Override + public CacheStoreType getTierType() { + return CacheStoreType.DISK; + } + + @Override + public void close() { + cacheManager.removeCache(DISK_CACHE_ALIAS); + cacheManager.close(); + try { + cacheManager.destroyCache(DISK_CACHE_ALIAS); + } catch (CachePersistenceException e) { + throw new OpenSearchException("Exception occurred while destroying ehcache and associated data", e); + } + } + + @Override + public CacheStats stats() { + return stats; + } + + /** + * Stats related to disk cache. + */ + class DiskCacheStats implements CacheStats { + private CounterMetric count = new CounterMetric(); + + @Override + public long count() { + return count.count(); + } + } + + /** + * Wrapper over Ehcache original listener to listen to desired events and notify desired subscribers. + * @param Type of key + * @param Type of value + */ + class EhCacheEventListener implements CacheEventListener { + + private final StoreAwareCacheEventListener eventListener; + + EhCacheEventListener(StoreAwareCacheEventListener eventListener) { + this.eventListener = eventListener; + } + + @Override + public void onEvent(CacheEvent event) { + switch (event.getType()) { + case CREATED: + stats.count.inc(); + this.eventListener.onCached(event.getKey(), event.getNewValue(), CacheStoreType.DISK); + assert event.getOldValue() == null; + break; + case EVICTED: + this.eventListener.onRemoval( + new StoreAwareCacheRemovalNotification<>( + event.getKey(), + event.getOldValue(), + RemovalReason.EVICTED, + CacheStoreType.DISK + ) + ); + stats.count.dec(); + assert event.getNewValue() == null; + break; + case REMOVED: + stats.count.dec(); + this.eventListener.onRemoval( + new StoreAwareCacheRemovalNotification<>( + event.getKey(), + event.getOldValue(), + RemovalReason.EXPLICIT, + CacheStoreType.DISK + ) + ); + assert event.getNewValue() == null; + break; + case EXPIRED: + this.eventListener.onRemoval( + new StoreAwareCacheRemovalNotification<>( + event.getKey(), + event.getOldValue(), + 
RemovalReason.INVALIDATED, + CacheStoreType.DISK + ) + ); + stats.count.dec(); + assert event.getNewValue() == null; + break; + case UPDATED: + break; + default: + break; + } + } + } + + /** + * This iterator wraps ehCache iterator and only iterates over its keys. + * @param Type of key + */ + class EhCacheKeyIterator implements Iterator { + + Iterator> iterator; + + EhCacheKeyIterator(Iterator> iterator) { + this.iterator = iterator; + } + + @Override + public boolean hasNext() { + return iterator.hasNext(); + } + + @Override + public K next() { + if (!hasNext()) { + throw new NoSuchElementException(); + } + return iterator.next().getKey(); + } + } + + /** + * Builder object to build Ehcache disk tier. + * @param Type of key + * @param Type of value + */ + public static class Builder extends StoreAwareCacheBuilder { + private Class keyType; + + private Class valueType; + + private String storagePath; + + private String threadPoolAlias; + + private Settings settings; + + private String diskCacheAlias; + + private String settingPrefix; + + // Provides capability to make ehCache event listener to run in sync mode. Used for testing too. 
+ private boolean isEventListenerModeSync; + + public Builder() {} + + public EhCacheDiskCache.Builder setKeyType(Class keyType) { + this.keyType = keyType; + return this; + } + + public EhCacheDiskCache.Builder setValueType(Class valueType) { + this.valueType = valueType; + return this; + } + + public EhCacheDiskCache.Builder setStoragePath(String storagePath) { + this.storagePath = storagePath; + return this; + } + + public EhCacheDiskCache.Builder setThreadPoolAlias(String threadPoolAlias) { + this.threadPoolAlias = threadPoolAlias; + return this; + } + + public EhCacheDiskCache.Builder setSettings(Settings settings) { + this.settings = settings; + return this; + } + + public EhCacheDiskCache.Builder setDiskCacheAlias(String diskCacheAlias) { + this.diskCacheAlias = diskCacheAlias; + return this; + } + + public EhCacheDiskCache.Builder setSettingPrefix(String settingPrefix) { + // TODO: Do some basic validation. So that it doesn't end with "." etc. + this.settingPrefix = settingPrefix; + return this; + } + + public EhCacheDiskCache.Builder setIsEventListenerModeSync(boolean isEventListenerModeSync) { + this.isEventListenerModeSync = isEventListenerModeSync; + return this; + } + + public EhCacheDiskCache build() { + return new EhCacheDiskCache<>(this); + } + } +} diff --git a/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java b/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java index c497c8dbb7ea9..5b9ff5921a01c 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java +++ b/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java @@ -13,6 +13,7 @@ import org.opensearch.common.cache.LoadAwareCacheLoader; import org.opensearch.common.cache.RemovalListener; import org.opensearch.common.cache.RemovalNotification; +import org.opensearch.common.cache.stats.CacheStats; import org.opensearch.common.cache.store.builders.StoreAwareCacheBuilder; 
import org.opensearch.common.cache.store.enums.CacheStoreType; import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; @@ -30,6 +31,8 @@ public class OpenSearchOnHeapCache implements StoreAwareCache, Remov private final StoreAwareCacheEventListener eventListener; + private final CacheStats stats = new OpenSearchOnHeapCacheStats(); + public OpenSearchOnHeapCache(Builder builder) { CacheBuilder cacheBuilder = CacheBuilder.builder() .setMaximumWeight(builder.getMaxWeightInBytes()) @@ -88,7 +91,7 @@ public Iterable keys() { @Override public long count() { - return cache.count(); + return stats.count(); } @Override @@ -96,6 +99,14 @@ public void refresh() { cache.refresh(); } + @Override + public void close() {} + + @Override + public CacheStats stats() { + return stats; + } + @Override public CacheStoreType getTierType() { return CacheStoreType.ON_HEAP; @@ -113,6 +124,16 @@ public void onRemoval(RemovalNotification notification) { ); } + /** + * Stats for opensearch on heap cache. 
+ */ + class OpenSearchOnHeapCacheStats implements CacheStats { + @Override + public long count() { + return cache.count(); + } + } + /** * Builder object * @param Type of key diff --git a/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCache.java b/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCache.java index 8b432c9484aed..2216b90b69d69 100644 --- a/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCache.java +++ b/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCache.java @@ -11,6 +11,7 @@ import org.opensearch.common.cache.ICache; import org.opensearch.common.cache.LoadAwareCacheLoader; import org.opensearch.common.cache.RemovalReason; +import org.opensearch.common.cache.stats.CacheStats; import org.opensearch.common.cache.store.StoreAwareCache; import org.opensearch.common.cache.store.StoreAwareCacheRemovalNotification; import org.opensearch.common.cache.store.StoreAwareCacheValue; @@ -45,6 +46,7 @@ public class TieredSpilloverCache implements ICache, StoreAwareCache private final Optional> onDiskCache; private final StoreAwareCache onHeapCache; private final StoreAwareCacheEventListener listener; + private final CacheStats stats = new TieredSpillOverCacheStats(); ReadWriteLock readWriteLock = new ReentrantReadWriteLock(); ReleasableLock readLock = new ReleasableLock(readWriteLock.readLock()); ReleasableLock writeLock = new ReleasableLock(readWriteLock.writeLock()); @@ -162,11 +164,7 @@ public Iterable keys() { @Override public long count() { - long totalCount = 0; - for (StoreAwareCache storeAwareCache : cacheList) { - totalCount += storeAwareCache.count(); - } - return totalCount; + return stats.count(); } @Override @@ -178,6 +176,18 @@ public void refresh() { } } + @Override + public void close() { + for (StoreAwareCache storeAwareCache : cacheList) { + storeAwareCache.close(); + } + } + + @Override + public CacheStats stats() { + return stats; + } + @Override public void 
onMiss(K key, CacheStoreType cacheStoreType) { // Misses for tiered cache are tracked here itself. @@ -234,6 +244,21 @@ private Function> getValueFromTieredCache(boolean tri }; } + /** + * Stats for tiered spillover cache. + */ + class TieredSpillOverCacheStats implements CacheStats { + + @Override + public long count() { + long totalCount = 0; + for (StoreAwareCache storeAwareCache : cacheList) { + totalCount += storeAwareCache.count(); + } + return totalCount; + } + } + /** * Builder object for tiered spillover cache. * @param Type of key diff --git a/server/src/main/resources/org/opensearch/bootstrap/security.policy b/server/src/main/resources/org/opensearch/bootstrap/security.policy index 77cd0ab05278e..a07ea064928e3 100644 --- a/server/src/main/resources/org/opensearch/bootstrap/security.policy +++ b/server/src/main/resources/org/opensearch/bootstrap/security.policy @@ -188,4 +188,8 @@ grant { permission java.io.FilePermission "/sys/fs/cgroup/memory", "read"; permission java.io.FilePermission "/sys/fs/cgroup/memory/-", "read"; + // For ehcache + permission java.lang.RuntimePermission "createClassLoader"; + permission java.lang.RuntimePermission "accessClassInPackage.sun.misc"; + }; diff --git a/server/src/test/java/org/opensearch/common/cache/store/EhCacheDiskCacheTests.java b/server/src/test/java/org/opensearch/common/cache/store/EhCacheDiskCacheTests.java new file mode 100644 index 0000000000000..12bffb93fc2f8 --- /dev/null +++ b/server/src/test/java/org/opensearch/common/cache/store/EhCacheDiskCacheTests.java @@ -0,0 +1,469 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
package org.opensearch.common.cache.store;

import org.opensearch.common.cache.LoadAwareCacheLoader;
import org.opensearch.common.cache.store.enums.CacheStoreType;
import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.env.NodeEnvironment;
import org.opensearch.test.OpenSearchSingleNodeTestCase;

import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Phaser;
import java.util.concurrent.atomic.AtomicInteger;

import static org.hamcrest.CoreMatchers.instanceOf;

/**
 * Tests for {@link EhCacheDiskCache}: basic get/put, concurrent access, iteration,
 * evictions and the custom computeIfAbsent implementation.
 */
public class EhCacheDiskCacheTests extends OpenSearchSingleNodeTestCase {

    private static final int CACHE_SIZE_IN_BYTES = 1024 * 101;
    private static final String SETTING_PREFIX = "indices.request.cache";

    public void testBasicGetAndPut() throws IOException {
        Settings settings = Settings.builder().build();
        MockEventListener<String, String> mockEventListener = new MockEventListener<>();
        try (NodeEnvironment env = newNodeEnvironment(settings)) {
            StoreAwareCache<String, String> ehcacheTest = new EhCacheDiskCache.Builder<String, String>().setKeyType(String.class)
                .setValueType(String.class)
                .setSettings(settings)
                .setThreadPoolAlias("ehcacheTest")
                .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache")
                .setSettingPrefix(SETTING_PREFIX)
                .setExpireAfterAccess(TimeValue.MAX_VALUE)
                .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES)
                .setEventListener(mockEventListener)
                .build();
            int randomKeys = randomIntBetween(10, 100);
            Map<String, String> keyValueMap = new HashMap<>();
            for (int i = 0; i < randomKeys; i++) {
                keyValueMap.put(UUID.randomUUID().toString(), UUID.randomUUID().toString());
            }
            for (Map.Entry<String, String> entry : keyValueMap.entrySet()) {
                ehcacheTest.put(entry.getKey(), entry.getValue());
            }
            for (Map.Entry<String, String> entry : keyValueMap.entrySet()) {
                String value = ehcacheTest.get(entry.getKey());
                assertEquals(entry.getValue(), value);
            }
            assertEquals(randomKeys, mockEventListener.onCachedCount.get());
            assertEquals(randomKeys, mockEventListener.onHitCount.get());

            // Validate misses
            int expectedNumberOfMisses = randomIntBetween(10, 200);
            for (int i = 0; i < expectedNumberOfMisses; i++) {
                ehcacheTest.get(UUID.randomUUID().toString());
            }

            assertEquals(expectedNumberOfMisses, mockEventListener.onMissCount.get());
            ehcacheTest.close();
        }
    }

    public void testConcurrentPut() throws Exception {
        Settings settings = Settings.builder().build();
        MockEventListener<String, String> mockEventListener = new MockEventListener<>();
        try (NodeEnvironment env = newNodeEnvironment(settings)) {
            StoreAwareCache<String, String> ehcacheTest = new EhCacheDiskCache.Builder<String, String>().setKeyType(String.class)
                .setValueType(String.class)
                .setSettings(settings)
                .setThreadPoolAlias("ehcacheTest")
                .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache")
                .setSettingPrefix(SETTING_PREFIX)
                .setExpireAfterAccess(TimeValue.MAX_VALUE)
                .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES)
                .setEventListener(mockEventListener)
                .build();
            int randomKeys = randomIntBetween(20, 100);
            Thread[] threads = new Thread[randomKeys];
            Phaser phaser = new Phaser(randomKeys + 1);
            CountDownLatch countDownLatch = new CountDownLatch(randomKeys);
            Map<String, String> keyValueMap = new HashMap<>();
            int j = 0;
            for (int i = 0; i < randomKeys; i++) {
                keyValueMap.put(UUID.randomUUID().toString(), UUID.randomUUID().toString());
            }
            for (Map.Entry<String, String> entry : keyValueMap.entrySet()) {
                threads[j] = new Thread(() -> {
                    phaser.arriveAndAwaitAdvance();
                    ehcacheTest.put(entry.getKey(), entry.getValue());
                    countDownLatch.countDown();
                });
                threads[j].start();
                j++;
            }
            phaser.arriveAndAwaitAdvance(); // Will trigger parallel puts above.
            countDownLatch.await(); // Wait for all threads to finish
            for (Map.Entry<String, String> entry : keyValueMap.entrySet()) {
                String value = ehcacheTest.get(entry.getKey());
                assertEquals(entry.getValue(), value);
            }
            assertEquals(randomKeys, mockEventListener.onCachedCount.get());
            ehcacheTest.close();
        }
    }

    public void testEhcacheParallelGets() throws Exception {
        Settings settings = Settings.builder().build();
        MockEventListener<String, String> mockEventListener = new MockEventListener<>();
        try (NodeEnvironment env = newNodeEnvironment(settings)) {
            StoreAwareCache<String, String> ehcacheTest = new EhCacheDiskCache.Builder<String, String>().setKeyType(String.class)
                .setValueType(String.class)
                .setSettings(settings)
                .setThreadPoolAlias("ehcacheTest")
                .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache")
                .setSettingPrefix(SETTING_PREFIX)
                .setIsEventListenerModeSync(true) // For accurate count
                .setExpireAfterAccess(TimeValue.MAX_VALUE)
                .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES)
                .setEventListener(mockEventListener)
                .build();
            int randomKeys = randomIntBetween(20, 100);
            Thread[] threads = new Thread[randomKeys];
            Phaser phaser = new Phaser(randomKeys + 1);
            CountDownLatch countDownLatch = new CountDownLatch(randomKeys);
            Map<String, String> keyValueMap = new HashMap<>();
            int j = 0;
            for (int i = 0; i < randomKeys; i++) {
                keyValueMap.put(UUID.randomUUID().toString(), UUID.randomUUID().toString());
            }
            for (Map.Entry<String, String> entry : keyValueMap.entrySet()) {
                ehcacheTest.put(entry.getKey(), entry.getValue());
            }
            assertEquals(keyValueMap.size(), ehcacheTest.count());
            for (Map.Entry<String, String> entry : keyValueMap.entrySet()) {
                threads[j] = new Thread(() -> {
                    phaser.arriveAndAwaitAdvance();
                    assertEquals(entry.getValue(), ehcacheTest.get(entry.getKey()));
                    countDownLatch.countDown();
                });
                threads[j].start();
                j++;
            }
            phaser.arriveAndAwaitAdvance(); // Will trigger parallel gets above.
            countDownLatch.await(); // Wait for all threads to finish
            assertEquals(randomKeys, mockEventListener.onHitCount.get());
            ehcacheTest.close();
        }
    }

    public void testEhcacheKeyIterator() throws Exception {
        Settings settings = Settings.builder().build();
        try (NodeEnvironment env = newNodeEnvironment(settings)) {
            StoreAwareCache<String, String> ehcacheTest = new EhCacheDiskCache.Builder<String, String>().setKeyType(String.class)
                .setValueType(String.class)
                .setSettings(settings)
                .setThreadPoolAlias("ehcacheTest")
                .setSettingPrefix(SETTING_PREFIX)
                .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache")
                .setExpireAfterAccess(TimeValue.MAX_VALUE)
                .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES)
                .setEventListener(new MockEventListener<>())
                .build();

            int randomKeys = randomIntBetween(2, 100);
            Map<String, String> keyValueMap = new HashMap<>();
            for (int i = 0; i < randomKeys; i++) {
                keyValueMap.put(UUID.randomUUID().toString(), UUID.randomUUID().toString());
            }
            for (Map.Entry<String, String> entry : keyValueMap.entrySet()) {
                ehcacheTest.put(entry.getKey(), entry.getValue());
            }
            Iterator<String> keys = ehcacheTest.keys().iterator();
            int keysCount = 0;
            while (keys.hasNext()) {
                String key = keys.next();
                keysCount++;
                assertNotNull(ehcacheTest.get(key));
            }
            assertEquals(CacheStoreType.DISK, ehcacheTest.getTierType());
            assertEquals(keysCount, randomKeys);
            ehcacheTest.close();
        }
    }

    public void testEvictions() throws Exception {
        Settings settings = Settings.builder().build();
        MockEventListener<String, String> mockEventListener = new MockEventListener<>();
        try (NodeEnvironment env = newNodeEnvironment(settings)) {
            StoreAwareCache<String, String> ehcacheTest = new EhCacheDiskCache.Builder<String, String>().setKeyType(String.class)
                .setValueType(String.class)
                .setSettings(settings)
                .setThreadPoolAlias("ehcacheTest")
                .setSettingPrefix(SETTING_PREFIX)
                .setIsEventListenerModeSync(true)
                .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache")
                .setExpireAfterAccess(TimeValue.MAX_VALUE)
                .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES)
                .setEventListener(mockEventListener)
                .build();

            // Generate a string with 100 characters
            String value = generateRandomString(100);

            // Trying to generate more than 100kb to cause evictions.
            for (int i = 0; i < 1000; i++) {
                String key = "Key" + i;
                ehcacheTest.put(key, value);
            }
            assertTrue(mockEventListener.onRemovalCount.get() > 0);
            ehcacheTest.close();
        }
    }

    public void testComputeIfAbsentConcurrently() throws Exception {
        Settings settings = Settings.builder().build();
        MockEventListener<String, String> mockEventListener = new MockEventListener<>();
        try (NodeEnvironment env = newNodeEnvironment(settings)) {
            StoreAwareCache<String, String> ehcacheTest = new EhCacheDiskCache.Builder<String, String>().setKeyType(String.class)
                .setValueType(String.class)
                .setSettings(settings)
                .setThreadPoolAlias("ehcacheTest")
                .setSettingPrefix(SETTING_PREFIX)
                .setIsEventListenerModeSync(true)
                .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache")
                .setExpireAfterAccess(TimeValue.MAX_VALUE)
                .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES)
                .setEventListener(mockEventListener)
                .build();

            // FIX: restored the randomized request count — a debug value of 2 had been left in
            // ("int numberOfRequest = 2;// randomIntBetween(200, 400);"), defeating the concurrency coverage.
            int numberOfRequest = randomIntBetween(200, 400);
            String key = UUID.randomUUID().toString();
            String value = "dummy";
            Thread[] threads = new Thread[numberOfRequest];
            Phaser phaser = new Phaser(numberOfRequest + 1);
            CountDownLatch countDownLatch = new CountDownLatch(numberOfRequest);

            List<LoadAwareCacheLoader<String, String>> loadAwareCacheLoaderList = new CopyOnWriteArrayList<>();

            // Try to hit different request with the same key concurrently. Verify value is only loaded once.
            for (int i = 0; i < numberOfRequest; i++) {
                threads[i] = new Thread(() -> {
                    LoadAwareCacheLoader<String, String> loadAwareCacheLoader = new LoadAwareCacheLoader<>() {
                        boolean isLoaded;

                        @Override
                        public boolean isLoaded() {
                            return isLoaded;
                        }

                        @Override
                        public String load(String key) {
                            isLoaded = true;
                            return value;
                        }
                    };
                    loadAwareCacheLoaderList.add(loadAwareCacheLoader);
                    phaser.arriveAndAwaitAdvance();
                    try {
                        assertEquals(value, ehcacheTest.computeIfAbsent(key, loadAwareCacheLoader));
                    } catch (Exception e) {
                        throw new RuntimeException(e);
                    }
                    countDownLatch.countDown();
                });
                threads[i].start();
            }
            phaser.arriveAndAwaitAdvance();
            countDownLatch.await();
            int numberOfTimesValueLoaded = 0;
            for (int i = 0; i < numberOfRequest; i++) {
                if (loadAwareCacheLoaderList.get(i).isLoaded()) {
                    numberOfTimesValueLoaded++;
                }
            }
            assertEquals(1, numberOfTimesValueLoaded);
            assertEquals(0, ((EhCacheDiskCache) ehcacheTest).getCompletableFutureMap().size());
            assertEquals(1, mockEventListener.onMissCount.get());
            assertEquals(1, mockEventListener.onCachedCount.get());
            assertEquals(numberOfRequest - 1, mockEventListener.onHitCount.get());
            ehcacheTest.close();
        }
    }

    public void testComputeIfAbsentConcurrentlyAndThrowsException() throws Exception {
        Settings settings = Settings.builder().build();
        MockEventListener<String, String> mockEventListener = new MockEventListener<>();
        try (NodeEnvironment env = newNodeEnvironment(settings)) {
            StoreAwareCache<String, String> ehcacheTest = new EhCacheDiskCache.Builder<String, String>().setKeyType(String.class)
                .setValueType(String.class)
                .setSettings(settings)
                .setThreadPoolAlias("ehcacheTest")
                .setSettingPrefix(SETTING_PREFIX)
                .setIsEventListenerModeSync(true)
                .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache")
                .setExpireAfterAccess(TimeValue.MAX_VALUE)
                .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES)
                .setEventListener(mockEventListener)
                .build();

            int numberOfRequest = randomIntBetween(200, 400);
            String key = UUID.randomUUID().toString();
            Thread[] threads = new Thread[numberOfRequest];
            Phaser phaser = new Phaser(numberOfRequest + 1);
            CountDownLatch countDownLatch = new CountDownLatch(numberOfRequest);

            List<LoadAwareCacheLoader<String, String>> loadAwareCacheLoaderList = new CopyOnWriteArrayList<>();

            // Try to hit different request with the same key concurrently. Loader throws exception.
            for (int i = 0; i < numberOfRequest; i++) {
                threads[i] = new Thread(() -> {
                    LoadAwareCacheLoader<String, String> loadAwareCacheLoader = new LoadAwareCacheLoader<>() {
                        boolean isLoaded;

                        @Override
                        public boolean isLoaded() {
                            return isLoaded;
                        }

                        @Override
                        public String load(String key) throws Exception {
                            isLoaded = true;
                            throw new RuntimeException("Exception");
                        }
                    };
                    loadAwareCacheLoaderList.add(loadAwareCacheLoader);
                    phaser.arriveAndAwaitAdvance();
                    assertThrows(ExecutionException.class, () -> ehcacheTest.computeIfAbsent(key, loadAwareCacheLoader));
                    countDownLatch.countDown();
                });
                threads[i].start();
            }
            phaser.arriveAndAwaitAdvance();
            countDownLatch.await();

            // Futures must be cleaned up even when the loader failed.
            assertEquals(0, ((EhCacheDiskCache) ehcacheTest).getCompletableFutureMap().size());
            ehcacheTest.close();
        }
    }

    public void testComputeIfAbsentWithNullValueLoading() throws Exception {
        Settings settings = Settings.builder().build();
        MockEventListener<String, String> mockEventListener = new MockEventListener<>();
        try (NodeEnvironment env = newNodeEnvironment(settings)) {
            StoreAwareCache<String, String> ehcacheTest = new EhCacheDiskCache.Builder<String, String>().setKeyType(String.class)
                .setValueType(String.class)
                .setSettings(settings)
                .setThreadPoolAlias("ehcacheTest")
                .setSettingPrefix(SETTING_PREFIX)
                .setIsEventListenerModeSync(true)
                .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache")
                .setExpireAfterAccess(TimeValue.MAX_VALUE)
                .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES)
                .setEventListener(mockEventListener)
                .build();

            int numberOfRequest = randomIntBetween(200, 400);
            String key = UUID.randomUUID().toString();
            Thread[] threads = new Thread[numberOfRequest];
            Phaser phaser = new Phaser(numberOfRequest + 1);
            CountDownLatch countDownLatch = new CountDownLatch(numberOfRequest);

            List<LoadAwareCacheLoader<String, String>> loadAwareCacheLoaderList = new CopyOnWriteArrayList<>();

            // Try to hit different request with the same key concurrently. Loader returns null.
            for (int i = 0; i < numberOfRequest; i++) {
                threads[i] = new Thread(() -> {
                    LoadAwareCacheLoader<String, String> loadAwareCacheLoader = new LoadAwareCacheLoader<>() {
                        boolean isLoaded;

                        @Override
                        public boolean isLoaded() {
                            return isLoaded;
                        }

                        @Override
                        public String load(String key) throws Exception {
                            isLoaded = true;
                            return null;
                        }
                    };
                    loadAwareCacheLoaderList.add(loadAwareCacheLoader);
                    phaser.arriveAndAwaitAdvance();
                    try {
                        ehcacheTest.computeIfAbsent(key, loadAwareCacheLoader);
                    } catch (Exception ex) {
                        assertThat(ex.getCause(), instanceOf(NullPointerException.class));
                    }
                    assertThrows(ExecutionException.class, () -> ehcacheTest.computeIfAbsent(key, loadAwareCacheLoader));
                    countDownLatch.countDown();
                });
                threads[i].start();
            }
            phaser.arriveAndAwaitAdvance();
            countDownLatch.await();

            assertEquals(0, ((EhCacheDiskCache) ehcacheTest).getCompletableFutureMap().size());
            ehcacheTest.close();
        }
    }

    /** Builds a pseudo-random alphanumeric string of the given length (test fixture data only). */
    private static String generateRandomString(int length) {
        String characters = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
        StringBuilder randomString = new StringBuilder(length);

        for (int i = 0; i < length; i++) {
            int index = (int) (Math.random() * characters.length());
            randomString.append(characters.charAt(index));
        }

        return randomString.toString();
    }

    // FIX: removed the unused EventType enum that carried the note "TODO: Remove this from here in final PR".

    /** Counting listener used to verify hit/miss/cache/removal notifications from the disk tier. */
    class MockEventListener<K, V> implements StoreAwareCacheEventListener<K, V> {

        AtomicInteger onMissCount = new AtomicInteger();
        AtomicInteger onHitCount = new AtomicInteger();
        AtomicInteger onCachedCount = new AtomicInteger();
        AtomicInteger onRemovalCount = new AtomicInteger();

        MockEventListener() {}

        @Override
        public void onMiss(K key, CacheStoreType cacheStoreType) {
            assert cacheStoreType.equals(CacheStoreType.DISK);
            onMissCount.incrementAndGet();
        }

        @Override
        public void onRemoval(StoreAwareCacheRemovalNotification<K, V> notification) {
            assert notification.getCacheStoreType().equals(CacheStoreType.DISK);
            onRemovalCount.incrementAndGet();
        }

        @Override
        public void onHit(K key, V value, CacheStoreType cacheStoreType) {
            assert cacheStoreType.equals(CacheStoreType.DISK);
            onHitCount.incrementAndGet();
        }

        @Override
        public void onCached(K key, V value, CacheStoreType cacheStoreType) {
            assert cacheStoreType.equals(CacheStoreType.DISK);
            onCachedCount.incrementAndGet();
        }
    }
}
{ + + } + + @Override + public CacheStats stats() { + return null; + } + @Override public CacheStoreType getTierType() { return CacheStoreType.DISK; From 00f4545cc26b458ff4a94b83c678f3afe0df103c Mon Sep 17 00:00:00 2001 From: Sagar Upadhyaya Date: Tue, 16 Jan 2024 00:28:32 -0800 Subject: [PATCH 02/22] Adding suppressForbidden for File.io used by ehcache Signed-off-by: Sagar Upadhyaya --- .../org/opensearch/common/cache/store/EhCacheDiskCache.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/server/src/main/java/org/opensearch/common/cache/store/EhCacheDiskCache.java b/server/src/main/java/org/opensearch/common/cache/store/EhCacheDiskCache.java index f6315dc78eadf..80def5f260cc5 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/EhCacheDiskCache.java +++ b/server/src/main/java/org/opensearch/common/cache/store/EhCacheDiskCache.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.opensearch.OpenSearchException; +import org.opensearch.common.SuppressForbidden; import org.opensearch.common.cache.LoadAwareCacheLoader; import org.opensearch.common.cache.RemovalReason; import org.opensearch.common.cache.stats.CacheStats; @@ -145,6 +146,7 @@ private EhCacheDiskCache(Builder builder) { this.cache = buildCache(Duration.ofMillis(expireAfterAccess.getMillis()), builder); } + @SuppressForbidden(reason = "Ehcache uses File.io") private PersistentCacheManager buildCacheManager() { // In case we use multiple ehCaches, we can define this cache manager at a global level. 
return CacheManagerBuilder.newCacheManagerBuilder() From 30ad4e9b10ce473286ec5a6ab591fa97dfdbe876 Mon Sep 17 00:00:00 2001 From: Sagar Upadhyaya Date: Mon, 22 Jan 2024 09:42:12 -0800 Subject: [PATCH 03/22] Fixing gradle build failure Signed-off-by: Sagar Upadhyaya --- modules/transport-netty4/build.gradle | 7 ------- plugins/transport-nio/build.gradle | 7 ------- plugins/transport-reactor-netty4/build.gradle | 7 ------- .../java/org/opensearch/common/cache/stats/CacheStats.java | 2 +- 4 files changed, 1 insertion(+), 22 deletions(-) diff --git a/modules/transport-netty4/build.gradle b/modules/transport-netty4/build.gradle index 83c4db80b7798..dd19650b8da9e 100644 --- a/modules/transport-netty4/build.gradle +++ b/modules/transport-netty4/build.gradle @@ -177,13 +177,6 @@ thirdPartyAudit { 'org.jboss.marshalling.MarshallingConfiguration', 'org.jboss.marshalling.Unmarshaller', - // from io.netty.util.internal.logging.InternalLoggerFactory (netty) - it's optional - 'org.slf4j.helpers.FormattingTuple', - 'org.slf4j.helpers.MessageFormatter', - 'org.slf4j.Logger', - 'org.slf4j.LoggerFactory', - 'org.slf4j.spi.LocationAwareLogger', - 'com.google.protobuf.nano.CodedOutputByteBufferNano', 'com.google.protobuf.nano.MessageNano', 'com.ning.compress.BufferRecycler', diff --git a/plugins/transport-nio/build.gradle b/plugins/transport-nio/build.gradle index 8c0ee8ba718ac..4e503413c265b 100644 --- a/plugins/transport-nio/build.gradle +++ b/plugins/transport-nio/build.gradle @@ -103,13 +103,6 @@ thirdPartyAudit { 'org.jboss.marshalling.MarshallingConfiguration', 'org.jboss.marshalling.Unmarshaller', - // from io.netty.util.internal.logging.InternalLoggerFactory (netty) - it's optional - 'org.slf4j.helpers.FormattingTuple', - 'org.slf4j.helpers.MessageFormatter', - 'org.slf4j.Logger', - 'org.slf4j.LoggerFactory', - 'org.slf4j.spi.LocationAwareLogger', - 'com.google.protobuf.nano.CodedOutputByteBufferNano', 'com.google.protobuf.nano.MessageNano', 'com.ning.compress.BufferRecycler', 
diff --git a/plugins/transport-reactor-netty4/build.gradle b/plugins/transport-reactor-netty4/build.gradle index 7d7eb330b4a55..74ceb2ab5687a 100644 --- a/plugins/transport-reactor-netty4/build.gradle +++ b/plugins/transport-reactor-netty4/build.gradle @@ -136,13 +136,6 @@ thirdPartyAudit { 'org.jboss.marshalling.MarshallingConfiguration', 'org.jboss.marshalling.Unmarshaller', - // from io.netty.util.internal.logging.InternalLoggerFactory (netty) - it's optional - 'org.slf4j.helpers.FormattingTuple', - 'org.slf4j.helpers.MessageFormatter', - 'org.slf4j.Logger', - 'org.slf4j.LoggerFactory', - 'org.slf4j.spi.LocationAwareLogger', - 'com.google.protobuf.nano.CodedOutputByteBufferNano', 'com.google.protobuf.nano.MessageNano', 'com.ning.compress.BufferRecycler', diff --git a/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java b/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java index a952a2485ed23..cf84f296916fb 100644 --- a/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java +++ b/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java @@ -13,6 +13,6 @@ * TODO: Add rest of stats like hits/misses. */ public interface CacheStats { - // Provides the number of entries in cache. + // Provides the current number of entries in cache. 
long count(); } From 6bcf2f4f8b36a96a7393989be63c3c9051017ae3 Mon Sep 17 00:00:00 2001 From: Sagar Upadhyaya Date: Mon, 22 Jan 2024 09:51:56 -0800 Subject: [PATCH 04/22] Adding changelog Signed-off-by: Sagar Upadhyaya --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 10338f6646053..3cce25d971bc7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -125,6 +125,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Introduce cluster level setting `cluster.index.restrict.replication.type` to prevent replication type setting override during index creations([#11583](https://github.com/opensearch-project/OpenSearch/pull/11583)) - Add match_only_text field that is optimized for storage by trading off positional queries performance ([#6836](https://github.com/opensearch-project/OpenSearch/pull/11039)) - Introduce new feature flag "WRITEABLE_REMOTE_INDEX" to gate the writeable remote index functionality ([#11717](https://github.com/opensearch-project/OpenSearch/pull/11170)) +- [Tiered caching] Integrating ehcache as a disk cache option ([#11874](https://github.com/opensearch-project/OpenSearch/pull/11874)) ### Dependencies - Bumps jetty version to 9.4.52.v20230823 to fix GMS-2023-1857 ([#9822](https://github.com/opensearch-project/OpenSearch/pull/9822)) From a5e5afdae51328a5833fc5f281ee354c88437d6c Mon Sep 17 00:00:00 2001 From: Sagar Upadhyaya Date: Thu, 25 Jan 2024 13:12:54 -0800 Subject: [PATCH 05/22] Exposing ehcache disk cache variant as a plugin Signed-off-by: Sagar Upadhyaya --- modules/transport-netty4/build.gradle | 7 + plugins/cache-ehcache/.gitignore | 29 ++ plugins/cache-ehcache/build.gradle | 81 +++++ .../licenses/ehcache-3.10.8.jar.sha1 | 0 .../licenses/ehcache-LICENSE.txt | 0 .../licenses/ehcache-NOTICE.txt | 0 .../opensearch/cache/EhcacheCachePlugin.java | 39 +++ .../opensearch/cache/EhcacheDiskCache.java | 324 +++++++++++------- .../org/opensearch/cache/EhcacheSettings.java | 52 
+++ .../org/opensearch/cache/package-info.java | 10 + .../plugin-metadata/plugin-security.policy | 15 + .../cache}/EhCacheDiskCacheTests.java | 91 +++-- .../opensearch/cache/EhcachePluginTests.java | 26 ++ .../licenses/slf4j-api-1.7.36.jar.sha1 | 0 .../licenses/slf4j-api-LICENSE.txt | 0 .../licenses/slf4j-api-NOTICE.txt | 0 .../licenses/slf4j-api-1.7.36.jar.sha1 | 1 + .../licenses/slf4j-api-LICENSE.txt | 21 ++ .../licenses/slf4j-api-NOTICE.txt | 0 .../licenses/slf4j-api-1.7.36.jar.sha1 | 1 + .../licenses/slf4j-api-LICENSE.txt | 21 ++ .../licenses/slf4j-api-NOTICE.txt | 0 .../licenses/slf4j-api-1.7.36.jar.sha1 | 1 + .../licenses/slf4j-api-LICENSE.txt | 21 ++ .../licenses/slf4j-api-NOTICE.txt | 0 .../licenses/slf4j-api-1.7.36.jar.sha1 | 1 + .../licenses/slf4j-api-LICENSE.txt | 21 ++ .../licenses/slf4j-api-NOTICE.txt | 0 .../licenses/slf4j-api-1.7.36.jar.sha1 | 1 + .../licenses/slf4j-api-LICENSE.txt | 21 ++ .../licenses/slf4j-api-NOTICE.txt | 0 plugins/transport-nio/build.gradle | 7 + plugins/transport-reactor-netty4/build.gradle | 7 + server/build.gradle | 3 - .../common/cache/provider/CacheProvider.java | 91 +++++ .../common/cache/provider/package-info.java | 10 + .../common/cache/store/StoreAwareCache.java | 12 + .../builders/StoreAwareCacheBuilder.java | 14 +- .../store/config/StoreAwareCacheConfig.java | 195 +++++++++++ .../cache/store/config/package-info.java | 10 + .../cache/store/enums/CacheStoreType.java | 3 + .../main/java/org/opensearch/node/Node.java | 3 + .../org/opensearch/plugins/CachePlugin.java | 28 ++ 43 files changed, 986 insertions(+), 181 deletions(-) create mode 100644 plugins/cache-ehcache/.gitignore create mode 100644 plugins/cache-ehcache/build.gradle rename {server => plugins/cache-ehcache}/licenses/ehcache-3.10.8.jar.sha1 (100%) rename {server => plugins/cache-ehcache}/licenses/ehcache-LICENSE.txt (100%) rename {server => plugins/cache-ehcache}/licenses/ehcache-NOTICE.txt (100%) create mode 100644 
plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheCachePlugin.java rename server/src/main/java/org/opensearch/common/cache/store/EhCacheDiskCache.java => plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheDiskCache.java (72%) create mode 100644 plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheSettings.java create mode 100644 plugins/cache-ehcache/src/main/java/org/opensearch/cache/package-info.java create mode 100644 plugins/cache-ehcache/src/main/plugin-metadata/plugin-security.policy rename {server/src/test/java/org/opensearch/common/cache/store => plugins/cache-ehcache/src/test/java/org/opensearch/cache}/EhCacheDiskCacheTests.java (93%) create mode 100644 plugins/cache-ehcache/src/test/java/org/opensearch/cache/EhcachePluginTests.java rename {server => plugins/discovery-ec2}/licenses/slf4j-api-1.7.36.jar.sha1 (100%) rename {server => plugins/discovery-ec2}/licenses/slf4j-api-LICENSE.txt (100%) rename {server => plugins/discovery-ec2}/licenses/slf4j-api-NOTICE.txt (100%) create mode 100644 plugins/identity-shiro/licenses/slf4j-api-1.7.36.jar.sha1 create mode 100644 plugins/identity-shiro/licenses/slf4j-api-LICENSE.txt create mode 100644 plugins/identity-shiro/licenses/slf4j-api-NOTICE.txt create mode 100644 plugins/ingest-attachment/licenses/slf4j-api-1.7.36.jar.sha1 create mode 100644 plugins/ingest-attachment/licenses/slf4j-api-LICENSE.txt create mode 100644 plugins/ingest-attachment/licenses/slf4j-api-NOTICE.txt create mode 100644 plugins/repository-azure/licenses/slf4j-api-1.7.36.jar.sha1 create mode 100644 plugins/repository-azure/licenses/slf4j-api-LICENSE.txt create mode 100644 plugins/repository-azure/licenses/slf4j-api-NOTICE.txt create mode 100644 plugins/repository-hdfs/licenses/slf4j-api-1.7.36.jar.sha1 create mode 100644 plugins/repository-hdfs/licenses/slf4j-api-LICENSE.txt create mode 100644 plugins/repository-hdfs/licenses/slf4j-api-NOTICE.txt create mode 100644 
plugins/repository-s3/licenses/slf4j-api-1.7.36.jar.sha1 create mode 100644 plugins/repository-s3/licenses/slf4j-api-LICENSE.txt create mode 100644 plugins/repository-s3/licenses/slf4j-api-NOTICE.txt create mode 100644 server/src/main/java/org/opensearch/common/cache/provider/CacheProvider.java create mode 100644 server/src/main/java/org/opensearch/common/cache/provider/package-info.java create mode 100644 server/src/main/java/org/opensearch/common/cache/store/config/StoreAwareCacheConfig.java create mode 100644 server/src/main/java/org/opensearch/common/cache/store/config/package-info.java create mode 100644 server/src/main/java/org/opensearch/plugins/CachePlugin.java diff --git a/modules/transport-netty4/build.gradle b/modules/transport-netty4/build.gradle index dd19650b8da9e..83c4db80b7798 100644 --- a/modules/transport-netty4/build.gradle +++ b/modules/transport-netty4/build.gradle @@ -177,6 +177,13 @@ thirdPartyAudit { 'org.jboss.marshalling.MarshallingConfiguration', 'org.jboss.marshalling.Unmarshaller', + // from io.netty.util.internal.logging.InternalLoggerFactory (netty) - it's optional + 'org.slf4j.helpers.FormattingTuple', + 'org.slf4j.helpers.MessageFormatter', + 'org.slf4j.Logger', + 'org.slf4j.LoggerFactory', + 'org.slf4j.spi.LocationAwareLogger', + 'com.google.protobuf.nano.CodedOutputByteBufferNano', 'com.google.protobuf.nano.MessageNano', 'com.ning.compress.BufferRecycler', diff --git a/plugins/cache-ehcache/.gitignore b/plugins/cache-ehcache/.gitignore new file mode 100644 index 0000000000000..f68d1099657e3 --- /dev/null +++ b/plugins/cache-ehcache/.gitignore @@ -0,0 +1,29 @@ +### IntelliJ IDEA ### +out/ +!**/src/main/**/out/ +!**/src/test/**/out/ + +### Eclipse ### +.apt_generated +.classpath +.factorypath +.project +.settings +.springBeans +.sts4-cache +bin/ +!**/src/main/**/bin/ +!**/src/test/**/bin/ + +### NetBeans ### +/nbproject/private/ +/nbbuild/ +/dist/ +/nbdist/ +/.nb-gradle/ + +### VS Code ### +.vscode/ + +### Mac OS ### +.DS_Store \ No 
newline at end of file diff --git a/plugins/cache-ehcache/build.gradle b/plugins/cache-ehcache/build.gradle new file mode 100644 index 0000000000000..87c38f616cbb3 --- /dev/null +++ b/plugins/cache-ehcache/build.gradle @@ -0,0 +1,81 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +import org.apache.tools.ant.taskdefs.condition.Os +import org.opensearch.gradle.Architecture +import org.opensearch.gradle.OS +import org.opensearch.gradle.info.BuildParams + +opensearchplugin { + description 'Ehcache based cache implementation.' + classname 'org.opensearch.cache.EhcacheCachePlugin' + hasClientJar = true +} + +dependencies { + api "org.ehcache:ehcache:${versions.ehcache}" +} + +thirdPartyAudit { + ignoreViolations( + 'org.ehcache.impl.internal.concurrent.ConcurrentHashMap', + 'org.ehcache.impl.internal.concurrent.ConcurrentHashMap$CounterCell', + 'org.ehcache.impl.internal.concurrent.ConcurrentHashMap$TreeBin', + 'org.ehcache.impl.internal.concurrent.ThreadLocalRandomUtil', + 'org.ehcache.sizeof.impl.UnsafeSizeOf' + ) + + ignoreMissingClasses( + 'javax.cache.Cache', + 'javax.cache.Cache$Entry', + 'javax.cache.CacheException', + 'javax.cache.CacheManager', + 'javax.cache.configuration.CacheEntryListenerConfiguration', + 'javax.cache.configuration.CompleteConfiguration', + 'javax.cache.configuration.Configuration', + 'javax.cache.configuration.Factory', + 'javax.cache.configuration.OptionalFeature', + 'javax.cache.event.CacheEntryCreatedListener', + 'javax.cache.event.CacheEntryEvent', + 'javax.cache.event.CacheEntryEventFilter', + 'javax.cache.event.CacheEntryExpiredListener', + 'javax.cache.event.CacheEntryListener', + 'javax.cache.event.CacheEntryRemovedListener', + 'javax.cache.event.CacheEntryUpdatedListener', + 'javax.cache.event.EventType', + 'javax.cache.expiry.Duration', + 
'javax.cache.expiry.EternalExpiryPolicy', + 'javax.cache.expiry.ExpiryPolicy', + 'javax.cache.integration.CacheLoader', + 'javax.cache.integration.CacheLoaderException', + 'javax.cache.integration.CacheWriter', + 'javax.cache.integration.CacheWriterException', + 'javax.cache.integration.CompletionListener', + 'javax.cache.management.CacheMXBean', + 'javax.cache.management.CacheStatisticsMXBean', + 'javax.cache.processor.EntryProcessor', + 'javax.cache.processor.EntryProcessorResult', + 'javax.cache.processor.MutableEntry', + 'javax.cache.spi.CachingProvider', + 'javax.xml.bind.JAXBContext', + 'javax.xml.bind.JAXBElement', + 'javax.xml.bind.Marshaller', + 'javax.xml.bind.Unmarshaller', + 'javax.xml.bind.annotation.XmlElement', + 'javax.xml.bind.annotation.XmlRootElement', + 'javax.xml.bind.annotation.XmlSchema', + 'javax.xml.bind.annotation.adapters.XmlAdapter', + 'org.osgi.framework.BundleActivator', + 'org.osgi.framework.BundleContext', + 'org.osgi.framework.ServiceReference', + 'org.slf4j.Logger', + 'org.slf4j.LoggerFactory', + 'org.slf4j.Marker', + 'org.slf4j.event.Level' + ) +} diff --git a/server/licenses/ehcache-3.10.8.jar.sha1 b/plugins/cache-ehcache/licenses/ehcache-3.10.8.jar.sha1 similarity index 100% rename from server/licenses/ehcache-3.10.8.jar.sha1 rename to plugins/cache-ehcache/licenses/ehcache-3.10.8.jar.sha1 diff --git a/server/licenses/ehcache-LICENSE.txt b/plugins/cache-ehcache/licenses/ehcache-LICENSE.txt similarity index 100% rename from server/licenses/ehcache-LICENSE.txt rename to plugins/cache-ehcache/licenses/ehcache-LICENSE.txt diff --git a/server/licenses/ehcache-NOTICE.txt b/plugins/cache-ehcache/licenses/ehcache-NOTICE.txt similarity index 100% rename from server/licenses/ehcache-NOTICE.txt rename to plugins/cache-ehcache/licenses/ehcache-NOTICE.txt diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheCachePlugin.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheCachePlugin.java new file 
mode 100644 index 0000000000000..faf0b5ca79f04 --- /dev/null +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheCachePlugin.java @@ -0,0 +1,39 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.cache; + +import org.opensearch.common.cache.store.StoreAwareCache; +import org.opensearch.common.cache.store.enums.CacheStoreType; +import org.opensearch.plugins.CachePlugin; +import org.opensearch.plugins.Plugin; + +import java.util.Map; + +/** + * Ehcache based cache plugin. + */ +public class EhcacheCachePlugin extends Plugin implements CachePlugin { + + private static final String EHCACHE_CACHE_PLUGIN = "EhcachePlugin"; + + /** + * Default constructor to avoid javadoc related failures. + */ + public EhcacheCachePlugin() {} + + @Override + public Map getCacheStoreTypeMap() { + return Map.of(CacheStoreType.DISK, new EhcacheDiskCache.EhcacheDiskCacheFactory()); + } + + @Override + public String getName() { + return EHCACHE_CACHE_PLUGIN; + } +} diff --git a/server/src/main/java/org/opensearch/common/cache/store/EhCacheDiskCache.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheDiskCache.java similarity index 72% rename from server/src/main/java/org/opensearch/common/cache/store/EhCacheDiskCache.java rename to plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheDiskCache.java index 80def5f260cc5..8ecf38169fe8e 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/EhCacheDiskCache.java +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheDiskCache.java @@ -6,21 +6,25 @@ * compatible open source license. 
*/ -package org.opensearch.common.cache.store; +package org.opensearch.cache; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.opensearch.OpenSearchException; import org.opensearch.common.SuppressForbidden; +import org.opensearch.common.annotation.ExperimentalApi; import org.opensearch.common.cache.LoadAwareCacheLoader; import org.opensearch.common.cache.RemovalReason; +import org.opensearch.common.cache.provider.CacheProvider; import org.opensearch.common.cache.stats.CacheStats; +import org.opensearch.common.cache.store.StoreAwareCache; +import org.opensearch.common.cache.store.StoreAwareCacheRemovalNotification; import org.opensearch.common.cache.store.builders.StoreAwareCacheBuilder; +import org.opensearch.common.cache.store.config.StoreAwareCacheConfig; import org.opensearch.common.cache.store.enums.CacheStoreType; import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; import org.opensearch.common.collect.Tuple; import org.opensearch.common.metrics.CounterMetric; -import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; @@ -53,6 +57,11 @@ import org.ehcache.spi.loaderwriter.CacheLoadingException; import org.ehcache.spi.loaderwriter.CacheWritingException; +import static org.opensearch.cache.EhcacheSettings.DISK_SEGMENTS; +import static org.opensearch.cache.EhcacheSettings.DISK_WRITE_CONCURRENCY; +import static org.opensearch.cache.EhcacheSettings.DISK_WRITE_MAXIMUM_THREADS; +import static org.opensearch.cache.EhcacheSettings.DISK_WRITE_MINIMUM_THREADS; + /** * This variant of disk cache uses Ehcache underneath. * @param Type of key. 
@@ -61,9 +70,10 @@ * @opensearch.experimental * */ -public class EhCacheDiskCache implements StoreAwareCache { +@ExperimentalApi +public class EhcacheDiskCache implements StoreAwareCache { - private static final Logger logger = LogManager.getLogger(EhCacheDiskCache.class); + private static final Logger logger = LogManager.getLogger(EhcacheDiskCache.class); // A Cache manager can create many caches. private final PersistentCacheManager cacheManager; @@ -93,22 +103,6 @@ public class EhCacheDiskCache implements StoreAwareCache { private final static int MINIMUM_MAX_SIZE_IN_BYTES = 1024 * 100; // 100KB - // Ehcache disk write minimum threads for its pool - public final Setting DISK_WRITE_MINIMUM_THREADS; - - // Ehcache disk write maximum threads for its pool - public final Setting DISK_WRITE_MAXIMUM_THREADS; - - // Not be to confused with number of disk segments, this is different. Defines - // distinct write queues created for disk store where a group of segments share a write queue. This is - // implemented with ehcache using a partitioned thread pool exectutor By default all segments share a single write - // queue ie write concurrency is 1. Check OffHeapDiskStoreConfiguration and DiskWriteThreadPool. - public final Setting DISK_WRITE_CONCURRENCY; - - // Defines how many segments the disk cache is separated into. Higher number achieves greater concurrency but - // will hold that many file pointers. Default is 16. 
- public final Setting DISK_SEGMENTS; - private final StoreAwareCacheEventListener eventListener; /** @@ -117,7 +111,7 @@ public class EhCacheDiskCache implements StoreAwareCache { */ Map>> completableFutureMap = new ConcurrentHashMap<>(); - private EhCacheDiskCache(Builder builder) { + private EhcacheDiskCache(Builder builder) { this.keyType = Objects.requireNonNull(builder.keyType, "Key type shouldn't be null"); this.valueType = Objects.requireNonNull(builder.valueType, "Value type shouldn't be null"); this.expireAfterAccess = Objects.requireNonNull(builder.getExpireAfterAcess(), "ExpireAfterAccess value shouldn't " + "be null"); @@ -131,14 +125,7 @@ private EhCacheDiskCache(Builder builder) { } else { this.threadPoolAlias = builder.threadPoolAlias; } - this.settings = Objects.requireNonNull(builder.settings, "Settings objects shouldn't be null"); - Objects.requireNonNull(builder.settingPrefix, "Setting prefix shouldn't be null"); - this.DISK_WRITE_MINIMUM_THREADS = Setting.intSetting(builder.settingPrefix + ".tier.disk.ehcache.min_threads", 2, 1, 5); - this.DISK_WRITE_MAXIMUM_THREADS = Setting.intSetting(builder.settingPrefix + ".tier.disk.ehcache.max_threads", 2, 1, 20); - // Default value is 1 within EhCache. - this.DISK_WRITE_CONCURRENCY = Setting.intSetting(builder.settingPrefix + ".tier.disk.ehcache.concurrency", 2, 1, 3); - // Default value is 16 within Ehcache. 
- this.DISK_SEGMENTS = Setting.intSetting(builder.settingPrefix + "tier.disk.ehcache.segments", 16, 1, 32); + this.settings = Objects.requireNonNull(builder.getSettings(), "Settings objects shouldn't be null"); this.cacheManager = buildCacheManager(); Objects.requireNonNull(builder.getEventListener(), "Listener can't be null"); this.eventListener = builder.getEventListener(); @@ -146,53 +133,46 @@ private EhCacheDiskCache(Builder builder) { this.cache = buildCache(Duration.ofMillis(expireAfterAccess.getMillis()), builder); } - @SuppressForbidden(reason = "Ehcache uses File.io") - private PersistentCacheManager buildCacheManager() { - // In case we use multiple ehCaches, we can define this cache manager at a global level. - return CacheManagerBuilder.newCacheManagerBuilder() - .with(CacheManagerBuilder.persistence(new File(storagePath))) - .using( - PooledExecutionServiceConfigurationBuilder.newPooledExecutionServiceConfigurationBuilder() - .defaultPool(THREAD_POOL_ALIAS_PREFIX + "Default", 1, 3) // Default pool used for other tasks like - // event listeners - .pool(this.threadPoolAlias, DISK_WRITE_MINIMUM_THREADS.get(settings), DISK_WRITE_MAXIMUM_THREADS.get(settings)) - .build() - ) - .build(true); - } - private Cache buildCache(Duration expireAfterAccess, Builder builder) { - return this.cacheManager.createCache( - DISK_CACHE_ALIAS, - CacheConfigurationBuilder.newCacheConfigurationBuilder( - this.keyType, - this.valueType, - ResourcePoolsBuilder.newResourcePoolsBuilder().disk(maxWeightInBytes, MemoryUnit.B) - ).withExpiry(new ExpiryPolicy<>() { - @Override - public Duration getExpiryForCreation(K key, V value) { - return INFINITE; - } - - @Override - public Duration getExpiryForAccess(K key, Supplier value) { - return expireAfterAccess; - } - - @Override - public Duration getExpiryForUpdate(K key, Supplier oldValue, V newValue) { - return INFINITE; - } - }) - .withService(getListenerConfiguration(builder)) - .withService( - new OffHeapDiskStoreConfiguration( - 
this.threadPoolAlias, - DISK_WRITE_CONCURRENCY.get(settings), - DISK_SEGMENTS.get(settings) + try { + return this.cacheManager.createCache( + builder.diskCacheAlias, + CacheConfigurationBuilder.newCacheConfigurationBuilder( + this.keyType, + this.valueType, + ResourcePoolsBuilder.newResourcePoolsBuilder().disk(maxWeightInBytes, MemoryUnit.B) + ).withExpiry(new ExpiryPolicy<>() { + @Override + public Duration getExpiryForCreation(K key, V value) { + return INFINITE; + } + + @Override + public Duration getExpiryForAccess(K key, Supplier value) { + return expireAfterAccess; + } + + @Override + public Duration getExpiryForUpdate(K key, Supplier oldValue, V newValue) { + return INFINITE; + } + }) + .withService(getListenerConfiguration(builder)) + .withService( + new OffHeapDiskStoreConfiguration( + this.threadPoolAlias, + DISK_WRITE_CONCURRENCY.get(settings), + DISK_SEGMENTS.get(settings) + ) ) - ) - ); + ); + } catch (IllegalArgumentException ex) { + logger.error("Ehcache disk cache initialization failed due to illegal argument: {}", ex.getMessage()); + throw ex; + } catch (IllegalStateException ex) { + logger.error("Ehcache disk cache initialization failed: {}", ex.getMessage()); + throw ex; + } } private CacheEventListenerConfigurationBuilder getListenerConfiguration(Builder builder) { @@ -216,12 +196,26 @@ Map>> getCompletableFutureMap() { return completableFutureMap; } + @SuppressForbidden(reason = "Ehcache uses File.io") + private PersistentCacheManager buildCacheManager() { + // In case we use multiple ehCaches, we can define this cache manager at a global level. 
+ return CacheManagerBuilder.newCacheManagerBuilder() + .with(CacheManagerBuilder.persistence(new File(storagePath))) + .using( + PooledExecutionServiceConfigurationBuilder.newPooledExecutionServiceConfigurationBuilder() + .defaultPool(THREAD_POOL_ALIAS_PREFIX + "Default", 1, 3) // Default pool used for other tasks like + // event listeners + .pool(this.threadPoolAlias, DISK_WRITE_MINIMUM_THREADS.get(settings), DISK_WRITE_MAXIMUM_THREADS.get(settings)) + .build() + ) + .build(true); + } + @Override public V get(K key) { if (key == null) { throw new IllegalArgumentException("Key passed to ehcache disk cache was null."); } - // Optimize it by adding key store. V value; try { value = cache.get(key); @@ -236,6 +230,11 @@ public V get(K key) { return value; } + /** + * Puts the item into cache. + * @param key Type of key. + * @param value Type of value. + */ @Override public void put(K key, V value) { try { @@ -245,6 +244,13 @@ public void put(K key, V value) { } } + /** + * Computes the value using loader in case key is not present, otherwise fetches it. + * @param key Type of key + * @param loader loader to load the value in case key is missing + * @return value + * @throws Exception + */ @Override public V computeIfAbsent(K key, LoadAwareCacheLoader loader) throws Exception { // Ehache doesn't provide any computeIfAbsent function. Exposes putIfAbsent but that works differently and is @@ -315,27 +321,37 @@ private V compute(K key, LoadAwareCacheLoader loader) throws Exception { return value; } + /** + * Invalidate the item. + * @param key key to be invalidated. + */ @Override public void invalidate(K key) { - // There seems to be an thread leak issue while calling this and then closing cache. try { cache.remove(key); } catch (CacheWritingException ex) { // Handle throw new RuntimeException(ex); } + } @Override - public void invalidateAll() { - // TODO - } + public void invalidateAll() {} + /** + * Provides a way to iterate over disk cache keys. 
+ * @return Iterable + */ @Override public Iterable keys() { return () -> new EhCacheKeyIterator<>(cache.iterator()); } + /** + * Gives the current count of keys in disk cache. + * @return + */ @Override public long count() { return stats.count(); @@ -346,11 +362,6 @@ public void refresh() { // TODO: ehcache doesn't provide a way to refresh a cache. } - @Override - public CacheStoreType getTierType() { - return CacheStoreType.DISK; - } - @Override public void close() { cacheManager.removeCache(DISK_CACHE_ALIAS); @@ -362,16 +373,29 @@ public void close() { } } + /** + * Relevant stats for this cache. + * @return CacheStats + */ @Override public CacheStats stats() { return stats; } + /** + * Returns the tier type. + * @return CacheStoreType.DISK + */ + @Override + public CacheStoreType getTierType() { + return CacheStoreType.DISK; + } + /** * Stats related to disk cache. */ - class DiskCacheStats implements CacheStats { - private CounterMetric count = new CounterMetric(); + static class DiskCacheStats implements CacheStats { + private final CounterMetric count = new CounterMetric(); @Override public long count() { @@ -379,6 +403,32 @@ public long count() { } } + /** + * This iterator wraps ehCache iterator and only iterates over its keys. + * @param Type of key + */ + class EhCacheKeyIterator implements Iterator { + + Iterator> iterator; + + EhCacheKeyIterator(Iterator> iterator) { + this.iterator = iterator; + } + + @Override + public boolean hasNext() { + return iterator.hasNext(); + } + + @Override + public K next() { + if (!hasNext()) { + throw new NoSuchElementException(); + } + return iterator.next().getKey(); + } + } + /** * Wrapper over Ehcache original listener to listen to desired events and notify desired subscribers. * @param Type of key @@ -445,28 +495,26 @@ public void onEvent(CacheEvent event) { } /** - * This iterator wraps ehCache iterator and only iterates over its keys. - * @param Type of key + * Factory to create an ehcache disk cache. 
*/ - class EhCacheKeyIterator implements Iterator { - - Iterator> iterator; - - EhCacheKeyIterator(Iterator> iterator) { - this.iterator = iterator; - } + static class EhcacheDiskCacheFactory implements StoreAwareCache.Factory { @Override - public boolean hasNext() { - return iterator.hasNext(); + public StoreAwareCache create(StoreAwareCacheConfig storeAwareCacheConfig) { + return new Builder().setStoragePath(storeAwareCacheConfig.getStoragePath()) + .setDiskCacheAlias(storeAwareCacheConfig.getCacheAlias()) + .setKeyType(storeAwareCacheConfig.getKeyType()) + .setValueType(storeAwareCacheConfig.getValueType()) + .setEventListener(storeAwareCacheConfig.getEventListener()) + .setExpireAfterAccess(storeAwareCacheConfig.getExpireAfterAcess()) + .setMaximumWeightInBytes(storeAwareCacheConfig.getMaxWeightInBytes()) + .setSettings(storeAwareCacheConfig.getSettings()) + .build(); } @Override - public K next() { - if (!hasNext()) { - throw new NoSuchElementException(); - } - return iterator.next().getKey(); + public String getCacheName() { + return CacheProvider.CacheType.EHCACHE.getValue(); } } @@ -476,68 +524,88 @@ public K next() { * @param Type of value */ public static class Builder extends StoreAwareCacheBuilder { - private Class keyType; - - private Class valueType; private String storagePath; private String threadPoolAlias; - private Settings settings; - private String diskCacheAlias; - private String settingPrefix; - // Provides capability to make ehCache event listener to run in sync mode. Used for testing too. private boolean isEventListenerModeSync; + private Class keyType; + + private Class valueType; + + /** + * Default constructor. Added to fix javadocs. + */ public Builder() {} - public EhCacheDiskCache.Builder setKeyType(Class keyType) { + /** + * Sets the key type of value. 
+ * @param keyType type of key + * @return builder + */ + public Builder setKeyType(Class keyType) { this.keyType = keyType; return this; } - public EhCacheDiskCache.Builder setValueType(Class valueType) { + /** + * Sets the class type of value. + * @param valueType type of value + * @return builder + */ + public Builder setValueType(Class valueType) { this.valueType = valueType; return this; } - public EhCacheDiskCache.Builder setStoragePath(String storagePath) { + /** + * Desired storage path for disk cache. + * @param storagePath path for disk cache + * @return builder + */ + public Builder setStoragePath(String storagePath) { this.storagePath = storagePath; return this; } - public EhCacheDiskCache.Builder setThreadPoolAlias(String threadPoolAlias) { + /** + * Thread pool alias for the cache. + * @param threadPoolAlias alias + * @return builder + */ + public Builder setThreadPoolAlias(String threadPoolAlias) { this.threadPoolAlias = threadPoolAlias; return this; } - public EhCacheDiskCache.Builder setSettings(Settings settings) { - this.settings = settings; - return this; - } - - public EhCacheDiskCache.Builder setDiskCacheAlias(String diskCacheAlias) { + /** + * Cache alias + * @param diskCacheAlias + * @return builder + */ + public Builder setDiskCacheAlias(String diskCacheAlias) { this.diskCacheAlias = diskCacheAlias; return this; } - public EhCacheDiskCache.Builder setSettingPrefix(String settingPrefix) { - // TODO: Do some basic validation. So that it doesn't end with "." etc. - this.settingPrefix = settingPrefix; - return this; - } - - public EhCacheDiskCache.Builder setIsEventListenerModeSync(boolean isEventListenerModeSync) { + /** + * Determines whether event listener is triggered async/sync. 
+ * @param isEventListenerModeSync mode sync + * @return builder + */ + public Builder setIsEventListenerModeSync(boolean isEventListenerModeSync) { this.isEventListenerModeSync = isEventListenerModeSync; return this; } - public EhCacheDiskCache build() { - return new EhCacheDiskCache<>(this); + @Override + public EhcacheDiskCache build() { + return new EhcacheDiskCache<>(this); } } } diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheSettings.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheSettings.java new file mode 100644 index 0000000000000..507ecb20f73b9 --- /dev/null +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheSettings.java @@ -0,0 +1,52 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.cache; + +import org.opensearch.common.settings.Setting; + +/** + * Settings related to ehcache. + */ +public class EhcacheSettings { + + static final String SETTING_PREFIX = "cache.disk.ehcache"; + + /** + * Ehcache disk write minimum threads for its pool + */ + public static final Setting DISK_WRITE_MINIMUM_THREADS = Setting.intSetting(SETTING_PREFIX + "min_threads", 2, 1, 5); + + /** + * Ehcache disk write maximum threads for its pool + */ + public static final Setting DISK_WRITE_MAXIMUM_THREADS = Setting.intSetting(SETTING_PREFIX + ".max_threads", 2, 1, 20); + + /** + * Not be to confused with number of disk segments, this is different. Defines + * distinct write queues created for disk store where a group of segments share a write queue. This is + * implemented with ehcache using a partitioned thread pool exectutor By default all segments share a single write + * queue ie write concurrency is 1. Check OffHeapDiskStoreConfiguration and DiskWriteThreadPool. + * + * Default is 1 within ehcache. 
+ */ + public static final Setting DISK_WRITE_CONCURRENCY = Setting.intSetting(SETTING_PREFIX + ".concurrency", 1, 1, 3); + + /** + * Defines how many segments the disk cache is separated into. Higher number achieves greater concurrency but + * will hold that many file pointers. Default is 16. + * + * Default value is 16 within Ehcache. + */ + public static final Setting DISK_SEGMENTS = Setting.intSetting(SETTING_PREFIX + ".segments", 16, 1, 32); + + /** + * Default constructor. Added to fix javadocs. + */ + public EhcacheSettings() {} +} diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/package-info.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/package-info.java new file mode 100644 index 0000000000000..f9be1c3dbf826 --- /dev/null +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/package-info.java @@ -0,0 +1,10 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** Base package for cache plugin */ +package org.opensearch.cache; diff --git a/plugins/cache-ehcache/src/main/plugin-metadata/plugin-security.policy b/plugins/cache-ehcache/src/main/plugin-metadata/plugin-security.policy new file mode 100644 index 0000000000000..cbd43446c6ba0 --- /dev/null +++ b/plugins/cache-ehcache/src/main/plugin-metadata/plugin-security.policy @@ -0,0 +1,15 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +grant { + // For ehcache + permission java.lang.RuntimePermission "createClassLoader"; + permission java.lang.RuntimePermission "accessClassInPackage.sun.misc"; +}; + + diff --git a/server/src/test/java/org/opensearch/common/cache/store/EhCacheDiskCacheTests.java b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/EhCacheDiskCacheTests.java similarity index 93% rename from server/src/test/java/org/opensearch/common/cache/store/EhCacheDiskCacheTests.java rename to plugins/cache-ehcache/src/test/java/org/opensearch/cache/EhCacheDiskCacheTests.java index 12bffb93fc2f8..dc9354e26168a 100644 --- a/server/src/test/java/org/opensearch/common/cache/store/EhCacheDiskCacheTests.java +++ b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/EhCacheDiskCacheTests.java @@ -6,9 +6,11 @@ * compatible open source license. */ -package org.opensearch.common.cache.store; +package org.opensearch.cache; import org.opensearch.common.cache.LoadAwareCacheLoader; +import org.opensearch.common.cache.store.StoreAwareCache; +import org.opensearch.common.cache.store.StoreAwareCacheRemovalNotification; import org.opensearch.common.cache.store.enums.CacheStoreType; import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; import org.opensearch.common.settings.Settings; @@ -33,18 +35,17 @@ public class EhCacheDiskCacheTests extends OpenSearchSingleNodeTestCase { private static final int CACHE_SIZE_IN_BYTES = 1024 * 101; - private static final String SETTING_PREFIX = "indices.request.cache"; public void testBasicGetAndPut() throws IOException { Settings settings = Settings.builder().build(); MockEventListener mockEventListener = new MockEventListener<>(); try (NodeEnvironment env = newNodeEnvironment(settings)) { - StoreAwareCache ehcacheTest = new EhCacheDiskCache.Builder().setKeyType(String.class) - .setValueType(String.class) - .setSettings(settings) + StoreAwareCache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") 
.setThreadPoolAlias("ehcacheTest") .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") - .setSettingPrefix(SETTING_PREFIX) + .setKeyType(String.class) + .setValueType(String.class) + .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) .setEventListener(mockEventListener) @@ -79,12 +80,12 @@ public void testConcurrentPut() throws Exception { Settings settings = Settings.builder().build(); MockEventListener mockEventListener = new MockEventListener<>(); try (NodeEnvironment env = newNodeEnvironment(settings)) { - StoreAwareCache ehcacheTest = new EhCacheDiskCache.Builder().setKeyType(String.class) - .setValueType(String.class) - .setSettings(settings) + StoreAwareCache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") .setThreadPoolAlias("ehcacheTest") .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") - .setSettingPrefix(SETTING_PREFIX) + .setKeyType(String.class) + .setValueType(String.class) + .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) .setEventListener(mockEventListener) @@ -122,13 +123,13 @@ public void testEhcacheParallelGets() throws Exception { Settings settings = Settings.builder().build(); MockEventListener mockEventListener = new MockEventListener<>(); try (NodeEnvironment env = newNodeEnvironment(settings)) { - StoreAwareCache ehcacheTest = new EhCacheDiskCache.Builder().setKeyType(String.class) - .setValueType(String.class) - .setSettings(settings) + StoreAwareCache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") .setThreadPoolAlias("ehcacheTest") .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") - .setSettingPrefix(SETTING_PREFIX) .setIsEventListenerModeSync(true) // For accurate count + .setKeyType(String.class) + .setValueType(String.class) + .setSettings(settings) 
.setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) .setEventListener(mockEventListener) @@ -165,12 +166,12 @@ public void testEhcacheParallelGets() throws Exception { public void testEhcacheKeyIterator() throws Exception { Settings settings = Settings.builder().build(); try (NodeEnvironment env = newNodeEnvironment(settings)) { - StoreAwareCache ehcacheTest = new EhCacheDiskCache.Builder().setKeyType(String.class) - .setValueType(String.class) - .setSettings(settings) + StoreAwareCache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") .setThreadPoolAlias("ehcacheTest") - .setSettingPrefix(SETTING_PREFIX) .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") + .setKeyType(String.class) + .setValueType(String.class) + .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) .setEventListener(new MockEventListener<>()) @@ -201,13 +202,13 @@ public void testEvictions() throws Exception { Settings settings = Settings.builder().build(); MockEventListener mockEventListener = new MockEventListener<>(); try (NodeEnvironment env = newNodeEnvironment(settings)) { - StoreAwareCache ehcacheTest = new EhCacheDiskCache.Builder().setKeyType(String.class) + StoreAwareCache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") + .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") + .setIsEventListenerModeSync(true) + .setThreadPoolAlias("ehcacheTest") + .setKeyType(String.class) .setValueType(String.class) .setSettings(settings) - .setThreadPoolAlias("ehcacheTest") - .setSettingPrefix(SETTING_PREFIX) - .setIsEventListenerModeSync(true) - .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) .setEventListener(mockEventListener) @@ -230,13 +231,13 @@ public void 
testComputeIfAbsentConcurrently() throws Exception { Settings settings = Settings.builder().build(); MockEventListener mockEventListener = new MockEventListener<>(); try (NodeEnvironment env = newNodeEnvironment(settings)) { - StoreAwareCache ehcacheTest = new EhCacheDiskCache.Builder().setKeyType(String.class) - .setValueType(String.class) - .setSettings(settings) - .setThreadPoolAlias("ehcacheTest") - .setSettingPrefix(SETTING_PREFIX) + StoreAwareCache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") .setIsEventListenerModeSync(true) .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") + .setThreadPoolAlias("ehcacheTest") + .setKeyType(String.class) + .setValueType(String.class) + .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) .setEventListener(mockEventListener) @@ -288,7 +289,7 @@ public String load(String key) { } } assertEquals(1, numberOfTimesValueLoaded); - assertEquals(0, ((EhCacheDiskCache) ehcacheTest).getCompletableFutureMap().size()); + assertEquals(0, ((EhcacheDiskCache) ehcacheTest).getCompletableFutureMap().size()); assertEquals(1, mockEventListener.onMissCount.get()); assertEquals(1, mockEventListener.onCachedCount.get()); assertEquals(numberOfRequest - 1, mockEventListener.onHitCount.get()); @@ -300,13 +301,13 @@ public void testComputeIfAbsentConcurrentlyAndThrowsException() throws Exception Settings settings = Settings.builder().build(); MockEventListener mockEventListener = new MockEventListener<>(); try (NodeEnvironment env = newNodeEnvironment(settings)) { - StoreAwareCache ehcacheTest = new EhCacheDiskCache.Builder().setKeyType(String.class) + StoreAwareCache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") + .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") + .setIsEventListenerModeSync(true) + .setThreadPoolAlias("ehcacheTest") + .setKeyType(String.class) 
.setValueType(String.class) .setSettings(settings) - .setThreadPoolAlias("ehcacheTest") - .setSettingPrefix(SETTING_PREFIX) - .setIsEventListenerModeSync(true) - .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) .setEventListener(mockEventListener) @@ -347,7 +348,7 @@ public String load(String key) throws Exception { phaser.arriveAndAwaitAdvance(); countDownLatch.await(); - assertEquals(0, ((EhCacheDiskCache) ehcacheTest).getCompletableFutureMap().size()); + assertEquals(0, ((EhcacheDiskCache) ehcacheTest).getCompletableFutureMap().size()); ehcacheTest.close(); } } @@ -356,13 +357,13 @@ public void testComputeIfAbsentWithNullValueLoading() throws Exception { Settings settings = Settings.builder().build(); MockEventListener mockEventListener = new MockEventListener<>(); try (NodeEnvironment env = newNodeEnvironment(settings)) { - StoreAwareCache ehcacheTest = new EhCacheDiskCache.Builder().setKeyType(String.class) - .setValueType(String.class) - .setSettings(settings) + StoreAwareCache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") .setThreadPoolAlias("ehcacheTest") - .setSettingPrefix(SETTING_PREFIX) .setIsEventListenerModeSync(true) .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") + .setKeyType(String.class) + .setValueType(String.class) + .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) .setEventListener(mockEventListener) @@ -408,7 +409,7 @@ public String load(String key) throws Exception { phaser.arriveAndAwaitAdvance(); countDownLatch.await(); - assertEquals(0, ((EhCacheDiskCache) ehcacheTest).getCompletableFutureMap().size()); + assertEquals(0, ((EhcacheDiskCache) ehcacheTest).getCompletableFutureMap().size()); ehcacheTest.close(); } } @@ -418,21 +419,13 @@ private static String generateRandomString(int length) { StringBuilder 
randomString = new StringBuilder(length); for (int i = 0; i < length; i++) { - int index = (int) (Math.random() * characters.length()); + int index = (int) (randomDouble() * characters.length()); randomString.append(characters.charAt(index)); } return randomString.toString(); } - // TODO: Remove this from here in final PR. - enum EventType { - ON_HIT, - ON_MISS, - ON_CACHED, - ON_REMOVAL; - } - class MockEventListener implements StoreAwareCacheEventListener { AtomicInteger onMissCount = new AtomicInteger(); diff --git a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/EhcachePluginTests.java b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/EhcachePluginTests.java new file mode 100644 index 0000000000000..61442c3aa66a5 --- /dev/null +++ b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/EhcachePluginTests.java @@ -0,0 +1,26 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.cache; + +import org.opensearch.common.cache.store.StoreAwareCache; +import org.opensearch.common.cache.store.enums.CacheStoreType; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.Map; + +public class EhcachePluginTests extends OpenSearchTestCase { + + private EhcacheCachePlugin ehcacheCachePlugin = new EhcacheCachePlugin(); + + public void testGetCacheStoreTypeMap() { + Map factoryMap = ehcacheCachePlugin.getCacheStoreTypeMap(); + assertNotNull(factoryMap); + assertNotNull(factoryMap.get(CacheStoreType.DISK)); + } +} diff --git a/server/licenses/slf4j-api-1.7.36.jar.sha1 b/plugins/discovery-ec2/licenses/slf4j-api-1.7.36.jar.sha1 similarity index 100% rename from server/licenses/slf4j-api-1.7.36.jar.sha1 rename to plugins/discovery-ec2/licenses/slf4j-api-1.7.36.jar.sha1 diff --git a/server/licenses/slf4j-api-LICENSE.txt b/plugins/discovery-ec2/licenses/slf4j-api-LICENSE.txt similarity index 100% rename from server/licenses/slf4j-api-LICENSE.txt rename to plugins/discovery-ec2/licenses/slf4j-api-LICENSE.txt diff --git a/server/licenses/slf4j-api-NOTICE.txt b/plugins/discovery-ec2/licenses/slf4j-api-NOTICE.txt similarity index 100% rename from server/licenses/slf4j-api-NOTICE.txt rename to plugins/discovery-ec2/licenses/slf4j-api-NOTICE.txt diff --git a/plugins/identity-shiro/licenses/slf4j-api-1.7.36.jar.sha1 b/plugins/identity-shiro/licenses/slf4j-api-1.7.36.jar.sha1 new file mode 100644 index 0000000000000..77b9917528382 --- /dev/null +++ b/plugins/identity-shiro/licenses/slf4j-api-1.7.36.jar.sha1 @@ -0,0 +1 @@ +6c62681a2f655b49963a5983b8b0950a6120ae14 \ No newline at end of file diff --git a/plugins/identity-shiro/licenses/slf4j-api-LICENSE.txt b/plugins/identity-shiro/licenses/slf4j-api-LICENSE.txt new file mode 100644 index 0000000000000..2be7689435062 --- /dev/null +++ b/plugins/identity-shiro/licenses/slf4j-api-LICENSE.txt @@ -0,0 +1,21 @@ +Copyright (c) 2004-2022 QOS.ch +All rights reserved. 
+ +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/plugins/identity-shiro/licenses/slf4j-api-NOTICE.txt b/plugins/identity-shiro/licenses/slf4j-api-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/ingest-attachment/licenses/slf4j-api-1.7.36.jar.sha1 b/plugins/ingest-attachment/licenses/slf4j-api-1.7.36.jar.sha1 new file mode 100644 index 0000000000000..77b9917528382 --- /dev/null +++ b/plugins/ingest-attachment/licenses/slf4j-api-1.7.36.jar.sha1 @@ -0,0 +1 @@ +6c62681a2f655b49963a5983b8b0950a6120ae14 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/slf4j-api-LICENSE.txt b/plugins/ingest-attachment/licenses/slf4j-api-LICENSE.txt new file mode 100644 index 0000000000000..2be7689435062 --- /dev/null +++ b/plugins/ingest-attachment/licenses/slf4j-api-LICENSE.txt @@ -0,0 +1,21 @@ +Copyright (c) 2004-2022 QOS.ch +All rights reserved. 
+ +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/slf4j-api-NOTICE.txt b/plugins/ingest-attachment/licenses/slf4j-api-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-azure/licenses/slf4j-api-1.7.36.jar.sha1 b/plugins/repository-azure/licenses/slf4j-api-1.7.36.jar.sha1 new file mode 100644 index 0000000000000..77b9917528382 --- /dev/null +++ b/plugins/repository-azure/licenses/slf4j-api-1.7.36.jar.sha1 @@ -0,0 +1 @@ +6c62681a2f655b49963a5983b8b0950a6120ae14 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/slf4j-api-LICENSE.txt b/plugins/repository-azure/licenses/slf4j-api-LICENSE.txt new file mode 100644 index 0000000000000..2be7689435062 --- /dev/null +++ b/plugins/repository-azure/licenses/slf4j-api-LICENSE.txt @@ -0,0 +1,21 @@ +Copyright (c) 2004-2022 QOS.ch +All rights reserved. 
+ +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/plugins/repository-azure/licenses/slf4j-api-NOTICE.txt b/plugins/repository-azure/licenses/slf4j-api-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-hdfs/licenses/slf4j-api-1.7.36.jar.sha1 b/plugins/repository-hdfs/licenses/slf4j-api-1.7.36.jar.sha1 new file mode 100644 index 0000000000000..77b9917528382 --- /dev/null +++ b/plugins/repository-hdfs/licenses/slf4j-api-1.7.36.jar.sha1 @@ -0,0 +1 @@ +6c62681a2f655b49963a5983b8b0950a6120ae14 \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/slf4j-api-LICENSE.txt b/plugins/repository-hdfs/licenses/slf4j-api-LICENSE.txt new file mode 100644 index 0000000000000..2be7689435062 --- /dev/null +++ b/plugins/repository-hdfs/licenses/slf4j-api-LICENSE.txt @@ -0,0 +1,21 @@ +Copyright (c) 2004-2022 QOS.ch +All rights reserved. 
+ +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/slf4j-api-NOTICE.txt b/plugins/repository-hdfs/licenses/slf4j-api-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-s3/licenses/slf4j-api-1.7.36.jar.sha1 b/plugins/repository-s3/licenses/slf4j-api-1.7.36.jar.sha1 new file mode 100644 index 0000000000000..77b9917528382 --- /dev/null +++ b/plugins/repository-s3/licenses/slf4j-api-1.7.36.jar.sha1 @@ -0,0 +1 @@ +6c62681a2f655b49963a5983b8b0950a6120ae14 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/slf4j-api-LICENSE.txt b/plugins/repository-s3/licenses/slf4j-api-LICENSE.txt new file mode 100644 index 0000000000000..2be7689435062 --- /dev/null +++ b/plugins/repository-s3/licenses/slf4j-api-LICENSE.txt @@ -0,0 +1,21 @@ +Copyright (c) 2004-2022 QOS.ch +All rights reserved. 
+ +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
\ No newline at end of file diff --git a/plugins/repository-s3/licenses/slf4j-api-NOTICE.txt b/plugins/repository-s3/licenses/slf4j-api-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/transport-nio/build.gradle b/plugins/transport-nio/build.gradle index 4e503413c265b..8c0ee8ba718ac 100644 --- a/plugins/transport-nio/build.gradle +++ b/plugins/transport-nio/build.gradle @@ -103,6 +103,13 @@ thirdPartyAudit { 'org.jboss.marshalling.MarshallingConfiguration', 'org.jboss.marshalling.Unmarshaller', + // from io.netty.util.internal.logging.InternalLoggerFactory (netty) - it's optional + 'org.slf4j.helpers.FormattingTuple', + 'org.slf4j.helpers.MessageFormatter', + 'org.slf4j.Logger', + 'org.slf4j.LoggerFactory', + 'org.slf4j.spi.LocationAwareLogger', + 'com.google.protobuf.nano.CodedOutputByteBufferNano', 'com.google.protobuf.nano.MessageNano', 'com.ning.compress.BufferRecycler', diff --git a/plugins/transport-reactor-netty4/build.gradle b/plugins/transport-reactor-netty4/build.gradle index 74ceb2ab5687a..7d7eb330b4a55 100644 --- a/plugins/transport-reactor-netty4/build.gradle +++ b/plugins/transport-reactor-netty4/build.gradle @@ -136,6 +136,13 @@ thirdPartyAudit { 'org.jboss.marshalling.MarshallingConfiguration', 'org.jboss.marshalling.Unmarshaller', + // from io.netty.util.internal.logging.InternalLoggerFactory (netty) - it's optional + 'org.slf4j.helpers.FormattingTuple', + 'org.slf4j.helpers.MessageFormatter', + 'org.slf4j.Logger', + 'org.slf4j.LoggerFactory', + 'org.slf4j.spi.LocationAwareLogger', + 'com.google.protobuf.nano.CodedOutputByteBufferNano', 'com.google.protobuf.nano.MessageNano', 'com.ning.compress.BufferRecycler', diff --git a/server/build.gradle b/server/build.gradle index 85f16cee83249..e36498bf1038b 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -124,9 +124,6 @@ dependencies { api "com.google.protobuf:protobuf-java:${versions.protobuf}" api 
"jakarta.annotation:jakarta.annotation-api:${versions.jakarta_annotation}" - api "org.ehcache:ehcache:${versions.ehcache}" - api "org.slf4j:slf4j-api:${versions.slf4j}" - testImplementation(project(":test:framework")) { // tests use the locally compiled version of server exclude group: 'org.opensearch', module: 'server' diff --git a/server/src/main/java/org/opensearch/common/cache/provider/CacheProvider.java b/server/src/main/java/org/opensearch/common/cache/provider/CacheProvider.java new file mode 100644 index 0000000000000..fd046757a2a53 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/provider/CacheProvider.java @@ -0,0 +1,91 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.cache.provider; + +import org.opensearch.common.cache.store.StoreAwareCache; +import org.opensearch.common.cache.store.enums.CacheStoreType; +import org.opensearch.common.settings.Setting; +import org.opensearch.common.settings.Settings; +import org.opensearch.plugins.CachePlugin; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * Holds all the cache factories and provides a way to fetch them when needed. 
+ */ +public class CacheProvider { + + private final Map> cacheStoreTypeFactories; + + private final Settings settings; + + public CacheProvider(List cachePlugins, Settings settings) { + this.cacheStoreTypeFactories = getCacheStoreTypeFactories(cachePlugins); + this.settings = settings; + } + + private Map> getCacheStoreTypeFactories(List cachePlugins) { + Map> cacheStoreTypeFactories = new HashMap<>(); + for (CachePlugin cachePlugin : cachePlugins) { + Map factoryMap = cachePlugin.getCacheStoreTypeMap(); + for (Map.Entry entry : factoryMap.entrySet()) { + cacheStoreTypeFactories.computeIfAbsent(entry.getKey(), k -> new ArrayList<>()).add(entry.getValue()); + } + } + return Collections.unmodifiableMap(cacheStoreTypeFactories); + } + + public Map> getCacheStoreTypeFactories() { + return cacheStoreTypeFactories; + } + + /** + * Given a map of storeType and cacheName setting, extract a specific implementation. + * type. + * @param cacheStoreTypeSettings Setting map + * @return CacheStoreType + */ + public Map getCacheStoreType(Map> cacheStoreTypeSettings) { + Map cacheStoreTypeFactoryMap = new HashMap<>(); + for (Map.Entry> cacheStoreTypeFactoryEntry : cacheStoreTypeFactories.entrySet()) { + CacheStoreType cacheStoreType = cacheStoreTypeFactoryEntry.getKey(); + if (!cacheStoreTypeSettings.containsKey(cacheStoreType)) { + continue; + } + for (StoreAwareCache.Factory factory : cacheStoreTypeFactoryEntry.getValue()) { + if (factory.getCacheName().equals(cacheStoreTypeSettings.get(cacheStoreType).get(settings))) { + cacheStoreTypeFactoryMap.put(cacheStoreType, factory); + break; + } + } + } + return cacheStoreTypeFactoryMap; + } + + /** + * Cache types available. 
+ */ + public enum CacheType { + EHCACHE("ehcache"); + + private final String value; + + CacheType(String value) { + this.value = value; + } + + public String getValue() { + return value; + } + } +} diff --git a/server/src/main/java/org/opensearch/common/cache/provider/package-info.java b/server/src/main/java/org/opensearch/common/cache/provider/package-info.java new file mode 100644 index 0000000000000..24221f222f93f --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/provider/package-info.java @@ -0,0 +1,10 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** Base package for cache providers. */ +package org.opensearch.common.cache.provider; diff --git a/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCache.java b/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCache.java index 45ca48d94c140..4cf000dfff8a6 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCache.java +++ b/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCache.java @@ -8,7 +8,9 @@ package org.opensearch.common.cache.store; +import org.opensearch.common.annotation.ExperimentalApi; import org.opensearch.common.cache.ICache; +import org.opensearch.common.cache.store.config.StoreAwareCacheConfig; import org.opensearch.common.cache.store.enums.CacheStoreType; /** @@ -18,6 +20,16 @@ * * @opensearch.experimental */ +@ExperimentalApi public interface StoreAwareCache extends ICache { CacheStoreType getTierType(); + + /** + * Provides a way to create a new cache. 
+ */ + interface Factory { + StoreAwareCache create(StoreAwareCacheConfig storeAwareCacheConfig); + + String getCacheName(); + } } diff --git a/server/src/main/java/org/opensearch/common/cache/store/builders/StoreAwareCacheBuilder.java b/server/src/main/java/org/opensearch/common/cache/store/builders/StoreAwareCacheBuilder.java index fc5aa48aae90f..3b52afebf5579 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/builders/StoreAwareCacheBuilder.java +++ b/server/src/main/java/org/opensearch/common/cache/store/builders/StoreAwareCacheBuilder.java @@ -10,6 +10,7 @@ import org.opensearch.common.cache.store.StoreAwareCache; import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; +import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import java.util.function.ToLongBiFunction; @@ -31,6 +32,8 @@ public abstract class StoreAwareCacheBuilder { private StoreAwareCacheEventListener eventListener; + private Settings settings; + public StoreAwareCacheBuilder() {} public StoreAwareCacheBuilder setMaximumWeightInBytes(long sizeInBytes) { @@ -53,6 +56,11 @@ public StoreAwareCacheBuilder setEventListener(StoreAwareCacheEventListene return this; } + public StoreAwareCacheBuilder setSettings(Settings settings) { + this.settings = settings; + return this; + } + public long getMaxWeightInBytes() { return maxWeightInBytes; } @@ -66,7 +74,11 @@ public ToLongBiFunction getWeigher() { } public StoreAwareCacheEventListener getEventListener() { - return eventListener; + return this.eventListener; + } + + public Settings getSettings() { + return settings; } public abstract StoreAwareCache build(); diff --git a/server/src/main/java/org/opensearch/common/cache/store/config/StoreAwareCacheConfig.java b/server/src/main/java/org/opensearch/common/cache/store/config/StoreAwareCacheConfig.java new file mode 100644 index 0000000000000..514c0c1bd8708 --- /dev/null +++ 
b/server/src/main/java/org/opensearch/common/cache/store/config/StoreAwareCacheConfig.java @@ -0,0 +1,195 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.cache.store.config; + +import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.unit.TimeValue; + +import java.util.function.ToLongBiFunction; + +/** + * Configurations related to store aware caches + * + * @opensearch.internal + */ +public class StoreAwareCacheConfig { + + private long maxWeightInBytes; + + private ToLongBiFunction weigher; + + private TimeValue expireAfterAcess; + + private StoreAwareCacheEventListener eventListener; + + private Settings settings; + + private Class keyType; + + private Class valueType; + + private String storagePath; + + private String threadPoolAlias; + + private String cacheAlias; + + private String settingPrefix; + + // Provides capability to make event listener to run in sync/async mode. 
+ private boolean isEventListenerModeSync; + + private StoreAwareCacheConfig(Builder builder) { + this.cacheAlias = builder.cacheAlias; + this.keyType = builder.keyType; + } + + public long getMaxWeightInBytes() { + return maxWeightInBytes; + } + + public ToLongBiFunction getWeigher() { + return weigher; + } + + public TimeValue getExpireAfterAcess() { + return expireAfterAcess; + } + + public StoreAwareCacheEventListener getEventListener() { + return eventListener; + } + + public boolean isEventListenerModeSync() { + return isEventListenerModeSync; + } + + public Class getKeyType() { + return keyType; + } + + public Class getValueType() { + return valueType; + } + + public String getCacheAlias() { + return cacheAlias; + } + + public Settings getSettings() { + return settings; + } + + public String getSettingPrefix() { + return settingPrefix; + } + + public String getStoragePath() { + return storagePath; + } + + public String getThreadPoolAlias() { + return threadPoolAlias; + } + + /** + * Builder class to build Cache config related parameters. + * @param Type of key. + * @param Type of value. + */ + public static class Builder { + + private long maxWeightInBytes; + + private ToLongBiFunction weigher; + + private TimeValue expireAfterAcess; + + private StoreAwareCacheEventListener eventListener; + + private Settings settings; + + private Class keyType; + + private Class valueType; + + private String storagePath; + + private String threadPoolAlias; + + private String cacheAlias; + + private String settingPrefix; + + // Provides capability to make event listener to run in sync/async mode. 
+ private boolean isEventListenerModeSync; + + public Builder() {} + + public Builder setMaxWeightInBytes(long maxWeightInBytes) { + this.maxWeightInBytes = maxWeightInBytes; + return this; + } + + public Builder setWeigher(ToLongBiFunction weigher) { + this.weigher = weigher; + return this; + } + + public Builder setExpireAfterAcess(TimeValue expireAfterAcess) { + this.expireAfterAcess = expireAfterAcess; + return this; + } + + public Builder setEventListener(StoreAwareCacheEventListener listener) { + this.eventListener = listener; + return this; + } + + public Builder setSettings(Settings settings) { + this.settings = settings; + return this; + } + + public Builder setKeyType(Class keyType) { + this.keyType = keyType; + return this; + } + + public Builder setValueType(Class keyType) { + this.keyType = keyType; + return this; + } + + public Builder setStoragePath(String storagePath) { + this.storagePath = storagePath; + return this; + } + + public Builder setThreadPoolAlias(String threadPoolAlias) { + this.threadPoolAlias = threadPoolAlias; + return this; + } + + public Builder setCacheAlias(String cacheAlias) { + this.cacheAlias = cacheAlias; + return this; + } + + public Builder setIsEventListenerModeSync(boolean isEventListenerModeSync) { + this.isEventListenerModeSync = isEventListenerModeSync; + return this; + } + + public StoreAwareCacheConfig build() { + return new StoreAwareCacheConfig<>(this); + } + } +} diff --git a/server/src/main/java/org/opensearch/common/cache/store/config/package-info.java b/server/src/main/java/org/opensearch/common/cache/store/config/package-info.java new file mode 100644 index 0000000000000..6b662a8af3f9d --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/store/config/package-info.java @@ -0,0 +1,10 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +/** Base package for store aware cache config */ +package org.opensearch.common.cache.store.config; diff --git a/server/src/main/java/org/opensearch/common/cache/store/enums/CacheStoreType.java b/server/src/main/java/org/opensearch/common/cache/store/enums/CacheStoreType.java index 04c0825787b66..1e1aeba332c2f 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/enums/CacheStoreType.java +++ b/server/src/main/java/org/opensearch/common/cache/store/enums/CacheStoreType.java @@ -8,11 +8,14 @@ package org.opensearch.common.cache.store.enums; +import org.opensearch.common.annotation.ExperimentalApi; + /** * Cache store types in tiered cache. * * @opensearch.internal */ +@ExperimentalApi public enum CacheStoreType { ON_HEAP, diff --git a/server/src/main/java/org/opensearch/node/Node.java b/server/src/main/java/org/opensearch/node/Node.java index 8510122c39fcb..420c0aca3d129 100644 --- a/server/src/main/java/org/opensearch/node/Node.java +++ b/server/src/main/java/org/opensearch/node/Node.java @@ -83,6 +83,7 @@ import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.SetOnce; import org.opensearch.common.StopWatch; +import org.opensearch.common.cache.provider.CacheProvider; import org.opensearch.common.inject.Injector; import org.opensearch.common.inject.Key; import org.opensearch.common.inject.Module; @@ -178,6 +179,7 @@ import org.opensearch.persistent.PersistentTasksService; import org.opensearch.plugins.ActionPlugin; import org.opensearch.plugins.AnalysisPlugin; +import org.opensearch.plugins.CachePlugin; import org.opensearch.plugins.CircuitBreakerPlugin; import org.opensearch.plugins.ClusterPlugin; import org.opensearch.plugins.CryptoKeyProviderPlugin; @@ -789,6 +791,7 @@ protected Node( final SearchRequestSlowLog searchRequestSlowLog = new SearchRequestSlowLog(clusterService); remoteStoreStatsTrackerFactory = new RemoteStoreStatsTrackerFactory(clusterService, settings); + CacheProvider cacheProvider = new 
CacheProvider(pluginsService.filterPlugins(CachePlugin.class), settings); final IndicesService indicesService = new IndicesService( settings, pluginsService, diff --git a/server/src/main/java/org/opensearch/plugins/CachePlugin.java b/server/src/main/java/org/opensearch/plugins/CachePlugin.java new file mode 100644 index 0000000000000..1774236489048 --- /dev/null +++ b/server/src/main/java/org/opensearch/plugins/CachePlugin.java @@ -0,0 +1,28 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.plugins; + +import org.opensearch.common.annotation.ExperimentalApi; +import org.opensearch.common.cache.store.StoreAwareCache; +import org.opensearch.common.cache.store.enums.CacheStoreType; + +import java.util.Map; + +/** + * Plugin to extend cache related classes + * + * @opensearch.experimental + */ +@ExperimentalApi +public interface CachePlugin { + + Map getCacheStoreTypeMap(); + + String getName(); +} From c74992f7c3423277b27e6f590bdea02cacf4f8e7 Mon Sep 17 00:00:00 2001 From: Sagar Upadhyaya Date: Thu, 25 Jan 2024 13:25:16 -0800 Subject: [PATCH 06/22] Fixing gradle failures Signed-off-by: Sagar Upadhyaya --- plugins/cache-ehcache/.gitignore | 29 ------------------- .../licenses/slf4j-api-1.7.36.jar.sha1 | 1 + .../crypto-kms/licenses/slf4j-api-LICENSE.txt | 21 ++++++++++++++ .../crypto-kms/licenses/slf4j-api-NOTICE.txt | 0 .../org/opensearch/bootstrap/security.policy | 5 ---- 5 files changed, 22 insertions(+), 34 deletions(-) delete mode 100644 plugins/cache-ehcache/.gitignore create mode 100644 plugins/crypto-kms/licenses/slf4j-api-1.7.36.jar.sha1 create mode 100644 plugins/crypto-kms/licenses/slf4j-api-LICENSE.txt create mode 100644 plugins/crypto-kms/licenses/slf4j-api-NOTICE.txt diff --git a/plugins/cache-ehcache/.gitignore b/plugins/cache-ehcache/.gitignore deleted file mode 
100644 index f68d1099657e3..0000000000000 --- a/plugins/cache-ehcache/.gitignore +++ /dev/null @@ -1,29 +0,0 @@ -### IntelliJ IDEA ### -out/ -!**/src/main/**/out/ -!**/src/test/**/out/ - -### Eclipse ### -.apt_generated -.classpath -.factorypath -.project -.settings -.springBeans -.sts4-cache -bin/ -!**/src/main/**/bin/ -!**/src/test/**/bin/ - -### NetBeans ### -/nbproject/private/ -/nbbuild/ -/dist/ -/nbdist/ -/.nb-gradle/ - -### VS Code ### -.vscode/ - -### Mac OS ### -.DS_Store \ No newline at end of file diff --git a/plugins/crypto-kms/licenses/slf4j-api-1.7.36.jar.sha1 b/plugins/crypto-kms/licenses/slf4j-api-1.7.36.jar.sha1 new file mode 100644 index 0000000000000..77b9917528382 --- /dev/null +++ b/plugins/crypto-kms/licenses/slf4j-api-1.7.36.jar.sha1 @@ -0,0 +1 @@ +6c62681a2f655b49963a5983b8b0950a6120ae14 \ No newline at end of file diff --git a/plugins/crypto-kms/licenses/slf4j-api-LICENSE.txt b/plugins/crypto-kms/licenses/slf4j-api-LICENSE.txt new file mode 100644 index 0000000000000..2be7689435062 --- /dev/null +++ b/plugins/crypto-kms/licenses/slf4j-api-LICENSE.txt @@ -0,0 +1,21 @@ +Copyright (c) 2004-2022 QOS.ch +All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/plugins/crypto-kms/licenses/slf4j-api-NOTICE.txt b/plugins/crypto-kms/licenses/slf4j-api-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/server/src/main/resources/org/opensearch/bootstrap/security.policy b/server/src/main/resources/org/opensearch/bootstrap/security.policy index a07ea064928e3..e1226345ef961 100644 --- a/server/src/main/resources/org/opensearch/bootstrap/security.policy +++ b/server/src/main/resources/org/opensearch/bootstrap/security.policy @@ -187,9 +187,4 @@ grant { permission java.io.FilePermission "/sys/fs/cgroup/cpuacct/-", "read"; permission java.io.FilePermission "/sys/fs/cgroup/memory", "read"; permission java.io.FilePermission "/sys/fs/cgroup/memory/-", "read"; - - // For ehcache - permission java.lang.RuntimePermission "createClassLoader"; - permission java.lang.RuntimePermission "accessClassInPackage.sun.misc"; - }; From 06386b0304b42cee15571aec163316a111eb0c47 Mon Sep 17 00:00:00 2001 From: Sagar Upadhyaya Date: Thu, 25 Jan 2024 13:32:18 -0800 Subject: [PATCH 07/22] Making ICache extend Closeable Signed-off-by: Sagar Upadhyaya --- .../src/main/java/org/opensearch/common/cache/ICache.java | 6 +++--- .../opensearch/common/cache/tier/TieredSpilloverCache.java | 3 ++- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/server/src/main/java/org/opensearch/common/cache/ICache.java b/server/src/main/java/org/opensearch/common/cache/ICache.java index 0eb778034e417..fad2a31786825 100644 --- a/server/src/main/java/org/opensearch/common/cache/ICache.java +++ b/server/src/main/java/org/opensearch/common/cache/ICache.java @@ -10,6 +10,8 @@ import org.opensearch.common.cache.stats.CacheStats; +import 
java.io.Closeable; + /** * Represents a cache interface. * @param Type of key. @@ -17,7 +19,7 @@ * * @opensearch.experimental */ -public interface ICache { +public interface ICache extends Closeable { V get(K key); void put(K key, V value); @@ -34,7 +36,5 @@ public interface ICache { void refresh(); - void close(); - CacheStats stats(); } diff --git a/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCache.java b/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCache.java index 2216b90b69d69..027eef358c2fa 100644 --- a/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCache.java +++ b/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCache.java @@ -21,6 +21,7 @@ import org.opensearch.common.util.concurrent.ReleasableLock; import org.opensearch.common.util.iterable.Iterables; +import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -177,7 +178,7 @@ public void refresh() { } @Override - public void close() { + public void close() throws IOException { for (StoreAwareCache storeAwareCache : cacheList) { storeAwareCache.close(); } From 8d48951feb478cf6e56d84790db42877385a59f0 Mon Sep 17 00:00:00 2001 From: Sagar Upadhyaya Date: Mon, 29 Jan 2024 10:36:07 -0800 Subject: [PATCH 08/22] Exposing plugin based settings and refactoring Signed-off-by: Sagar Upadhyaya --- plugins/cache-ehcache/build.gradle | 13 ++ .../opensearch/cache/EhcacheCachePlugin.java | 20 +++ .../org/opensearch/cache/EhcacheSettings.java | 156 +++++++++++++++++- .../{ => store/disk}/EhcacheDiskCache.java | 119 ++++++++----- .../cache/store/disk/package-info.java | 11 ++ .../plugin-metadata/plugin-security.policy | 6 +- .../disk}/EhCacheDiskCacheTests.java | 11 +- .../opensearch/common/cache/CacheType.java | 29 ++++ .../common/cache/provider/CacheProvider.java | 41 ----- .../common/cache/settings/CacheSettings.java | 50 ++++++ .../common/cache/settings/package-info.java | 10 ++ 
.../common/cache/store/StoreAwareCache.java | 4 +- .../store/config/StoreAwareCacheConfig.java | 111 +------------ .../cache/store/enums/CacheStoreType.java | 2 +- .../StoreAwareCacheEventListener.java | 4 +- .../opensearch/common/settings/Setting.java | 13 +- .../org/opensearch/plugins/CachePlugin.java | 13 ++ .../cache/provider/CacheProviderTests.java | 40 +++++ .../common/settings/SettingTests.java | 12 ++ 19 files changed, 464 insertions(+), 201 deletions(-) rename plugins/cache-ehcache/src/main/java/org/opensearch/cache/{ => store/disk}/EhcacheDiskCache.java (84%) create mode 100644 plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/package-info.java rename plugins/cache-ehcache/src/test/java/org/opensearch/cache/{ => store/disk}/EhCacheDiskCacheTests.java (97%) create mode 100644 server/src/main/java/org/opensearch/common/cache/CacheType.java create mode 100644 server/src/main/java/org/opensearch/common/cache/settings/CacheSettings.java create mode 100644 server/src/main/java/org/opensearch/common/cache/settings/package-info.java create mode 100644 server/src/test/java/org/opensearch/common/cache/provider/CacheProviderTests.java diff --git a/plugins/cache-ehcache/build.gradle b/plugins/cache-ehcache/build.gradle index 87c38f616cbb3..ea5fed57929e7 100644 --- a/plugins/cache-ehcache/build.gradle +++ b/plugins/cache-ehcache/build.gradle @@ -11,6 +11,8 @@ import org.opensearch.gradle.Architecture import org.opensearch.gradle.OS import org.opensearch.gradle.info.BuildParams +apply plugin: 'opensearch.internal-cluster-test' + opensearchplugin { description 'Ehcache based cache implementation.' classname 'org.opensearch.cache.EhcacheCachePlugin' @@ -79,3 +81,14 @@ thirdPartyAudit { 'org.slf4j.event.Level' ) } + +tasks.named("bundlePlugin").configure { + from('config/cache-ehcache') { + into 'config' + } +} + +test { + // TODO: Adding permission in plugin-security.policy doesn't seem to work. 
+ systemProperty 'tests.security.manager', 'false' +} diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheCachePlugin.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheCachePlugin.java index faf0b5ca79f04..b82b52806a8b5 100644 --- a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheCachePlugin.java +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheCachePlugin.java @@ -8,13 +8,20 @@ package org.opensearch.cache; +import org.opensearch.cache.store.disk.EhcacheDiskCache; +import org.opensearch.common.cache.CacheType; import org.opensearch.common.cache.store.StoreAwareCache; import org.opensearch.common.cache.store.enums.CacheStoreType; +import org.opensearch.common.settings.Setting; import org.opensearch.plugins.CachePlugin; import org.opensearch.plugins.Plugin; +import java.util.ArrayList; +import java.util.List; import java.util.Map; +import static org.opensearch.cache.EhcacheSettings.CACHE_TYPE_MAP; + /** * Ehcache based cache plugin. 
*/ @@ -32,6 +39,19 @@ public Map getCacheStoreTypeMap() { return Map.of(CacheStoreType.DISK, new EhcacheDiskCache.EhcacheDiskCacheFactory()); } + @Override + public List> getSettings() { + List> settingList = new ArrayList<>(); + for (Map.Entry>>> entry : CACHE_TYPE_MAP.entrySet()) { + for (Map.Entry>> cacheStoreTypeMap : entry.getValue().entrySet()) { + for (Map.Entry> entry1 : cacheStoreTypeMap.getValue().entrySet()) { + settingList.add(entry1.getValue()); + } + } + } + return settingList; + } + @Override public String getName() { return EHCACHE_CACHE_PLUGIN; diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheSettings.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheSettings.java index 507ecb20f73b9..4eb9c6e31ce93 100644 --- a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheSettings.java +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheSettings.java @@ -8,24 +8,42 @@ package org.opensearch.cache; +import org.opensearch.common.cache.CacheType; +import org.opensearch.common.cache.store.enums.CacheStoreType; import org.opensearch.common.settings.Setting; +import org.opensearch.common.unit.TimeValue; + +import java.util.Map; + +import static org.opensearch.common.settings.Setting.Property.NodeScope; /** * Settings related to ehcache. 
*/ public class EhcacheSettings { - static final String SETTING_PREFIX = "cache.disk.ehcache"; + static final String DISK_CACHE_SETTING_SUFFIX = "disk.ehcache"; /** * Ehcache disk write minimum threads for its pool + * + * Setting pattern: {cache_type}.disk.ehcache.min_threads */ - public static final Setting DISK_WRITE_MINIMUM_THREADS = Setting.intSetting(SETTING_PREFIX + "min_threads", 2, 1, 5); + + public static final Setting.AffixSetting DISK_WRITE_MINIMUM_THREADS_SETTING = Setting.suffixKeySetting( + DISK_CACHE_SETTING_SUFFIX + ".min_threads", + (key) -> Setting.intSetting(key, 2, 1, 5, NodeScope) + ); /** * Ehcache disk write maximum threads for its pool + * + * Setting pattern: {cache_type}.disk.ehcache.max_threads */ - public static final Setting DISK_WRITE_MAXIMUM_THREADS = Setting.intSetting(SETTING_PREFIX + ".max_threads", 2, 1, 20); + public static final Setting.AffixSetting DISK_WRITE_MAXIMUM_THREADS_SETTING = Setting.suffixKeySetting( + DISK_CACHE_SETTING_SUFFIX + ".max_threads", + (key) -> Setting.intSetting(key, 2, 1, 20, NodeScope) + ); /** * Not be to confused with number of disk segments, this is different. Defines @@ -34,8 +52,13 @@ public class EhcacheSettings { * queue ie write concurrency is 1. Check OffHeapDiskStoreConfiguration and DiskWriteThreadPool. * * Default is 1 within ehcache. + * + * */ - public static final Setting DISK_WRITE_CONCURRENCY = Setting.intSetting(SETTING_PREFIX + ".concurrency", 1, 1, 3); + public static final Setting.AffixSetting DISK_WRITE_CONCURRENCY_SETTING = Setting.suffixKeySetting( + DISK_CACHE_SETTING_SUFFIX + ".concurrency", + (key) -> Setting.intSetting(key, 1, 1, 3, NodeScope) + ); /** * Defines how many segments the disk cache is separated into. Higher number achieves greater concurrency but @@ -43,7 +66,130 @@ public class EhcacheSettings { * * Default value is 16 within Ehcache. 
*/ - public static final Setting DISK_SEGMENTS = Setting.intSetting(SETTING_PREFIX + ".segments", 16, 1, 32); + public static final Setting.AffixSetting DISK_SEGMENTS_SETTING = Setting.suffixKeySetting( + DISK_CACHE_SETTING_SUFFIX + ".segments", + (key) -> Setting.intSetting(key, 16, 1, 32, NodeScope) + ); + + /** + * Storage path for disk cache. + */ + public static final Setting.AffixSetting DISK_STORAGE_PATH_SETTING = Setting.suffixKeySetting( + DISK_CACHE_SETTING_SUFFIX + ".storage.path", + (key) -> Setting.simpleString(key, "", NodeScope) + ); + + /** + * Disk cache alias. + */ + public static final Setting.AffixSetting DISK_CACHE_ALIAS_SETTING = Setting.suffixKeySetting( + DISK_CACHE_SETTING_SUFFIX + ".alias", + (key) -> Setting.simpleString(key, "", NodeScope) + ); + + /** + * Disk cache expire after access setting. + */ + public static final Setting.AffixSetting DISK_CACHE_EXPIRE_AFTER_ACCESS_SETTING = Setting.suffixKeySetting( + DISK_CACHE_SETTING_SUFFIX + ".expire_after_access", + (key) -> Setting.positiveTimeSetting(key, new TimeValue(0), NodeScope) + ); + + /** + * Disk cache max size setting. + */ + public static final Setting.AffixSetting DISK_CACHE_MAX_SIZE_IN_BYTES_SETTING = Setting.suffixKeySetting( + DISK_CACHE_SETTING_SUFFIX + ".max_size_in_bytes", + (key) -> Setting.longSetting(key, 1073741824L, NodeScope) + ); + + /** + * Key for disk segment. + */ + public static final String DISK_SEGMENT_KEY = "disk_segment"; + /** + * Key for max size. + */ + public static final String DISK_MAX_SIZE_IN_BYTES_KEY = "max_size_in_bytes"; + /** + * Key for expire after access. + */ + public static final String DISK_CACHE_EXPIRE_AFTER_ACCESS_KEY = "disk_cache_expire_after_access_key"; + /** + * Key for cache alias. + */ + public static final String DISK_CACHE_ALIAS_KEY = "disk_cache_alias"; + /** + * Key for disk segment. + */ + public static final String DISK_SEGMENTS_KEY = "disk_segments"; + /** + * Key for disk write concurrency. 
+ */ + public static final String DISK_WRITE_CONCURRENCY_KEY = "disk_write_concurrency"; + /** + * Key for max threads. + */ + public static final String DISK_WRITE_MAXIMUM_THREADS_KEY = "disk_write_max_threads"; + /** + * Key for min threads. + */ + public static final String DISK_WRITE_MIN_THREADS_KEY = "disk_write_min_threads"; + /** + * Key for storage path. + */ + public static final String DISK_STORAGE_PATH_KEY = "disk_storage_path"; + + /** + * Map to store desired settings for a cache type. + */ + public static final Map>>> CACHE_TYPE_MAP = Map.of( + CacheType.INDICES_REQUEST_CACHE, + Map.of( + CacheStoreType.DISK, + Map.of( + DISK_SEGMENT_KEY, + DISK_SEGMENTS_SETTING.getConcreteSettingForNamespace(CacheType.INDICES_REQUEST_CACHE.getSettingPrefix()), + DISK_CACHE_EXPIRE_AFTER_ACCESS_KEY, + DISK_CACHE_EXPIRE_AFTER_ACCESS_SETTING.getConcreteSettingForNamespace(CacheType.INDICES_REQUEST_CACHE.getSettingPrefix()), + DISK_CACHE_ALIAS_KEY, + DISK_CACHE_ALIAS_SETTING.getConcreteSettingForNamespace(CacheType.INDICES_REQUEST_CACHE.getSettingPrefix()), + DISK_SEGMENTS_KEY, + DISK_SEGMENTS_SETTING.getConcreteSettingForNamespace(CacheType.INDICES_REQUEST_CACHE.getSettingPrefix()), + DISK_WRITE_CONCURRENCY_KEY, + DISK_WRITE_CONCURRENCY_SETTING.getConcreteSettingForNamespace(CacheType.INDICES_REQUEST_CACHE.getSettingPrefix()), + DISK_WRITE_MAXIMUM_THREADS_KEY, + DISK_WRITE_MAXIMUM_THREADS_SETTING.getConcreteSettingForNamespace(CacheType.INDICES_REQUEST_CACHE.getSettingPrefix()), + DISK_WRITE_MIN_THREADS_KEY, + DISK_WRITE_MINIMUM_THREADS_SETTING.getConcreteSettingForNamespace(CacheType.INDICES_REQUEST_CACHE.getSettingPrefix()), + DISK_STORAGE_PATH_KEY, + DISK_STORAGE_PATH_SETTING.getConcreteSettingForNamespace(CacheType.INDICES_REQUEST_CACHE.getSettingPrefix()), + DISK_MAX_SIZE_IN_BYTES_KEY, + DISK_CACHE_MAX_SIZE_IN_BYTES_SETTING.getConcreteSettingForNamespace(CacheType.INDICES_REQUEST_CACHE.getSettingPrefix()) + ) + + ) + ); + + /** + * Fetches setting list for a 
combination of cache type and store name. + * @param cacheType cache type + * @param cacheStoreType store type + * @return settings + */ + public static final Map> getSettingListForCacheTypeAndStore(CacheType cacheType, CacheStoreType cacheStoreType) { + Map>> cacheTypeSettings = CACHE_TYPE_MAP.get(cacheType); + if (cacheTypeSettings == null) { + throw new IllegalArgumentException("No settings exist with corresponding cache type: " + cacheType); + } + Map> settingList = cacheTypeSettings.get(cacheStoreType); + if (settingList == null) { + throw new IllegalArgumentException( + "No settings exist for cache store name: " + cacheStoreType + " associated with cache type: " + cacheType + ); + } + return settingList; + } /** * Default constructor. Added to fix javadocs. diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheDiskCache.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java similarity index 84% rename from plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheDiskCache.java rename to plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java index 8ecf38169fe8e..36bbe76ff77fa 100644 --- a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheDiskCache.java +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java @@ -6,16 +6,17 @@ * compatible open source license. 
*/ -package org.opensearch.cache; +package org.opensearch.cache.store.disk; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.opensearch.OpenSearchException; +import org.opensearch.cache.EhcacheSettings; import org.opensearch.common.SuppressForbidden; import org.opensearch.common.annotation.ExperimentalApi; +import org.opensearch.common.cache.CacheType; import org.opensearch.common.cache.LoadAwareCacheLoader; import org.opensearch.common.cache.RemovalReason; -import org.opensearch.common.cache.provider.CacheProvider; import org.opensearch.common.cache.stats.CacheStats; import org.opensearch.common.cache.store.StoreAwareCache; import org.opensearch.common.cache.store.StoreAwareCacheRemovalNotification; @@ -25,6 +26,7 @@ import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; import org.opensearch.common.collect.Tuple; import org.opensearch.common.metrics.CounterMetric; +import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; @@ -34,6 +36,7 @@ import java.util.Map; import java.util.NoSuchElementException; import java.util.Objects; +import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutionException; @@ -57,10 +60,13 @@ import org.ehcache.spi.loaderwriter.CacheLoadingException; import org.ehcache.spi.loaderwriter.CacheWritingException; -import static org.opensearch.cache.EhcacheSettings.DISK_SEGMENTS; -import static org.opensearch.cache.EhcacheSettings.DISK_WRITE_CONCURRENCY; -import static org.opensearch.cache.EhcacheSettings.DISK_WRITE_MAXIMUM_THREADS; -import static org.opensearch.cache.EhcacheSettings.DISK_WRITE_MINIMUM_THREADS; +import static org.opensearch.cache.EhcacheSettings.DISK_CACHE_ALIAS_KEY; +import static org.opensearch.cache.EhcacheSettings.DISK_CACHE_EXPIRE_AFTER_ACCESS_KEY; +import static 
org.opensearch.cache.EhcacheSettings.DISK_MAX_SIZE_IN_BYTES_KEY; +import static org.opensearch.cache.EhcacheSettings.DISK_SEGMENT_KEY; +import static org.opensearch.cache.EhcacheSettings.DISK_WRITE_CONCURRENCY_KEY; +import static org.opensearch.cache.EhcacheSettings.DISK_WRITE_MAXIMUM_THREADS_KEY; +import static org.opensearch.cache.EhcacheSettings.DISK_WRITE_MIN_THREADS_KEY; /** * This variant of disk cache uses Ehcache underneath. @@ -75,6 +81,11 @@ public class EhcacheDiskCache implements StoreAwareCache { private static final Logger logger = LogManager.getLogger(EhcacheDiskCache.class); + // Unique id associated with this cache. + private final static String UNIQUE_ID = UUID.randomUUID().toString(); + private final static String THREAD_POOL_ALIAS_PREFIX = "ehcachePool"; + private final static int MINIMUM_MAX_SIZE_IN_BYTES = 1024 * 100; // 100KB + // A Cache manager can create many caches. private final PersistentCacheManager cacheManager; @@ -82,28 +93,16 @@ public class EhcacheDiskCache implements StoreAwareCache { private Cache cache; private final long maxWeightInBytes; private final String storagePath; - private final Class keyType; - private final Class valueType; - private final TimeValue expireAfterAccess; - private final DiskCacheStats stats = new DiskCacheStats(); - private final EhCacheEventListener ehCacheEventListener; - private final String threadPoolAlias; - private final Settings settings; - - private final static String DISK_CACHE_ALIAS = "ehDiskCache"; - - private final static String THREAD_POOL_ALIAS_PREFIX = "ehcachePool"; - - private final static int MINIMUM_MAX_SIZE_IN_BYTES = 1024 * 100; // 100KB - private final StoreAwareCacheEventListener eventListener; + private final CacheType cacheType; + private final String diskCacheAlias; /** * Used in computeIfAbsent to synchronize loading of a given key. 
This is needed as ehcache doesn't provide a @@ -119,9 +118,15 @@ private EhcacheDiskCache(Builder builder) { if (this.maxWeightInBytes <= MINIMUM_MAX_SIZE_IN_BYTES) { throw new IllegalArgumentException("Ehcache Disk tier cache size should be greater than " + MINIMUM_MAX_SIZE_IN_BYTES); } + this.cacheType = Objects.requireNonNull(builder.cacheType, "Cache type shouldn't be null"); + if (builder.diskCacheAlias == null || builder.diskCacheAlias.isBlank()) { + this.diskCacheAlias = this.cacheType + "#" + UNIQUE_ID; + } else { + this.diskCacheAlias = builder.diskCacheAlias; + } this.storagePath = Objects.requireNonNull(builder.storagePath, "Storage path shouldn't be null"); if (builder.threadPoolAlias == null || builder.threadPoolAlias.isBlank()) { - this.threadPoolAlias = THREAD_POOL_ALIAS_PREFIX + "DiskWrite"; + this.threadPoolAlias = THREAD_POOL_ALIAS_PREFIX + "DiskWrite#" + UNIQUE_ID; } else { this.threadPoolAlias = builder.threadPoolAlias; } @@ -161,8 +166,12 @@ public Duration getExpiryForUpdate(K key, Supplier oldValue, V newV .withService( new OffHeapDiskStoreConfiguration( this.threadPoolAlias, - DISK_WRITE_CONCURRENCY.get(settings), - DISK_SEGMENTS.get(settings) + (Integer) EhcacheSettings.getSettingListForCacheTypeAndStore(cacheType, CacheStoreType.DISK) + .get(DISK_WRITE_CONCURRENCY_KEY) + .get(settings), + (Integer) EhcacheSettings.getSettingListForCacheTypeAndStore(cacheType, CacheStoreType.DISK) + .get(DISK_SEGMENT_KEY) + .get(settings) ) ) ); @@ -203,9 +212,17 @@ private PersistentCacheManager buildCacheManager() { .with(CacheManagerBuilder.persistence(new File(storagePath))) .using( PooledExecutionServiceConfigurationBuilder.newPooledExecutionServiceConfigurationBuilder() - .defaultPool(THREAD_POOL_ALIAS_PREFIX + "Default", 1, 3) // Default pool used for other tasks like - // event listeners - .pool(this.threadPoolAlias, DISK_WRITE_MINIMUM_THREADS.get(settings), DISK_WRITE_MAXIMUM_THREADS.get(settings)) + .defaultPool(THREAD_POOL_ALIAS_PREFIX + 
"Default#" + UNIQUE_ID, 1, 3) // Default pool used for other tasks + // like event listeners + .pool( + this.threadPoolAlias, + (Integer) EhcacheSettings.getSettingListForCacheTypeAndStore(cacheType, CacheStoreType.DISK) + .get(DISK_WRITE_MIN_THREADS_KEY) + .get(settings), + (Integer) EhcacheSettings.getSettingListForCacheTypeAndStore(cacheType, CacheStoreType.DISK) + .get(DISK_WRITE_MAXIMUM_THREADS_KEY) + .get(settings) + ) .build() ) .build(true); @@ -364,10 +381,10 @@ public void refresh() { @Override public void close() { - cacheManager.removeCache(DISK_CACHE_ALIAS); + cacheManager.removeCache(this.diskCacheAlias); cacheManager.close(); try { - cacheManager.destroyCache(DISK_CACHE_ALIAS); + cacheManager.destroyCache(this.diskCacheAlias); } catch (CachePersistenceException e) { throw new OpenSearchException("Exception occurred while destroying ehcache and associated data", e); } @@ -497,24 +514,37 @@ public void onEvent(CacheEvent event) { /** * Factory to create an ehcache disk cache. */ - static class EhcacheDiskCacheFactory implements StoreAwareCache.Factory { + public static class EhcacheDiskCacheFactory implements StoreAwareCache.Factory { + + /** + * Ehcache disk cache name. + */ + public static final String EHCACHE_DISK_CACHE_NAME = "ehcacheDiskCache"; + + /** + * Default constructor. 
+ */ + public EhcacheDiskCacheFactory() {} @Override - public StoreAwareCache create(StoreAwareCacheConfig storeAwareCacheConfig) { - return new Builder().setStoragePath(storeAwareCacheConfig.getStoragePath()) - .setDiskCacheAlias(storeAwareCacheConfig.getCacheAlias()) - .setKeyType(storeAwareCacheConfig.getKeyType()) - .setValueType(storeAwareCacheConfig.getValueType()) - .setEventListener(storeAwareCacheConfig.getEventListener()) - .setExpireAfterAccess(storeAwareCacheConfig.getExpireAfterAcess()) - .setMaximumWeightInBytes(storeAwareCacheConfig.getMaxWeightInBytes()) - .setSettings(storeAwareCacheConfig.getSettings()) + public StoreAwareCache create(StoreAwareCacheConfig config, CacheType cacheType) { + Map> settingList = EhcacheSettings.getSettingListForCacheTypeAndStore(cacheType, CacheStoreType.DISK); + Settings settings = config.getSettings(); + + return new Builder().setStoragePath((String) settingList.get(DISK_SEGMENT_KEY).get(settings)) + .setDiskCacheAlias((String) settingList.get(DISK_CACHE_ALIAS_KEY).get(settings)) + .setKeyType((config.getKeyType())) + .setValueType(config.getValueType()) + .setEventListener(config.getEventListener()) + .setExpireAfterAccess((TimeValue) settingList.get(DISK_CACHE_EXPIRE_AFTER_ACCESS_KEY).get(settings)) + .setMaximumWeightInBytes((Long) settingList.get(DISK_MAX_SIZE_IN_BYTES_KEY).get(settings)) + .setSettings(settings) .build(); } @Override public String getCacheName() { - return CacheProvider.CacheType.EHCACHE.getValue(); + return EHCACHE_DISK_CACHE_NAME; } } @@ -525,6 +555,7 @@ public String getCacheName() { */ public static class Builder extends StoreAwareCacheBuilder { + private CacheType cacheType; private String storagePath; private String threadPoolAlias; @@ -543,6 +574,16 @@ public static class Builder extends StoreAwareCacheBuilder { */ public Builder() {} + /** + * Sets the desired cache type. 
+ * @param cacheType + * @return builder + */ + public Builder setCacheType(CacheType cacheType) { + this.cacheType = cacheType; + return this; + } + /** * Sets the key type of value. * @param keyType type of key diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/package-info.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/package-info.java new file mode 100644 index 0000000000000..79f8eec2f3f4c --- /dev/null +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/package-info.java @@ -0,0 +1,11 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ +/** + * Base package for disk cache related stuff. + */ +package org.opensearch.cache.store.disk; diff --git a/plugins/cache-ehcache/src/main/plugin-metadata/plugin-security.policy b/plugins/cache-ehcache/src/main/plugin-metadata/plugin-security.policy index cbd43446c6ba0..40007eea62dba 100644 --- a/plugins/cache-ehcache/src/main/plugin-metadata/plugin-security.policy +++ b/plugins/cache-ehcache/src/main/plugin-metadata/plugin-security.policy @@ -7,9 +7,7 @@ */ grant { - // For ehcache - permission java.lang.RuntimePermission "createClassLoader"; - permission java.lang.RuntimePermission "accessClassInPackage.sun.misc"; + permission java.lang.RuntimePermission "accessClassInPackage.sun.misc"; + permission java.lang.RuntimePermission "createClassLoader"; }; - diff --git a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/EhCacheDiskCacheTests.java b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java similarity index 97% rename from plugins/cache-ehcache/src/test/java/org/opensearch/cache/EhCacheDiskCacheTests.java rename to plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java index 
dc9354e26168a..cbbea4b319856 100644 --- a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/EhCacheDiskCacheTests.java +++ b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java @@ -6,8 +6,9 @@ * compatible open source license. */ -package org.opensearch.cache; +package org.opensearch.cache.store.disk; +import org.opensearch.common.cache.CacheType; import org.opensearch.common.cache.LoadAwareCacheLoader; import org.opensearch.common.cache.store.StoreAwareCache; import org.opensearch.common.cache.store.StoreAwareCacheRemovalNotification; @@ -45,6 +46,7 @@ public void testBasicGetAndPut() throws IOException { .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") .setKeyType(String.class) .setValueType(String.class) + .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) @@ -85,6 +87,7 @@ public void testConcurrentPut() throws Exception { .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") .setKeyType(String.class) .setValueType(String.class) + .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) @@ -129,6 +132,7 @@ public void testEhcacheParallelGets() throws Exception { .setIsEventListenerModeSync(true) // For accurate count .setKeyType(String.class) .setValueType(String.class) + .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) @@ -171,6 +175,7 @@ public void testEhcacheKeyIterator() throws Exception { .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") .setKeyType(String.class) .setValueType(String.class) + .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) 
.setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) @@ -208,6 +213,7 @@ public void testEvictions() throws Exception { .setThreadPoolAlias("ehcacheTest") .setKeyType(String.class) .setValueType(String.class) + .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) @@ -237,6 +243,7 @@ public void testComputeIfAbsentConcurrently() throws Exception { .setThreadPoolAlias("ehcacheTest") .setKeyType(String.class) .setValueType(String.class) + .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) @@ -307,6 +314,7 @@ public void testComputeIfAbsentConcurrentlyAndThrowsException() throws Exception .setThreadPoolAlias("ehcacheTest") .setKeyType(String.class) .setValueType(String.class) + .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) @@ -363,6 +371,7 @@ public void testComputeIfAbsentWithNullValueLoading() throws Exception { .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") .setKeyType(String.class) .setValueType(String.class) + .setCacheType(CacheType.INDICES_REQUEST_CACHE) .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) diff --git a/server/src/main/java/org/opensearch/common/cache/CacheType.java b/server/src/main/java/org/opensearch/common/cache/CacheType.java new file mode 100644 index 0000000000000..c5aeb7cd1fa40 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/CacheType.java @@ -0,0 +1,29 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.common.cache; + +import org.opensearch.common.annotation.ExperimentalApi; + +/** + * Cache types available within OpenSearch. + */ +@ExperimentalApi +public enum CacheType { + INDICES_REQUEST_CACHE("indices.requests.cache"); + + private final String settingPrefix; + + CacheType(String settingPrefix) { + this.settingPrefix = settingPrefix; + } + + public String getSettingPrefix() { + return settingPrefix; + } +} diff --git a/server/src/main/java/org/opensearch/common/cache/provider/CacheProvider.java b/server/src/main/java/org/opensearch/common/cache/provider/CacheProvider.java index fd046757a2a53..ed9dcc83a40f7 100644 --- a/server/src/main/java/org/opensearch/common/cache/provider/CacheProvider.java +++ b/server/src/main/java/org/opensearch/common/cache/provider/CacheProvider.java @@ -10,7 +10,6 @@ import org.opensearch.common.cache.store.StoreAwareCache; import org.opensearch.common.cache.store.enums.CacheStoreType; -import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; import org.opensearch.plugins.CachePlugin; @@ -48,44 +47,4 @@ private Map> getCacheStoreTypeFact public Map> getCacheStoreTypeFactories() { return cacheStoreTypeFactories; } - - /** - * Given a map of storeType and cacheName setting, extract a specific implementation. - * type. 
- * @param cacheStoreTypeSettings Setting map - * @return CacheStoreType - */ - public Map getCacheStoreType(Map> cacheStoreTypeSettings) { - Map cacheStoreTypeFactoryMap = new HashMap<>(); - for (Map.Entry> cacheStoreTypeFactoryEntry : cacheStoreTypeFactories.entrySet()) { - CacheStoreType cacheStoreType = cacheStoreTypeFactoryEntry.getKey(); - if (!cacheStoreTypeSettings.containsKey(cacheStoreType)) { - continue; - } - for (StoreAwareCache.Factory factory : cacheStoreTypeFactoryEntry.getValue()) { - if (factory.getCacheName().equals(cacheStoreTypeSettings.get(cacheStoreType).get(settings))) { - cacheStoreTypeFactoryMap.put(cacheStoreType, factory); - break; - } - } - } - return cacheStoreTypeFactoryMap; - } - - /** - * Cache types available. - */ - public enum CacheType { - EHCACHE("ehcache"); - - private final String value; - - CacheType(String value) { - this.value = value; - } - - public String getValue() { - return value; - } - } } diff --git a/server/src/main/java/org/opensearch/common/cache/settings/CacheSettings.java b/server/src/main/java/org/opensearch/common/cache/settings/CacheSettings.java new file mode 100644 index 0000000000000..9125b1f2bb8cf --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/settings/CacheSettings.java @@ -0,0 +1,50 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.cache.settings; + +import org.opensearch.common.annotation.ExperimentalApi; +import org.opensearch.common.cache.CacheType; +import org.opensearch.common.cache.store.enums.CacheStoreType; +import org.opensearch.common.settings.Setting; + +/** + * Settings related to cache. + */ +@ExperimentalApi +public class CacheSettings { + + /** + * Stores a disk cache store name for cache types within OpenSearch. + * Setting pattern: {cache_type}.disk.store.name. 
Example: indices.request.cache.disk.store.name + */ + public static final Setting.AffixSetting CACHE_TYPE_DISK_STORE_NAME = Setting.suffixKeySetting( + "disk.store.name", + (key) -> Setting.simpleString(key, "", Setting.Property.NodeScope) + ); + + /** + * Stores an onHeap cache store name for cache types within OpenSearch. + * Setting pattern: {cache_type}.onheap.store.name. + */ + public static final Setting.AffixSetting CACHE_TYPE_ONHEAP_STORE_NAME = Setting.suffixKeySetting( + "onheap.store.name", + (key) -> Setting.simpleString(key, "", Setting.Property.NodeScope) + ); + + public static Setting getConcreteSettingForCacheType(CacheType cacheType, CacheStoreType cacheStoreType) { + switch (cacheStoreType) { + case DISK: + return CACHE_TYPE_DISK_STORE_NAME.getConcreteSettingForNamespace(cacheType.getSettingPrefix()); + case ON_HEAP: + return CACHE_TYPE_ONHEAP_STORE_NAME.getConcreteSettingForNamespace(cacheType.getSettingPrefix()); + default: + throw new IllegalArgumentException("Invalid cache store type: " + cacheStoreType); + } + } +} diff --git a/server/src/main/java/org/opensearch/common/cache/settings/package-info.java b/server/src/main/java/org/opensearch/common/cache/settings/package-info.java new file mode 100644 index 0000000000000..7fa82021c5557 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/settings/package-info.java @@ -0,0 +1,10 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +/** Base package for cache settings */ +package org.opensearch.common.cache.settings; diff --git a/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCache.java b/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCache.java index 4cf000dfff8a6..46bb7f1be6986 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCache.java +++ b/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCache.java @@ -9,6 +9,7 @@ package org.opensearch.common.cache.store; import org.opensearch.common.annotation.ExperimentalApi; +import org.opensearch.common.cache.CacheType; import org.opensearch.common.cache.ICache; import org.opensearch.common.cache.store.config.StoreAwareCacheConfig; import org.opensearch.common.cache.store.enums.CacheStoreType; @@ -27,8 +28,9 @@ public interface StoreAwareCache extends ICache { /** * Provides a way to create a new cache. */ + @ExperimentalApi interface Factory { - StoreAwareCache create(StoreAwareCacheConfig storeAwareCacheConfig); + StoreAwareCache create(StoreAwareCacheConfig config, CacheType cacheType); String getCacheName(); } diff --git a/server/src/main/java/org/opensearch/common/cache/store/config/StoreAwareCacheConfig.java b/server/src/main/java/org/opensearch/common/cache/store/config/StoreAwareCacheConfig.java index 514c0c1bd8708..a9c734eca1e53 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/config/StoreAwareCacheConfig.java +++ b/server/src/main/java/org/opensearch/common/cache/store/config/StoreAwareCacheConfig.java @@ -8,25 +8,18 @@ package org.opensearch.common.cache.store.config; +import org.opensearch.common.annotation.ExperimentalApi; import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.TimeValue; - -import java.util.function.ToLongBiFunction; /** - * Configurations related to store aware caches + * Common configurations related to 
store aware caches. * - * @opensearch.internal + * @opensearch.experimental */ +@ExperimentalApi public class StoreAwareCacheConfig { - private long maxWeightInBytes; - - private ToLongBiFunction weigher; - - private TimeValue expireAfterAcess; - private StoreAwareCacheEventListener eventListener; private Settings settings; @@ -35,42 +28,14 @@ public class StoreAwareCacheConfig { private Class valueType; - private String storagePath; - - private String threadPoolAlias; - - private String cacheAlias; - - private String settingPrefix; - - // Provides capability to make event listener to run in sync/async mode. - private boolean isEventListenerModeSync; - private StoreAwareCacheConfig(Builder builder) { - this.cacheAlias = builder.cacheAlias; this.keyType = builder.keyType; } - public long getMaxWeightInBytes() { - return maxWeightInBytes; - } - - public ToLongBiFunction getWeigher() { - return weigher; - } - - public TimeValue getExpireAfterAcess() { - return expireAfterAcess; - } - public StoreAwareCacheEventListener getEventListener() { return eventListener; } - public boolean isEventListenerModeSync() { - return isEventListenerModeSync; - } - public Class getKeyType() { return keyType; } @@ -79,26 +44,10 @@ public Class getValueType() { return valueType; } - public String getCacheAlias() { - return cacheAlias; - } - public Settings getSettings() { return settings; } - public String getSettingPrefix() { - return settingPrefix; - } - - public String getStoragePath() { - return storagePath; - } - - public String getThreadPoolAlias() { - return threadPoolAlias; - } - /** * Builder class to build Cache config related parameters. * @param Type of key. 
@@ -106,12 +55,6 @@ public String getThreadPoolAlias() { */ public static class Builder { - private long maxWeightInBytes; - - private ToLongBiFunction weigher; - - private TimeValue expireAfterAcess; - private StoreAwareCacheEventListener eventListener; private Settings settings; @@ -120,34 +63,8 @@ public static class Builder { private Class valueType; - private String storagePath; - - private String threadPoolAlias; - - private String cacheAlias; - - private String settingPrefix; - - // Provides capability to make event listener to run in sync/async mode. - private boolean isEventListenerModeSync; - public Builder() {} - public Builder setMaxWeightInBytes(long maxWeightInBytes) { - this.maxWeightInBytes = maxWeightInBytes; - return this; - } - - public Builder setWeigher(ToLongBiFunction weigher) { - this.weigher = weigher; - return this; - } - - public Builder setExpireAfterAcess(TimeValue expireAfterAcess) { - this.expireAfterAcess = expireAfterAcess; - return this; - } - public Builder setEventListener(StoreAwareCacheEventListener listener) { this.eventListener = listener; return this; @@ -168,26 +85,6 @@ public Builder setValueType(Class keyType) { return this; } - public Builder setStoragePath(String storagePath) { - this.storagePath = storagePath; - return this; - } - - public Builder setThreadPoolAlias(String threadPoolAlias) { - this.threadPoolAlias = threadPoolAlias; - return this; - } - - public Builder setCacheAlias(String cacheAlias) { - this.cacheAlias = cacheAlias; - return this; - } - - public Builder setIsEventListenerModeSync(boolean isEventListenerModeSync) { - this.isEventListenerModeSync = isEventListenerModeSync; - return this; - } - public StoreAwareCacheConfig build() { return new StoreAwareCacheConfig<>(this); } diff --git a/server/src/main/java/org/opensearch/common/cache/store/enums/CacheStoreType.java b/server/src/main/java/org/opensearch/common/cache/store/enums/CacheStoreType.java index 1e1aeba332c2f..db37e20c29c9b 100644 --- 
a/server/src/main/java/org/opensearch/common/cache/store/enums/CacheStoreType.java +++ b/server/src/main/java/org/opensearch/common/cache/store/enums/CacheStoreType.java @@ -19,5 +19,5 @@ public enum CacheStoreType { ON_HEAP, - DISK; + DISK } diff --git a/server/src/main/java/org/opensearch/common/cache/store/listeners/StoreAwareCacheEventListener.java b/server/src/main/java/org/opensearch/common/cache/store/listeners/StoreAwareCacheEventListener.java index 6d7e4b39aaf9f..bc6e9e10b5f1b 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/listeners/StoreAwareCacheEventListener.java +++ b/server/src/main/java/org/opensearch/common/cache/store/listeners/StoreAwareCacheEventListener.java @@ -8,6 +8,7 @@ package org.opensearch.common.cache.store.listeners; +import org.opensearch.common.annotation.ExperimentalApi; import org.opensearch.common.cache.store.StoreAwareCacheRemovalNotification; import org.opensearch.common.cache.store.enums.CacheStoreType; @@ -16,8 +17,9 @@ * @param Type of key * @param Type of value * - * @opensearch.internal + * @opensearch.experimental */ +@ExperimentalApi public interface StoreAwareCacheEventListener { void onMiss(K key, CacheStoreType cacheStoreType); diff --git a/server/src/main/java/org/opensearch/common/settings/Setting.java b/server/src/main/java/org/opensearch/common/settings/Setting.java index 0e96edff0681c..fea4c165809ba 100644 --- a/server/src/main/java/org/opensearch/common/settings/Setting.java +++ b/server/src/main/java/org/opensearch/common/settings/Setting.java @@ -978,6 +978,9 @@ private Setting getConcreteSetting(String namespace, String key) { * Get a setting with the given namespace filled in for prefix and suffix. 
*/ public Setting getConcreteSettingForNamespace(String namespace) { + if (namespace == null) { + throw new IllegalArgumentException("Namespace should not be null"); + } String fullKey = key.toConcreteKey(namespace).toString(); return getConcreteSetting(namespace, fullKey); } @@ -2804,6 +2807,12 @@ public static AffixSetting prefixKeySetting(String prefix, Function AffixSetting suffixKeySetting(String suffix, Function> delegateFactory) { + BiFunction> delegateFactoryWithNamespace = (ns, k) -> delegateFactory.apply(k); + AffixKey affixKey = new AffixKey(null, suffix); + return affixKeySetting(affixKey, delegateFactoryWithNamespace); + } + /** * This setting type allows to validate settings that have the same type and a common prefix and suffix. For instance * storage.${backend}.enable=[true|false] can easily be added with this setting. Yet, affix key settings don't support updaters @@ -2943,12 +2952,14 @@ public static final class AffixKey implements Key { assert prefix != null || suffix != null : "Either prefix or suffix must be non-null"; this.prefix = prefix; - if (prefix.endsWith(".") == false) { + if (prefix != null && prefix.endsWith(".") == false) { throw new IllegalArgumentException("prefix must end with a '.'"); } this.suffix = suffix; if (suffix == null) { pattern = Pattern.compile("(" + Pattern.quote(prefix) + "((?:[-\\w]+[.])*[-\\w]+$))"); + } else if (prefix == null) { + pattern = Pattern.compile("((?:[-\\w]+[.])*[-\\w]+\\." + Pattern.quote(suffix) + ")"); } else { // the last part of this regexp is to support both list and group keys pattern = Pattern.compile("(" + Pattern.quote(prefix) + "([-\\w]+)\\." 
+ Pattern.quote(suffix) + ")(?:\\..*)?"); diff --git a/server/src/main/java/org/opensearch/plugins/CachePlugin.java b/server/src/main/java/org/opensearch/plugins/CachePlugin.java index 1774236489048..8fce0d6051618 100644 --- a/server/src/main/java/org/opensearch/plugins/CachePlugin.java +++ b/server/src/main/java/org/opensearch/plugins/CachePlugin.java @@ -22,6 +22,19 @@ @ExperimentalApi public interface CachePlugin { + /** + * Returns a map of cacheStoreType -> a factory via which objects can be created on demand. + * For example: + * If there are two implementations of this plugin, lets say A and B, each may return below which can be + * aggregated by fetching all plugins. + * + * A -> Map.of(DISK, new ADiskCache.Factor(), + * ON_HEAP, new AOnHeapCache.Factor()) + * + * B -> Map.of(ON_HEAP, new ADiskCache.Factor()) + * + * @return + */ Map getCacheStoreTypeMap(); String getName(); diff --git a/server/src/test/java/org/opensearch/common/cache/provider/CacheProviderTests.java b/server/src/test/java/org/opensearch/common/cache/provider/CacheProviderTests.java new file mode 100644 index 0000000000000..c267ffadf0a0f --- /dev/null +++ b/server/src/test/java/org/opensearch/common/cache/provider/CacheProviderTests.java @@ -0,0 +1,40 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.common.cache.provider; + +import org.opensearch.common.cache.store.StoreAwareCache; +import org.opensearch.common.cache.store.enums.CacheStoreType; +import org.opensearch.common.settings.Settings; +import org.opensearch.plugins.CachePlugin; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.List; +import java.util.Map; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class CacheProviderTests extends OpenSearchTestCase { + + public void testWithMultiplePlugins() { + CachePlugin mockPlugin1 = mock(CachePlugin.class); + StoreAwareCache.Factory factory1 = mock(StoreAwareCache.Factory.class); + CachePlugin mockPlugin2 = mock(CachePlugin.class); + StoreAwareCache.Factory factory2 = mock(StoreAwareCache.Factory.class); + + when(mockPlugin1.getCacheStoreTypeMap()).thenReturn(Map.of(CacheStoreType.DISK, factory1, CacheStoreType.ON_HEAP, factory1)); + when(mockPlugin2.getCacheStoreTypeMap()).thenReturn(Map.of(CacheStoreType.DISK, factory2, CacheStoreType.ON_HEAP, factory2)); + + CacheProvider cacheProvider = new CacheProvider(List.of(mockPlugin1, mockPlugin2), Settings.EMPTY); + + Map> cacheStoreTypeListMap = cacheProvider.getCacheStoreTypeFactories(); + assertEquals(2, cacheStoreTypeListMap.get(CacheStoreType.DISK).size()); + assertEquals(2, cacheStoreTypeListMap.get(CacheStoreType.ON_HEAP).size()); + } +} diff --git a/server/src/test/java/org/opensearch/common/settings/SettingTests.java b/server/src/test/java/org/opensearch/common/settings/SettingTests.java index 13cecc7157d82..c6da96b521276 100644 --- a/server/src/test/java/org/opensearch/common/settings/SettingTests.java +++ b/server/src/test/java/org/opensearch/common/settings/SettingTests.java @@ -909,6 +909,18 @@ public void testDynamicKeySetting() { } } + public void testAffixKeySettingWithDynamicPrefix() { + Setting.AffixSetting setting = Setting.suffixKeySetting( + "enable", + (key) -> Setting.boolSetting(key, false, 
Property.NodeScope) + ); + Setting concreteSetting = setting.getConcreteSettingForNamespace("foo.bar"); + assertEquals("foo.bar.enable", concreteSetting.getKey()); + + IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> setting.getConcreteSettingForNamespace("foo.")); + assertEquals("key [foo..enable] must match [*.enable] but didn't.", ex.getMessage()); + } + public void testAffixKeySetting() { Setting setting = Setting.affixKeySetting("foo.", "enable", (key) -> Setting.boolSetting(key, false, Property.NodeScope)); assertTrue(setting.hasComplexMatcher()); From 8bb9e5801a44fb1db439ca9880c5ba9f9210aaf8 Mon Sep 17 00:00:00 2001 From: Sagar Upadhyaya Date: Mon, 29 Jan 2024 15:52:39 -0800 Subject: [PATCH 09/22] Fixing gradle failures for server subproject Signed-off-by: Sagar Upadhyaya --- .../src/main/java/org/opensearch/plugins/CachePlugin.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/opensearch/plugins/CachePlugin.java b/server/src/main/java/org/opensearch/plugins/CachePlugin.java index 8fce0d6051618..836c577835d6f 100644 --- a/server/src/main/java/org/opensearch/plugins/CachePlugin.java +++ b/server/src/main/java/org/opensearch/plugins/CachePlugin.java @@ -23,15 +23,15 @@ public interface CachePlugin { /** - * Returns a map of cacheStoreType -> a factory via which objects can be created on demand. + * Returns a map of cacheStoreType and a factory via which objects can be created on demand. * For example: * If there are two implementations of this plugin, lets say A and B, each may return below which can be * aggregated by fetching all plugins. 
* - * A -> Map.of(DISK, new ADiskCache.Factor(), + * A: Map.of(DISK, new ADiskCache.Factor(), * ON_HEAP, new AOnHeapCache.Factor()) * - * B -> Map.of(ON_HEAP, new ADiskCache.Factor()) + * B: Map.of(ON_HEAP, new ADiskCache.Factor()) * * @return */ From e846ab6d3304ce4becbf7de8c7e2b35567cd18f0 Mon Sep 17 00:00:00 2001 From: Sagar Upadhyaya Date: Mon, 29 Jan 2024 16:19:54 -0800 Subject: [PATCH 10/22] Fixing server:javadoc failure Signed-off-by: Sagar Upadhyaya --- server/src/main/java/org/opensearch/plugins/CachePlugin.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/opensearch/plugins/CachePlugin.java b/server/src/main/java/org/opensearch/plugins/CachePlugin.java index 836c577835d6f..a45c87913c3fd 100644 --- a/server/src/main/java/org/opensearch/plugins/CachePlugin.java +++ b/server/src/main/java/org/opensearch/plugins/CachePlugin.java @@ -33,7 +33,7 @@ public interface CachePlugin { * * B: Map.of(ON_HEAP, new ADiskCache.Factor()) * - * @return + * @return Map of cacheStoreType and an associated factory. 
*/ Map getCacheStoreTypeMap(); From aa4069bc2890442329f74a084bdf4c2a619c92e9 Mon Sep 17 00:00:00 2001 From: Sagar Upadhyaya Date: Mon, 29 Jan 2024 16:32:03 -0800 Subject: [PATCH 11/22] Fixing cache-ehcache plugin javadov failures Signed-off-by: Sagar Upadhyaya --- .../org/opensearch/cache/store/disk/EhcacheDiskCache.java | 8 ++++---- .../cache/store/StoreAwareCacheRemovalNotification.java | 4 +++- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java index 36bbe76ff77fa..18855c3323097 100644 --- a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java @@ -266,7 +266,7 @@ public void put(K key, V value) { * @param key Type of key * @param loader loader to load the value in case key is missing * @return value - * @throws Exception + * @throws Exception when either internal get or put calls fail. */ @Override public V computeIfAbsent(K key, LoadAwareCacheLoader loader) throws Exception { @@ -367,7 +367,7 @@ public Iterable keys() { /** * Gives the current count of keys in disk cache. - * @return + * @return current count of keys */ @Override public long count() { @@ -576,7 +576,7 @@ public Builder() {} /** * Sets the desired cache type. 
- * @param cacheType + * @param cacheType cache type * @return builder */ public Builder setCacheType(CacheType cacheType) { @@ -626,7 +626,7 @@ public Builder setThreadPoolAlias(String threadPoolAlias) { /** * Cache alias - * @param diskCacheAlias + * @param diskCacheAlias disk cache alias * @return builder */ public Builder setDiskCacheAlias(String diskCacheAlias) { diff --git a/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCacheRemovalNotification.java b/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCacheRemovalNotification.java index 492dbff3532a1..0c09af3140cf4 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCacheRemovalNotification.java +++ b/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCacheRemovalNotification.java @@ -8,6 +8,7 @@ package org.opensearch.common.cache.store; +import org.opensearch.common.annotation.ExperimentalApi; import org.opensearch.common.cache.RemovalNotification; import org.opensearch.common.cache.RemovalReason; import org.opensearch.common.cache.store.enums.CacheStoreType; @@ -17,8 +18,9 @@ * @param Type of key. * @param Type of value. 
* - * @opensearch.internal + * @opensearch.experimental */ +@ExperimentalApi public class StoreAwareCacheRemovalNotification extends RemovalNotification { private final CacheStoreType cacheStoreType; From c2236c985f53d9b25950959bce673b6bcaf5db04 Mon Sep 17 00:00:00 2001 From: Sagar Upadhyaya Date: Tue, 30 Jan 2024 20:59:47 -0800 Subject: [PATCH 12/22] Addressing comment and additional unit tests Signed-off-by: Sagar Upadhyaya --- .../org/opensearch/cache/EhcacheSettings.java | 3 +- .../cache/store/disk/EhcacheDiskCache.java | 19 ++++-- .../store/disk/EhCacheDiskCacheTests.java | 60 ++++++++++++++++- .../common/cache/provider/CacheProvider.java | 36 ++++++++-- .../store/config/StoreAwareCacheConfig.java | 7 +- .../cache/provider/CacheProviderTests.java | 67 +++++++++++++++++-- 6 files changed, 170 insertions(+), 22 deletions(-) diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheSettings.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheSettings.java index 4eb9c6e31ce93..2150c6e598cc2 100644 --- a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheSettings.java +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheSettings.java @@ -92,7 +92,7 @@ public class EhcacheSettings { */ public static final Setting.AffixSetting DISK_CACHE_EXPIRE_AFTER_ACCESS_SETTING = Setting.suffixKeySetting( DISK_CACHE_SETTING_SUFFIX + ".expire_after_access", - (key) -> Setting.positiveTimeSetting(key, new TimeValue(0), NodeScope) + (key) -> Setting.positiveTimeSetting(key, TimeValue.MAX_VALUE, NodeScope) ); /** @@ -167,7 +167,6 @@ public class EhcacheSettings { DISK_MAX_SIZE_IN_BYTES_KEY, DISK_CACHE_MAX_SIZE_IN_BYTES_SETTING.getConcreteSettingForNamespace(CacheType.INDICES_REQUEST_CACHE.getSettingPrefix()) ) - ) ); diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java index 
18855c3323097..91635dda1f668 100644 --- a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java @@ -64,6 +64,8 @@ import static org.opensearch.cache.EhcacheSettings.DISK_CACHE_EXPIRE_AFTER_ACCESS_KEY; import static org.opensearch.cache.EhcacheSettings.DISK_MAX_SIZE_IN_BYTES_KEY; import static org.opensearch.cache.EhcacheSettings.DISK_SEGMENT_KEY; +import static org.opensearch.cache.EhcacheSettings.DISK_STORAGE_PATH_KEY; +import static org.opensearch.cache.EhcacheSettings.DISK_STORAGE_PATH_SETTING; import static org.opensearch.cache.EhcacheSettings.DISK_WRITE_CONCURRENCY_KEY; import static org.opensearch.cache.EhcacheSettings.DISK_WRITE_MAXIMUM_THREADS_KEY; import static org.opensearch.cache.EhcacheSettings.DISK_WRITE_MIN_THREADS_KEY; @@ -120,11 +122,14 @@ private EhcacheDiskCache(Builder builder) { } this.cacheType = Objects.requireNonNull(builder.cacheType, "Cache type shouldn't be null"); if (builder.diskCacheAlias == null || builder.diskCacheAlias.isBlank()) { - this.diskCacheAlias = this.cacheType + "#" + UNIQUE_ID; + this.diskCacheAlias = "ehcacheDiskCache#" + this.cacheType; } else { this.diskCacheAlias = builder.diskCacheAlias; } - this.storagePath = Objects.requireNonNull(builder.storagePath, "Storage path shouldn't be null"); + this.storagePath = builder.storagePath; + if (this.storagePath == null || this.storagePath.isBlank()) { + throw new IllegalArgumentException("Storage path shouldn't be null or empty"); + } if (builder.threadPoolAlias == null || builder.threadPoolAlias.isBlank()) { this.threadPoolAlias = THREAD_POOL_ALIAS_PREFIX + "DiskWrite#" + UNIQUE_ID; } else { @@ -141,7 +146,7 @@ private EhcacheDiskCache(Builder builder) { private Cache buildCache(Duration expireAfterAccess, Builder builder) { try { return this.cacheManager.createCache( - builder.diskCacheAlias, + this.diskCacheAlias, 
CacheConfigurationBuilder.newCacheConfigurationBuilder( this.keyType, this.valueType, @@ -210,6 +215,7 @@ private PersistentCacheManager buildCacheManager() { // In case we use multiple ehCaches, we can define this cache manager at a global level. return CacheManagerBuilder.newCacheManagerBuilder() .with(CacheManagerBuilder.persistence(new File(storagePath))) + .using( PooledExecutionServiceConfigurationBuilder.newPooledExecutionServiceConfigurationBuilder() .defaultPool(THREAD_POOL_ALIAS_PREFIX + "Default#" + UNIQUE_ID, 1, 3) // Default pool used for other tasks @@ -530,9 +536,12 @@ public EhcacheDiskCacheFactory() {} public StoreAwareCache create(StoreAwareCacheConfig config, CacheType cacheType) { Map> settingList = EhcacheSettings.getSettingListForCacheTypeAndStore(cacheType, CacheStoreType.DISK); Settings settings = config.getSettings(); - - return new Builder().setStoragePath((String) settingList.get(DISK_SEGMENT_KEY).get(settings)) + Setting stringSetting = DISK_STORAGE_PATH_SETTING.getConcreteSettingForNamespace( + CacheType.INDICES_REQUEST_CACHE.getSettingPrefix() + ); + return new Builder().setStoragePath((String) settingList.get(DISK_STORAGE_PATH_KEY).get(settings)) .setDiskCacheAlias((String) settingList.get(DISK_CACHE_ALIAS_KEY).get(settings)) + .setCacheType(cacheType) .setKeyType((config.getKeyType())) .setValueType(config.getValueType()) .setEventListener(config.getEventListener()) diff --git a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java index cbbea4b319856..0f687cf923e66 100644 --- a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java +++ b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java @@ -8,10 +8,12 @@ package org.opensearch.cache.store.disk; +import org.opensearch.cache.EhcacheSettings; import 
org.opensearch.common.cache.CacheType; import org.opensearch.common.cache.LoadAwareCacheLoader; import org.opensearch.common.cache.store.StoreAwareCache; import org.opensearch.common.cache.store.StoreAwareCacheRemovalNotification; +import org.opensearch.common.cache.store.config.StoreAwareCacheConfig; import org.opensearch.common.cache.store.enums.CacheStoreType; import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; import org.opensearch.common.settings.Settings; @@ -31,6 +33,8 @@ import java.util.concurrent.Phaser; import java.util.concurrent.atomic.AtomicInteger; +import static org.opensearch.cache.EhcacheSettings.DISK_MAX_SIZE_IN_BYTES_KEY; +import static org.opensearch.cache.EhcacheSettings.DISK_STORAGE_PATH_KEY; import static org.hamcrest.CoreMatchers.instanceOf; public class EhCacheDiskCacheTests extends OpenSearchSingleNodeTestCase { @@ -41,8 +45,7 @@ public void testBasicGetAndPut() throws IOException { Settings settings = Settings.builder().build(); MockEventListener mockEventListener = new MockEventListener<>(); try (NodeEnvironment env = newNodeEnvironment(settings)) { - StoreAwareCache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") - .setThreadPoolAlias("ehcacheTest") + StoreAwareCache ehcacheTest = new EhcacheDiskCache.Builder().setThreadPoolAlias("ehcacheTest") .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") .setKeyType(String.class) .setValueType(String.class) @@ -78,6 +81,59 @@ public void testBasicGetAndPut() throws IOException { } } + public void testBasicGetAndPutUsingFactory() throws IOException { + MockEventListener mockEventListener = new MockEventListener<>(); + try (NodeEnvironment env = newNodeEnvironment(Settings.EMPTY)) { + StoreAwareCache.Factory ehcacheFactory = new EhcacheDiskCache.EhcacheDiskCacheFactory(); + StoreAwareCache ehcacheTest = ehcacheFactory.create( + new StoreAwareCacheConfig.Builder().setValueType(String.class) + .setKeyType(String.class) + 
.setEventListener(mockEventListener) + .setSettings( + Settings.builder() + .put( + EhcacheSettings.getSettingListForCacheTypeAndStore(CacheType.INDICES_REQUEST_CACHE, CacheStoreType.DISK) + .get(DISK_MAX_SIZE_IN_BYTES_KEY) + .getKey(), + CACHE_SIZE_IN_BYTES + ) + .put( + EhcacheSettings.getSettingListForCacheTypeAndStore(CacheType.INDICES_REQUEST_CACHE, CacheStoreType.DISK) + .get(DISK_STORAGE_PATH_KEY) + .getKey(), + env.nodePaths()[0].indicesPath.toString() + "/request_cache" + ) + .build() + ) + .build(), + CacheType.INDICES_REQUEST_CACHE + ); + int randomKeys = randomIntBetween(10, 100); + Map keyValueMap = new HashMap<>(); + for (int i = 0; i < randomKeys; i++) { + keyValueMap.put(UUID.randomUUID().toString(), UUID.randomUUID().toString()); + } + for (Map.Entry entry : keyValueMap.entrySet()) { + ehcacheTest.put(entry.getKey(), entry.getValue()); + } + for (Map.Entry entry : keyValueMap.entrySet()) { + String value = ehcacheTest.get(entry.getKey()); + assertEquals(entry.getValue(), value); + } + assertEquals(randomKeys, mockEventListener.onCachedCount.get()); + assertEquals(randomKeys, mockEventListener.onHitCount.get()); + + // Validate misses + int expectedNumberOfMisses = randomIntBetween(10, 200); + for (int i = 0; i < expectedNumberOfMisses; i++) { + ehcacheTest.get(UUID.randomUUID().toString()); + } + + assertEquals(expectedNumberOfMisses, mockEventListener.onMissCount.get()); + ehcacheTest.close(); + } + } + public void testConcurrentPut() throws Exception { Settings settings = Settings.builder().build(); MockEventListener mockEventListener = new MockEventListener<>(); diff --git a/server/src/main/java/org/opensearch/common/cache/provider/CacheProvider.java b/server/src/main/java/org/opensearch/common/cache/provider/CacheProvider.java index ed9dcc83a40f7..d9850d7f2d207 100644 --- a/server/src/main/java/org/opensearch/common/cache/provider/CacheProvider.java +++ b/server/src/main/java/org/opensearch/common/cache/provider/CacheProvider.java @@ -8,24 
+8,26 @@ package org.opensearch.common.cache.provider; +import org.opensearch.common.cache.CacheType; +import org.opensearch.common.cache.settings.CacheSettings; import org.opensearch.common.cache.store.StoreAwareCache; import org.opensearch.common.cache.store.enums.CacheStoreType; +import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; import org.opensearch.plugins.CachePlugin; -import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; /** * Holds all the cache factories and provides a way to fetch them when needed. */ public class CacheProvider { - private final Map> cacheStoreTypeFactories; - + private final Map> cacheStoreTypeFactories; private final Settings settings; public CacheProvider(List cachePlugins, Settings settings) { @@ -33,18 +35,38 @@ public CacheProvider(List cachePlugins, Settings settings) { this.settings = settings; } - private Map> getCacheStoreTypeFactories(List cachePlugins) { - Map> cacheStoreTypeFactories = new HashMap<>(); + private Map> getCacheStoreTypeFactories(List cachePlugins) { + Map> cacheStoreTypeFactories = new HashMap<>(); for (CachePlugin cachePlugin : cachePlugins) { Map factoryMap = cachePlugin.getCacheStoreTypeMap(); for (Map.Entry entry : factoryMap.entrySet()) { - cacheStoreTypeFactories.computeIfAbsent(entry.getKey(), k -> new ArrayList<>()).add(entry.getValue()); + if (cacheStoreTypeFactories.computeIfAbsent(entry.getKey(), k -> new HashMap<>()) + .putIfAbsent(entry.getValue().getCacheName(), entry.getValue()) != null) { + throw new IllegalArgumentException( + "Cache name: " + entry.getValue().getCacheName() + " is " + "already registered for store type: " + entry.getKey() + ); + } } } return Collections.unmodifiableMap(cacheStoreTypeFactories); } - public Map> getCacheStoreTypeFactories() { + // Package private for testing. 
+ protected Map> getCacheStoreTypeFactories() { return cacheStoreTypeFactories; } + + public Optional getStoreAwareCacheForCacheType(CacheStoreType cacheStoreType, CacheType cacheType) { + if (!cacheStoreTypeFactories.containsKey(cacheStoreType) || cacheStoreTypeFactories.get(cacheStoreType).isEmpty()) { + return Optional.empty(); + } + + Setting cacheSettingForCacheType = CacheSettings.getConcreteSettingForCacheType(cacheType, cacheStoreType); + String storeName = cacheSettingForCacheType.get(settings); + if (storeName == null || storeName.isBlank()) { + return Optional.empty(); + } else { + return Optional.ofNullable(cacheStoreTypeFactories.get(cacheStoreType).get(storeName)); + } + } } diff --git a/server/src/main/java/org/opensearch/common/cache/store/config/StoreAwareCacheConfig.java b/server/src/main/java/org/opensearch/common/cache/store/config/StoreAwareCacheConfig.java index a9c734eca1e53..98aba608e0756 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/config/StoreAwareCacheConfig.java +++ b/server/src/main/java/org/opensearch/common/cache/store/config/StoreAwareCacheConfig.java @@ -30,6 +30,9 @@ public class StoreAwareCacheConfig { private StoreAwareCacheConfig(Builder builder) { this.keyType = builder.keyType; + this.valueType = builder.valueType; + this.settings = builder.settings; + this.eventListener = builder.eventListener; } public StoreAwareCacheEventListener getEventListener() { @@ -80,8 +83,8 @@ public Builder setKeyType(Class keyType) { return this; } - public Builder setValueType(Class keyType) { - this.keyType = keyType; + public Builder setValueType(Class valueType) { + this.valueType = valueType; return this; } diff --git a/server/src/test/java/org/opensearch/common/cache/provider/CacheProviderTests.java b/server/src/test/java/org/opensearch/common/cache/provider/CacheProviderTests.java index c267ffadf0a0f..f25bf39c93a18 100644 --- a/server/src/test/java/org/opensearch/common/cache/provider/CacheProviderTests.java +++ 
b/server/src/test/java/org/opensearch/common/cache/provider/CacheProviderTests.java @@ -8,6 +8,7 @@ package org.opensearch.common.cache.provider; +import org.opensearch.common.cache.CacheType; import org.opensearch.common.cache.store.StoreAwareCache; import org.opensearch.common.cache.store.enums.CacheStoreType; import org.opensearch.common.settings.Settings; @@ -27,14 +28,72 @@ public void testWithMultiplePlugins() { StoreAwareCache.Factory factory1 = mock(StoreAwareCache.Factory.class); CachePlugin mockPlugin2 = mock(CachePlugin.class); StoreAwareCache.Factory factory2 = mock(StoreAwareCache.Factory.class); - + when(factory1.getCacheName()).thenReturn("cache1"); + when(factory2.getCacheName()).thenReturn("cache2"); when(mockPlugin1.getCacheStoreTypeMap()).thenReturn(Map.of(CacheStoreType.DISK, factory1, CacheStoreType.ON_HEAP, factory1)); when(mockPlugin2.getCacheStoreTypeMap()).thenReturn(Map.of(CacheStoreType.DISK, factory2, CacheStoreType.ON_HEAP, factory2)); CacheProvider cacheProvider = new CacheProvider(List.of(mockPlugin1, mockPlugin2), Settings.EMPTY); - Map> cacheStoreTypeListMap = cacheProvider.getCacheStoreTypeFactories(); - assertEquals(2, cacheStoreTypeListMap.get(CacheStoreType.DISK).size()); - assertEquals(2, cacheStoreTypeListMap.get(CacheStoreType.ON_HEAP).size()); + Map> cacheStoreTypeListMap = cacheProvider.getCacheStoreTypeFactories(); + assertNotNull(cacheStoreTypeListMap.get(CacheStoreType.DISK).get("cache1")); + assertNotNull(cacheStoreTypeListMap.get(CacheStoreType.DISK).get("cache2")); + assertNotNull(cacheStoreTypeListMap.get(CacheStoreType.ON_HEAP).get("cache1")); + assertNotNull(cacheStoreTypeListMap.get(CacheStoreType.ON_HEAP).get("cache2")); + } + + public void testWithSameCacheStoreTypeAndName() { + CachePlugin mockPlugin1 = mock(CachePlugin.class); + StoreAwareCache.Factory factory1 = mock(StoreAwareCache.Factory.class); + CachePlugin mockPlugin2 = mock(CachePlugin.class); + StoreAwareCache.Factory factory2 = 
mock(StoreAwareCache.Factory.class); + when(factory1.getCacheName()).thenReturn("cache"); + when(factory2.getCacheName()).thenReturn("cache"); + when(mockPlugin1.getCacheStoreTypeMap()).thenReturn(Map.of(CacheStoreType.DISK, factory1)); + when(mockPlugin2.getCacheStoreTypeMap()).thenReturn(Map.of(CacheStoreType.DISK, factory2)); + + IllegalArgumentException ex = assertThrows( + IllegalArgumentException.class, + () -> new CacheProvider(List.of(mockPlugin1, mockPlugin2), Settings.EMPTY) + ); + assertEquals("Cache name: cache is already registered for store type: DISK", ex.getMessage()); + } + + public void testWithCacheFactoryPresentForCacheType() { + CachePlugin mockPlugin1 = mock(CachePlugin.class); + StoreAwareCache.Factory factory1 = mock(StoreAwareCache.Factory.class); + when(factory1.getCacheName()).thenReturn("cache1"); + when(mockPlugin1.getCacheStoreTypeMap()).thenReturn(Map.of(CacheStoreType.DISK, factory1)); + + CacheProvider cacheProvider = new CacheProvider( + List.of(mockPlugin1), + Settings.builder().put(CacheType.INDICES_REQUEST_CACHE.getSettingPrefix() + ".disk.store.name", "cache1").build() + ); + assertTrue(cacheProvider.getStoreAwareCacheForCacheType(CacheStoreType.DISK, CacheType.INDICES_REQUEST_CACHE).isPresent()); + } + + public void testWithCacheFactoryNotPresentForCacheType() { + CachePlugin mockPlugin1 = mock(CachePlugin.class); + StoreAwareCache.Factory factory1 = mock(StoreAwareCache.Factory.class); + when(factory1.getCacheName()).thenReturn("cache1"); + when(mockPlugin1.getCacheStoreTypeMap()).thenReturn(Map.of(CacheStoreType.DISK, factory1)); + + CacheProvider cacheProvider = new CacheProvider( + List.of(mockPlugin1), + Settings.builder().put(CacheType.INDICES_REQUEST_CACHE.getSettingPrefix() + ".disk.store.name", "cache2").build() + ); + assertFalse(cacheProvider.getStoreAwareCacheForCacheType(CacheStoreType.DISK, CacheType.INDICES_REQUEST_CACHE).isPresent()); + + 
assertFalse(cacheProvider.getStoreAwareCacheForCacheType(CacheStoreType.ON_HEAP, CacheType.INDICES_REQUEST_CACHE).isPresent()); + } + + public void testWithNoStoreNameForCacheType() { + CachePlugin mockPlugin1 = mock(CachePlugin.class); + StoreAwareCache.Factory factory1 = mock(StoreAwareCache.Factory.class); + when(factory1.getCacheName()).thenReturn("cache1"); + when(mockPlugin1.getCacheStoreTypeMap()).thenReturn(Map.of(CacheStoreType.DISK, factory1)); + + CacheProvider cacheProvider = new CacheProvider(List.of(mockPlugin1), Settings.EMPTY); + assertFalse(cacheProvider.getStoreAwareCacheForCacheType(CacheStoreType.DISK, CacheType.INDICES_REQUEST_CACHE).isPresent()); } } From 65ff2c2ee8ded783fff3020f2266e15134ece32a Mon Sep 17 00:00:00 2001 From: Sagar Upadhyaya Date: Tue, 30 Jan 2024 21:11:14 -0800 Subject: [PATCH 13/22] Moving ehcache version info to the plugin Signed-off-by: Sagar Upadhyaya --- buildSrc/version.properties | 1 - plugins/cache-ehcache/build.gradle | 4 ++++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/buildSrc/version.properties b/buildSrc/version.properties index 2ea97fe682cd9..dd7f2e1eaabf0 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -72,4 +72,3 @@ resteasy = 6.2.4.Final # opentelemetry dependencies opentelemetry = 1.34.1 opentelemetrysemconv = 1.23.1-alpha -ehcache = 3.10.8 diff --git a/plugins/cache-ehcache/build.gradle b/plugins/cache-ehcache/build.gradle index ea5fed57929e7..5c36563cfd9ab 100644 --- a/plugins/cache-ehcache/build.gradle +++ b/plugins/cache-ehcache/build.gradle @@ -19,6 +19,10 @@ opensearchplugin { hasClientJar = true } +versions << [ + 'ehcache' : '3.10.8' +] + dependencies { api "org.ehcache:ehcache:${versions.ehcache}" } From 8f861eed13111e93174685e2816ceff2043c53b6 Mon Sep 17 00:00:00 2001 From: Sagar Upadhyaya Date: Wed, 14 Feb 2024 10:00:31 -0800 Subject: [PATCH 14/22] Moving tieredCache to module and refactoring caches interface and listener logic Signed-off-by: Sagar 
Upadhyaya --- modules/cache-common/build.gradle | 39 + .../common/tier/TieredSpilloverCache.java | 328 +++++ .../tier/TieredSpilloverCachePlugin.java | 63 + .../tier/TieredSpilloverCacheSettings.java | 43 + .../cache/common}/tier/package-info.java | 4 +- .../plugin-metadata/plugin-security.policy | 6 +- .../tier/TieredSpilloverCachePluginTests.java | 27 + .../tier/TieredSpilloverCacheTests.java | 1129 +++++++++++++++++ .../opensearch/cache/EhcacheCachePlugin.java | 18 +- ...ngs.java => EhcacheDiskCacheSettings.java} | 115 +- .../cache/store/disk/EhcacheDiskCache.java | 129 +- .../opensearch/cache/EhcachePluginTests.java | 11 +- .../store/disk/EhCacheDiskCacheTests.java | 127 +- .../org/opensearch/common/cache/ICache.java | 14 +- .../common/cache/LoadAwareCacheLoader.java | 5 +- .../common/cache/provider/CacheProvider.java | 44 +- .../common/cache/settings/CacheSettings.java | 30 +- .../cache/store/OpenSearchOnHeapCache.java | 82 +- .../common/cache/store/StoreAwareCache.java | 37 - .../StoreAwareCacheRemovalNotification.java | 35 - .../cache/store/StoreAwareCacheValue.java | 35 - ...reCacheBuilder.java => ICacheBuilder.java} | 36 +- ...AwareCacheConfig.java => CacheConfig.java} | 67 +- .../cache/store/enums/CacheStoreType.java | 23 - .../StoreAwareCacheEventListener.java | 32 - .../OpenSearchOnHeapCacheSettings.java | 67 + .../{enums => settings}/package-info.java | 4 +- .../cache/tier/TieredSpilloverCache.java | 294 ----- .../org/opensearch/plugins/CachePlugin.java | 6 +- .../cache/provider/CacheProviderTests.java | 73 +- .../cache/tier/TieredSpilloverCacheTests.java | 797 ------------ 31 files changed, 2057 insertions(+), 1663 deletions(-) create mode 100644 modules/cache-common/build.gradle create mode 100644 modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCache.java create mode 100644 modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCachePlugin.java create mode 100644 
modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCacheSettings.java rename {server/src/main/java/org/opensearch/common/cache => modules/cache-common/src/main/java/org/opensearch/cache/common}/tier/package-info.java (70%) rename server/src/main/java/org/opensearch/common/cache/store/listeners/package-info.java => modules/cache-common/src/main/plugin-metadata/plugin-security.policy (58%) create mode 100644 modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCachePluginTests.java create mode 100644 modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCacheTests.java rename plugins/cache-ehcache/src/main/java/org/opensearch/cache/{EhcacheSettings.java => EhcacheDiskCacheSettings.java} (59%) delete mode 100644 server/src/main/java/org/opensearch/common/cache/store/StoreAwareCache.java delete mode 100644 server/src/main/java/org/opensearch/common/cache/store/StoreAwareCacheRemovalNotification.java delete mode 100644 server/src/main/java/org/opensearch/common/cache/store/StoreAwareCacheValue.java rename server/src/main/java/org/opensearch/common/cache/store/builders/{StoreAwareCacheBuilder.java => ICacheBuilder.java} (57%) rename server/src/main/java/org/opensearch/common/cache/store/config/{StoreAwareCacheConfig.java => CacheConfig.java} (54%) delete mode 100644 server/src/main/java/org/opensearch/common/cache/store/enums/CacheStoreType.java delete mode 100644 server/src/main/java/org/opensearch/common/cache/store/listeners/StoreAwareCacheEventListener.java create mode 100644 server/src/main/java/org/opensearch/common/cache/store/settings/OpenSearchOnHeapCacheSettings.java rename server/src/main/java/org/opensearch/common/cache/store/{enums => settings}/package-info.java (68%) delete mode 100644 server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCache.java delete mode 100644 server/src/test/java/org/opensearch/common/cache/tier/TieredSpilloverCacheTests.java 
diff --git a/modules/cache-common/build.gradle b/modules/cache-common/build.gradle new file mode 100644 index 0000000000000..75931ac55ceae --- /dev/null +++ b/modules/cache-common/build.gradle @@ -0,0 +1,39 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +opensearchplugin { + description 'Module for caches which are optional and do not require additional security permission' + classname 'org.opensearch.cache.common.tier.TieredSpilloverCachePlugin' +} + +test { + // TODO: Adding permission in plugin-security.policy doesn't seem to work. 
+ systemProperty 'tests.security.manager', 'false' +} diff --git a/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCache.java b/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCache.java new file mode 100644 index 0000000000000..0c8003ac7b365 --- /dev/null +++ b/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCache.java @@ -0,0 +1,328 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.cache.common.tier; + +import org.opensearch.common.annotation.ExperimentalApi; +import org.opensearch.common.cache.CacheType; +import org.opensearch.common.cache.ICache; +import org.opensearch.common.cache.LoadAwareCacheLoader; +import org.opensearch.common.cache.RemovalListener; +import org.opensearch.common.cache.RemovalNotification; +import org.opensearch.common.cache.provider.CacheProvider; +import org.opensearch.common.cache.store.config.CacheConfig; +import org.opensearch.common.settings.Setting; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.concurrent.ReleasableLock; +import org.opensearch.common.util.iterable.Iterables; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; +import java.util.Optional; +import java.util.concurrent.locks.ReadWriteLock; +import java.util.concurrent.locks.ReentrantReadWriteLock; +import java.util.function.Function; + +/** + * This cache spillover the evicted items from heap tier to disk tier. All the new items are first cached on heap + * and the items evicted from on heap cache are moved to disk based cache. If disk based cache also gets full, + * then items are eventually evicted from it and removed which will result in cache miss. 
+ * + * @param Type of key + * @param Type of value + * + * @opensearch.experimental + */ +@ExperimentalApi +public class TieredSpilloverCache implements ICache { + + private final ICache diskCache; + private final ICache onHeapCache; + private final RemovalListener removalListener; + ReadWriteLock readWriteLock = new ReentrantReadWriteLock(); + ReleasableLock readLock = new ReleasableLock(readWriteLock.readLock()); + ReleasableLock writeLock = new ReleasableLock(readWriteLock.writeLock()); + + /** + * Maintains caching tiers in ascending order of cache latency. + */ + private final List> cacheList; + + TieredSpilloverCache(Builder builder) { + Objects.requireNonNull(builder.onHeapCacheFactory, "onHeap cache builder can't be null"); + Objects.requireNonNull(builder.diskCacheFactory, "disk cache builder can't be null"); + this.removalListener = Objects.requireNonNull(builder.removalListener, "Removal listener can't be null"); + + this.onHeapCache = builder.onHeapCacheFactory.create( + new CacheConfig.Builder().setRemovalListener(new RemovalListener() { + @Override + public void onRemoval(RemovalNotification notification) { + try (ReleasableLock ignore = writeLock.acquire()) { + diskCache.put(notification.getKey(), notification.getValue()); + } + removalListener.onRemoval(notification); + } + }) + .setKeyType(builder.cacheConfig.getKeyType()) + .setValueType(builder.cacheConfig.getValueType()) + .setSettings(builder.cacheConfig.getSettings()) + .setWeigher(builder.cacheConfig.getWeigher()) + .build(), + builder.cacheType + ); + this.diskCache = builder.diskCacheFactory.create(builder.cacheConfig, builder.cacheType); + this.cacheList = Arrays.asList(onHeapCache, diskCache); + } + + // Package private for testing + ICache getOnHeapCache() { + return onHeapCache; + } + + // Package private for testing + ICache getDiskCache() { + return diskCache; + } + + @Override + public V get(K key) { + return getValueFromTieredCache().apply(key); + } + + @Override + public void 
put(K key, V value) { + try (ReleasableLock ignore = writeLock.acquire()) { + onHeapCache.put(key, value); + } + } + + @Override + public V computeIfAbsent(K key, LoadAwareCacheLoader loader) throws Exception { + V cacheValue = getValueFromTieredCache().apply(key); + if (cacheValue == null) { + // Add the value to the onHeap cache. We are calling computeIfAbsent which does another get inside. + // This is needed as there can be many requests for the same key at the same time and we only want to load + // the value once. + V value = null; + try (ReleasableLock ignore = writeLock.acquire()) { + value = onHeapCache.computeIfAbsent(key, loader); + } + return value; + } + return cacheValue; + } + + @Override + public void invalidate(K key) { + // We are trying to invalidate the key from all caches though it would be present in only one of them. + // Doing this as we don't know where it is located. We could do a get from both and check that, but that will + // also trigger a hit/miss listener event, so ignoring it for now. + try (ReleasableLock ignore = writeLock.acquire()) { + for (ICache cache : cacheList) { + cache.invalidate(key); + } + } + } + + @Override + public void invalidateAll() { + try (ReleasableLock ignore = writeLock.acquire()) { + for (ICache cache : cacheList) { + cache.invalidateAll(); + } + } + } + + /** + * Provides an iteration over both onHeap and disk keys. This is not protected from any mutations to the cache.
+ * @return An iterable over (onHeap + disk) keys + */ + @SuppressWarnings("unchecked") + @Override + public Iterable keys() { + return Iterables.concat(onHeapCache.keys(), diskCache.keys()); + } + + @Override + public long count() { + long count = 0; + for (ICache cache : cacheList) { + count += cache.count(); + } + return count; + } + + @Override + public void refresh() { + try (ReleasableLock ignore = writeLock.acquire()) { + for (ICache cache : cacheList) { + cache.refresh(); + } + } + } + + @Override + public void close() throws IOException { + for (ICache cache : cacheList) { + cache.close(); + } + } + + private Function getValueFromTieredCache() { + return key -> { + try (ReleasableLock ignore = readLock.acquire()) { + for (ICache cache : cacheList) { + V value = cache.get(key); + if (value != null) { + // update hit stats + return value; + } else { + // update miss stats + } + } + } + return null; + }; + } + + /** + * Factory to create TieredSpilloverCache objects. + */ + public static class TieredSpilloverCacheFactory implements ICache.Factory { + + /** + * Defines cache name + */ + public static final String TIERED_SPILLOVER_CACHE_NAME = "tiered_spillover"; + + /** + * Cache provider which is needed to extract factories for desired cache store. + */ + private final CacheProvider cacheProvider; + + /** + * Parameterized constructor + * @param cacheProvider Contains info about various caches. 
+ */ + public TieredSpilloverCacheFactory(CacheProvider cacheProvider) { + this.cacheProvider = cacheProvider; + } + + @Override + public ICache create(CacheConfig config, CacheType cacheType) { + Settings settings = config.getSettings(); + Setting onHeapSetting = TieredSpilloverCacheSettings.TIERED_SPILLOVER_ONHEAP_STORE_NAME.getConcreteSettingForNamespace( + cacheType.getSettingPrefix() + ); + Optional onHeapCacheFactory = cacheProvider.getCacheFactoryForCacheStoreName(onHeapSetting.get(settings)); + if (onHeapCacheFactory.isEmpty()) { + throw new IllegalArgumentException( + "No associated onHeapCache found for tieredSpilloverCache for " + "cacheType:" + cacheType + ); + } + Setting onDiskSetting = TieredSpilloverCacheSettings.TIERED_SPILLOVER_DISK_STORE_NAME.getConcreteSettingForNamespace( + cacheType.getSettingPrefix() + ); + Optional diskCacheFactory = cacheProvider.getCacheFactoryForCacheStoreName(onDiskSetting.get(settings)); + if (diskCacheFactory.isEmpty()) { + throw new IllegalArgumentException( + "No associated diskCache found for tieredSpilloverCache for " + "cacheType:" + cacheType + ); + } + return new Builder().setDiskCacheFactory(diskCacheFactory.get()) + .setOnHeapCacheFactory(onHeapCacheFactory.get()) + .setRemovalListener(config.getRemovalListener()) + .setCacheConfig(config) + .setCacheType(cacheType) + .build(); + } + + @Override + public String getCacheName() { + return TIERED_SPILLOVER_CACHE_NAME; + } + } + + /** + * Builder object for tiered spillover cache. + * @param Type of key + * @param Type of value + */ + public static class Builder { + private ICache.Factory onHeapCacheFactory; + private ICache.Factory diskCacheFactory; + private RemovalListener removalListener; + private CacheConfig cacheConfig; + private CacheType cacheType; + + /** + * Default constructor + */ + public Builder() {} + + /** + * Set onHeap cache factory + * @param onHeapCacheFactory Factory for onHeap cache. 
+ * @return builder + */ + public Builder setOnHeapCacheFactory(ICache.Factory onHeapCacheFactory) { + this.onHeapCacheFactory = onHeapCacheFactory; + return this; + } + + /** + * Set disk cache factory + * @param diskCacheFactory Factory for disk cache. + * @return builder + */ + public Builder setDiskCacheFactory(ICache.Factory diskCacheFactory) { + this.diskCacheFactory = diskCacheFactory; + return this; + } + + /** + * Set removal listener for tiered cache. + * @param removalListener Removal listener + * @return builder + */ + public Builder setRemovalListener(RemovalListener removalListener) { + this.removalListener = removalListener; + return this; + } + + /** + * Set cache config. + * @param cacheConfig cache config. + * @return builder + */ + public Builder setCacheConfig(CacheConfig cacheConfig) { + this.cacheConfig = cacheConfig; + return this; + } + + /** + * Set cache type. + * @param cacheType Cache type + * @return builder + */ + public Builder setCacheType(CacheType cacheType) { + this.cacheType = cacheType; + return this; + } + + /** + * Build tiered spillover cache. + * @return TieredSpilloverCache + */ + public TieredSpilloverCache build() { + return new TieredSpilloverCache<>(this); + } + } +} diff --git a/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCachePlugin.java b/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCachePlugin.java new file mode 100644 index 0000000000000..f3e0265ab92d4 --- /dev/null +++ b/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCachePlugin.java @@ -0,0 +1,63 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.cache.common.tier; + +import org.opensearch.common.cache.CacheType; +import org.opensearch.common.cache.ICache; +import org.opensearch.common.cache.provider.CacheProvider; +import org.opensearch.common.settings.Setting; +import org.opensearch.plugins.CachePlugin; +import org.opensearch.plugins.Plugin; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +/** + * Plugin for TieredSpilloverCache. + */ +public class TieredSpilloverCachePlugin extends Plugin implements CachePlugin { + + /** + * Plugin name + */ + public static final String TIERED_CACHE_SPILLOVER_PLUGIN_NAME = "tieredSpilloverCachePlugin"; + + /** + * Default constructor + */ + TieredSpilloverCachePlugin() {} + + @Override + public Map getCacheFactoryMap(CacheProvider cacheProvider) { + return Map.of( + TieredSpilloverCache.TieredSpilloverCacheFactory.TIERED_SPILLOVER_CACHE_NAME, + new TieredSpilloverCache.TieredSpilloverCacheFactory(cacheProvider) + ); + } + + @Override + public List> getSettings() { + List> settingList = new ArrayList<>(); + for (CacheType cacheType : CacheType.values()) { + settingList.add( + TieredSpilloverCacheSettings.TIERED_SPILLOVER_ONHEAP_STORE_NAME.getConcreteSettingForNamespace(cacheType.getSettingPrefix()) + ); + settingList.add( + TieredSpilloverCacheSettings.TIERED_SPILLOVER_DISK_STORE_NAME.getConcreteSettingForNamespace(cacheType.getSettingPrefix()) + ); + } + return settingList; + } + + @Override + public String getName() { + return TIERED_CACHE_SPILLOVER_PLUGIN_NAME; + } +} diff --git a/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCacheSettings.java b/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCacheSettings.java new file mode 100644 index 0000000000000..50b4177f599d1 --- /dev/null +++ b/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCacheSettings.java @@ -0,0 +1,43 @@ +/* + * SPDX-License-Identifier: 
Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.cache.common.tier; + +import org.opensearch.common.settings.Setting; + +import static org.opensearch.common.settings.Setting.Property.NodeScope; + +/** + * Settings related to TieredSpilloverCache. + */ +public class TieredSpilloverCacheSettings { + + /** + * Setting which defines the onHeap cache store to be used in TieredSpilloverCache. + * + * Pattern: {cache_type}.tiered_spillover.onheap.store.name + * Example: indices.request.cache.tiered_spillover.onheap.store.name + */ + public static final Setting.AffixSetting TIERED_SPILLOVER_ONHEAP_STORE_NAME = Setting.suffixKeySetting( + TieredSpilloverCache.TieredSpilloverCacheFactory.TIERED_SPILLOVER_CACHE_NAME + ".onheap.store.name", + (key) -> Setting.simpleString(key, "", NodeScope) + ); + + /** + * Setting which defines the disk cache store to be used in TieredSpilloverCache. 
+ */ + public static final Setting.AffixSetting TIERED_SPILLOVER_DISK_STORE_NAME = Setting.suffixKeySetting( + TieredSpilloverCache.TieredSpilloverCacheFactory.TIERED_SPILLOVER_CACHE_NAME + ".disk.store.name", + (key) -> Setting.simpleString(key, "", NodeScope) + ); + + /** + * Default constructor + */ + TieredSpilloverCacheSettings() {} +} diff --git a/server/src/main/java/org/opensearch/common/cache/tier/package-info.java b/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/package-info.java similarity index 70% rename from server/src/main/java/org/opensearch/common/cache/tier/package-info.java rename to modules/cache-common/src/main/java/org/opensearch/cache/common/tier/package-info.java index 7ad81dbe3073c..fa2de3c14b5dc 100644 --- a/server/src/main/java/org/opensearch/common/cache/tier/package-info.java +++ b/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/package-info.java @@ -6,5 +6,5 @@ * compatible open source license. */ -/** Base package for cache tier support. */ -package org.opensearch.common.cache.tier; +/** Package related to cache tiers **/ +package org.opensearch.cache.common.tier; diff --git a/server/src/main/java/org/opensearch/common/cache/store/listeners/package-info.java b/modules/cache-common/src/main/plugin-metadata/plugin-security.policy similarity index 58% rename from server/src/main/java/org/opensearch/common/cache/store/listeners/package-info.java rename to modules/cache-common/src/main/plugin-metadata/plugin-security.policy index c3222ca3ffb62..12fe9f2ddb60b 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/listeners/package-info.java +++ b/modules/cache-common/src/main/plugin-metadata/plugin-security.policy @@ -6,5 +6,7 @@ * compatible open source license. 
*/ -/** Package related to tiered cache listeners */ -package org.opensearch.common.cache.store.listeners; +grant { + permission java.lang.RuntimePermission "accessClassInPackage.sun.misc"; + permission java.lang.RuntimePermission "createClassLoader"; +}; diff --git a/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCachePluginTests.java b/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCachePluginTests.java new file mode 100644 index 0000000000000..1775e70faf1fe --- /dev/null +++ b/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCachePluginTests.java @@ -0,0 +1,27 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.cache.common.tier; + +import org.opensearch.common.cache.ICache; +import org.opensearch.common.cache.provider.CacheProvider; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.Map; + +import static org.mockito.Mockito.mock; + +public class TieredSpilloverCachePluginTests extends OpenSearchTestCase { + + public void testGetCacheFactoryMap() { + TieredSpilloverCachePlugin tieredSpilloverCachePlugin = new TieredSpilloverCachePlugin(); + Map map = tieredSpilloverCachePlugin.getCacheFactoryMap(mock(CacheProvider.class)); + assertNotNull(map.get(TieredSpilloverCache.TieredSpilloverCacheFactory.TIERED_SPILLOVER_CACHE_NAME)); + assertEquals(TieredSpilloverCachePlugin.TIERED_CACHE_SPILLOVER_PLUGIN_NAME, tieredSpilloverCachePlugin.getName()); + } +} diff --git a/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCacheTests.java b/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCacheTests.java new file mode 100644 index 0000000000000..5ae4491c53891 --- /dev/null +++ 
b/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCacheTests.java @@ -0,0 +1,1129 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.cache.common.tier; + +import org.opensearch.common.cache.CacheType; +import org.opensearch.common.cache.ICache; +import org.opensearch.common.cache.LoadAwareCacheLoader; +import org.opensearch.common.cache.RemovalListener; +import org.opensearch.common.cache.RemovalNotification; +import org.opensearch.common.cache.provider.CacheProvider; +import org.opensearch.common.cache.store.OpenSearchOnHeapCache; +import org.opensearch.common.cache.store.builders.ICacheBuilder; +import org.opensearch.common.cache.store.config.CacheConfig; +import org.opensearch.common.cache.store.settings.OpenSearchOnHeapCacheSettings; +import org.opensearch.common.metrics.CounterMetric; +import org.opensearch.common.settings.Setting; +import org.opensearch.common.settings.Settings; +import org.opensearch.core.common.unit.ByteSizeValue; +import org.opensearch.plugins.CachePlugin; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.Phaser; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; + +import static org.opensearch.common.cache.store.settings.OpenSearchOnHeapCacheSettings.MAXIMUM_SIZE_IN_BYTES_KEY; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class TieredSpilloverCacheTests extends OpenSearchTestCase { + + public void 
testComputeIfAbsentWithoutAnyOnHeapCacheEviction() throws Exception { + int onHeapCacheSize = randomIntBetween(10, 30); + int keyValueSize = 50; + + MockCacheRemovalListener removalListener = new MockCacheRemovalListener<>(); + TieredSpilloverCache tieredSpilloverCache = intializeTieredSpilloverCache( + onHeapCacheSize, + randomIntBetween(1, 4), + removalListener, + Settings.builder() + .put( + OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE) + .get(MAXIMUM_SIZE_IN_BYTES_KEY) + .getKey(), + onHeapCacheSize * keyValueSize + "b" + ) + .build(), + 0 + ); + int numOfItems1 = randomIntBetween(1, onHeapCacheSize / 2 - 1); + List keys = new ArrayList<>(); + // Put values in cache. + for (int iter = 0; iter < numOfItems1; iter++) { + String key = UUID.randomUUID().toString(); + keys.add(key); + LoadAwareCacheLoader tieredCacheLoader = getLoadAwareCacheLoader(); + tieredSpilloverCache.computeIfAbsent(key, tieredCacheLoader); + } + assertEquals(0, removalListener.evictionsMetric.count()); + + // Try to hit cache again with some randomization. + int numOfItems2 = randomIntBetween(1, onHeapCacheSize / 2 - 1); + int cacheHit = 0; + int cacheMiss = 0; + for (int iter = 0; iter < numOfItems2; iter++) { + if (randomBoolean()) { + // Hit cache with stored key + cacheHit++; + int index = randomIntBetween(0, keys.size() - 1); + tieredSpilloverCache.computeIfAbsent(keys.get(index), getLoadAwareCacheLoader()); + } else { + // Hit cache with randomized key which is expected to miss cache always. 
+ tieredSpilloverCache.computeIfAbsent(UUID.randomUUID().toString(), getLoadAwareCacheLoader()); + cacheMiss++; + } + } + assertEquals(0, removalListener.evictionsMetric.count()); + } + + public void testComputeIfAbsentWithFactoryBasedCacheCreation() throws Exception { + int onHeapCacheSize = randomIntBetween(10, 30); + int diskCacheSize = randomIntBetween(60, 100); + int totalSize = onHeapCacheSize + diskCacheSize; + int keyValueSize = 50; + + MockCacheRemovalListener removalListener = new MockCacheRemovalListener<>(); + + // Set the desired settings needed to create a TieredSpilloverCache object with INDICES_REQUEST_CACHE cacheType. + Settings settings = Settings.builder() + .put( + TieredSpilloverCacheSettings.TIERED_SPILLOVER_ONHEAP_STORE_NAME.getConcreteSettingForNamespace( + CacheType.INDICES_REQUEST_CACHE.getSettingPrefix() + ).getKey(), + OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory.NAME + ) + .put( + TieredSpilloverCacheSettings.TIERED_SPILLOVER_DISK_STORE_NAME.getConcreteSettingForNamespace( + CacheType.INDICES_REQUEST_CACHE.getSettingPrefix() + ).getKey(), + MockOnDiskCache.MockDiskCacheFactory.NAME + ) + .put( + OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE) + .get(MAXIMUM_SIZE_IN_BYTES_KEY) + .getKey(), + onHeapCacheSize * keyValueSize + "b" + ) + .build(); + CachePlugin onHeapCachePlugin = mock(CachePlugin.class); + CachePlugin diskCachePlugin = mock(CachePlugin.class); + when(onHeapCachePlugin.getCacheFactoryMap(any(CacheProvider.class))).thenReturn( + Map.of(OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory.NAME, new OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory()) + ); + when(diskCachePlugin.getCacheFactoryMap(any(CacheProvider.class))).thenReturn( + Map.of(MockOnDiskCache.MockDiskCacheFactory.NAME, new MockOnDiskCache.MockDiskCacheFactory(0, randomIntBetween(100, 300))) + ); + CacheProvider cacheProvider = new CacheProvider(List.of(onHeapCachePlugin, diskCachePlugin), settings); + + ICache 
tieredSpilloverICache = new TieredSpilloverCache.TieredSpilloverCacheFactory(cacheProvider).create( + new CacheConfig.Builder().setKeyType(String.class) + .setKeyType(String.class) + .setWeigher((k, v) -> keyValueSize) + .setRemovalListener(removalListener) + .setSettings(settings) + .build(), + CacheType.INDICES_REQUEST_CACHE + ); + + TieredSpilloverCache tieredSpilloverCache = (TieredSpilloverCache) tieredSpilloverICache; + + // Put values in cache more than it's size and cause evictions from onHeap. + int numOfItems1 = randomIntBetween(onHeapCacheSize + 1, totalSize); + List onHeapKeys = new ArrayList<>(); + List diskTierKeys = new ArrayList<>(); + for (int iter = 0; iter < numOfItems1; iter++) { + String key = UUID.randomUUID().toString(); + LoadAwareCacheLoader tieredCacheLoader = getLoadAwareCacheLoader(); + tieredSpilloverCache.computeIfAbsent(key, tieredCacheLoader); + } + long actualDiskCacheSize = tieredSpilloverCache.getDiskCache().count(); + assertEquals(actualDiskCacheSize, removalListener.evictionsMetric.count()); // Evictions from onHeap equal to + // disk cache size. + + tieredSpilloverCache.getOnHeapCache().keys().forEach(onHeapKeys::add); + tieredSpilloverCache.getDiskCache().keys().forEach(diskTierKeys::add); + + assertEquals(tieredSpilloverCache.getOnHeapCache().count(), onHeapKeys.size()); + assertEquals(tieredSpilloverCache.getDiskCache().count(), diskTierKeys.size()); + } + + public void testWithFactoryCreationWithOnHeapCacheNotPresent() { + int onHeapCacheSize = randomIntBetween(10, 30); + int keyValueSize = 50; + MockCacheRemovalListener removalListener = new MockCacheRemovalListener<>(); + + // Set the settings without onHeap cache settings. 
+ Settings settings = Settings.builder() + .put( + TieredSpilloverCacheSettings.TIERED_SPILLOVER_DISK_STORE_NAME.getConcreteSettingForNamespace( + CacheType.INDICES_REQUEST_CACHE.getSettingPrefix() + ).getKey(), + MockOnDiskCache.MockDiskCacheFactory.NAME + ) + .put( + OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE) + .get(MAXIMUM_SIZE_IN_BYTES_KEY) + .getKey(), + onHeapCacheSize * keyValueSize + "b" + ) + .build(); + CachePlugin onHeapCachePlugin = mock(CachePlugin.class); + CachePlugin diskCachePlugin = mock(CachePlugin.class); + when(onHeapCachePlugin.getCacheFactoryMap(any(CacheProvider.class))).thenReturn( + Map.of(OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory.NAME, new OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory()) + ); + when(diskCachePlugin.getCacheFactoryMap(any(CacheProvider.class))).thenReturn( + Map.of(MockOnDiskCache.MockDiskCacheFactory.NAME, new MockOnDiskCache.MockDiskCacheFactory(0, randomIntBetween(100, 300))) + ); + CacheProvider cacheProvider = new CacheProvider(List.of(onHeapCachePlugin, diskCachePlugin), settings); + + IllegalArgumentException ex = assertThrows( + IllegalArgumentException.class, + () -> new TieredSpilloverCache.TieredSpilloverCacheFactory(cacheProvider).create( + new CacheConfig.Builder().setKeyType(String.class) + .setKeyType(String.class) + .setWeigher((k, v) -> keyValueSize) + .setRemovalListener(removalListener) + .setSettings(settings) + .build(), + CacheType.INDICES_REQUEST_CACHE + ) + ); + assertEquals( + ex.getMessage(), + "No associated onHeapCache found for tieredSpilloverCache for " + "cacheType:" + CacheType.INDICES_REQUEST_CACHE + ); + } + + public void testWithFactoryCreationWithDiskCacheNotPresent() { + int onHeapCacheSize = randomIntBetween(10, 30); + int keyValueSize = 50; + MockCacheRemovalListener removalListener = new MockCacheRemovalListener<>(); + + // Set the settings without onHeap cache settings. 
+ Settings settings = Settings.builder() + .put( + TieredSpilloverCacheSettings.TIERED_SPILLOVER_ONHEAP_STORE_NAME.getConcreteSettingForNamespace( + CacheType.INDICES_REQUEST_CACHE.getSettingPrefix() + ).getKey(), + OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory.NAME + ) + .put( + OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE) + .get(MAXIMUM_SIZE_IN_BYTES_KEY) + .getKey(), + onHeapCacheSize * keyValueSize + "b" + ) + .build(); + CachePlugin onHeapCachePlugin = mock(CachePlugin.class); + CachePlugin diskCachePlugin = mock(CachePlugin.class); + when(onHeapCachePlugin.getCacheFactoryMap(any(CacheProvider.class))).thenReturn( + Map.of(OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory.NAME, new OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory()) + ); + when(diskCachePlugin.getCacheFactoryMap(any(CacheProvider.class))).thenReturn( + Map.of(MockOnDiskCache.MockDiskCacheFactory.NAME, new MockOnDiskCache.MockDiskCacheFactory(0, randomIntBetween(100, 300))) + ); + CacheProvider cacheProvider = new CacheProvider(List.of(onHeapCachePlugin, diskCachePlugin), settings); + + IllegalArgumentException ex = assertThrows( + IllegalArgumentException.class, + () -> new TieredSpilloverCache.TieredSpilloverCacheFactory(cacheProvider).create( + new CacheConfig.Builder().setKeyType(String.class) + .setKeyType(String.class) + .setWeigher((k, v) -> keyValueSize) + .setRemovalListener(removalListener) + .setSettings(settings) + .build(), + CacheType.INDICES_REQUEST_CACHE + ) + ); + assertEquals( + ex.getMessage(), + "No associated diskCache found for tieredSpilloverCache for " + "cacheType:" + CacheType.INDICES_REQUEST_CACHE + ); + } + + public void testComputeIfAbsentWithEvictionsFromOnHeapCache() throws Exception { + int onHeapCacheSize = randomIntBetween(10, 30); + int diskCacheSize = randomIntBetween(60, 100); + int totalSize = onHeapCacheSize + diskCacheSize; + int keyValueSize = 50; + MockCacheRemovalListener removalListener = new 
MockCacheRemovalListener<>(); + ICache.Factory onHeapCacheFactory = new OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory(); + CacheConfig cacheConfig = new CacheConfig.Builder().setKeyType(String.class) + .setKeyType(String.class) + .setWeigher((k, v) -> keyValueSize) + .setRemovalListener(removalListener) + .setSettings( + Settings.builder() + .put( + OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE) + .get(MAXIMUM_SIZE_IN_BYTES_KEY) + .getKey(), + onHeapCacheSize * keyValueSize + "b" + ) + .build() + ) + .build(); + + ICache.Factory mockDiskCacheFactory = new MockOnDiskCache.MockDiskCacheFactory(0, diskCacheSize); + + TieredSpilloverCache tieredSpilloverCache = new TieredSpilloverCache.Builder() + .setOnHeapCacheFactory(onHeapCacheFactory) + .setDiskCacheFactory(mockDiskCacheFactory) + .setCacheConfig(cacheConfig) + .setRemovalListener(removalListener) + .setCacheType(CacheType.INDICES_REQUEST_CACHE) + .build(); + + // Put values in cache more than it's size and cause evictions from onHeap. + int numOfItems1 = randomIntBetween(onHeapCacheSize + 1, totalSize); + List onHeapKeys = new ArrayList<>(); + List diskTierKeys = new ArrayList<>(); + for (int iter = 0; iter < numOfItems1; iter++) { + String key = UUID.randomUUID().toString(); + LoadAwareCacheLoader tieredCacheLoader = getLoadAwareCacheLoader(); + tieredSpilloverCache.computeIfAbsent(key, tieredCacheLoader); + } + long actualDiskCacheSize = tieredSpilloverCache.getDiskCache().count(); + assertEquals(actualDiskCacheSize, removalListener.evictionsMetric.count()); // Evictions from onHeap equal to + // disk cache size. 
+ + tieredSpilloverCache.getOnHeapCache().keys().forEach(onHeapKeys::add); + tieredSpilloverCache.getDiskCache().keys().forEach(diskTierKeys::add); + + assertEquals(tieredSpilloverCache.getOnHeapCache().count(), onHeapKeys.size()); + assertEquals(tieredSpilloverCache.getDiskCache().count(), diskTierKeys.size()); + + // Try to hit cache again with some randomization. + int numOfItems2 = randomIntBetween(50, 200); + int onHeapCacheHit = 0; + int diskCacheHit = 0; + int cacheMiss = 0; + for (int iter = 0; iter < numOfItems2; iter++) { + if (randomBoolean()) { // Hit cache with key stored in onHeap cache. + onHeapCacheHit++; + int index = randomIntBetween(0, onHeapKeys.size() - 1); + LoadAwareCacheLoader loadAwareCacheLoader = getLoadAwareCacheLoader(); + tieredSpilloverCache.computeIfAbsent(onHeapKeys.get(index), loadAwareCacheLoader); + assertFalse(loadAwareCacheLoader.isLoaded()); + } else { // Hit cache with key stored in disk cache. + diskCacheHit++; + int index = randomIntBetween(0, diskTierKeys.size() - 1); + LoadAwareCacheLoader loadAwareCacheLoader = getLoadAwareCacheLoader(); + tieredSpilloverCache.computeIfAbsent(diskTierKeys.get(index), loadAwareCacheLoader); + assertFalse(loadAwareCacheLoader.isLoaded()); + } + } + for (int iter = 0; iter < randomIntBetween(50, 200); iter++) { + // Hit cache with randomized key which is expected to miss cache always. 
+ LoadAwareCacheLoader tieredCacheLoader = getLoadAwareCacheLoader(); + tieredSpilloverCache.computeIfAbsent(UUID.randomUUID().toString(), tieredCacheLoader); + cacheMiss++; + } + } + + public void testComputeIfAbsentWithEvictionsFromBothTier() throws Exception { + int onHeapCacheSize = randomIntBetween(10, 30); + int diskCacheSize = randomIntBetween(onHeapCacheSize + 1, 100); + int totalSize = onHeapCacheSize + diskCacheSize; + int keyValueSize = 50; + + MockCacheRemovalListener removalListener = new MockCacheRemovalListener<>(); + TieredSpilloverCache tieredSpilloverCache = intializeTieredSpilloverCache( + onHeapCacheSize, + diskCacheSize, + removalListener, + Settings.builder() + .put( + OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE) + .get(MAXIMUM_SIZE_IN_BYTES_KEY) + .getKey(), + onHeapCacheSize * keyValueSize + "b" + ) + .build(), + 0 + ); + + int numOfItems = randomIntBetween(totalSize + 1, totalSize * 3); + for (int iter = 0; iter < numOfItems; iter++) { + LoadAwareCacheLoader tieredCacheLoader = getLoadAwareCacheLoader(); + tieredSpilloverCache.computeIfAbsent(UUID.randomUUID().toString(), tieredCacheLoader); + } + assertTrue(removalListener.evictionsMetric.count() > 0); + // assertTrue(eventListener.enumMap.get(CacheStoreType.ON_HEAP).evictionsMetric.count() > 0); + // assertTrue(eventListener.enumMap.get(CacheStoreType.DISK).evictionsMetric.count() > 0); + } + + public void testGetAndCount() throws Exception { + int onHeapCacheSize = randomIntBetween(10, 30); + int diskCacheSize = randomIntBetween(onHeapCacheSize + 1, 100); + int keyValueSize = 50; + int totalSize = onHeapCacheSize + diskCacheSize; + + MockCacheRemovalListener removalListener = new MockCacheRemovalListener<>(); + TieredSpilloverCache tieredSpilloverCache = intializeTieredSpilloverCache( + onHeapCacheSize, + diskCacheSize, + removalListener, + Settings.builder() + .put( + 
OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE) + .get(MAXIMUM_SIZE_IN_BYTES_KEY) + .getKey(), + onHeapCacheSize * keyValueSize + "b" + ) + .build(), + 0 + ); + + int numOfItems1 = randomIntBetween(onHeapCacheSize + 1, totalSize); + List onHeapKeys = new ArrayList<>(); + List diskTierKeys = new ArrayList<>(); + for (int iter = 0; iter < numOfItems1; iter++) { + String key = UUID.randomUUID().toString(); + if (iter > (onHeapCacheSize - 1)) { + // All these are bound to go to disk based cache. + diskTierKeys.add(key); + } else { + onHeapKeys.add(key); + } + LoadAwareCacheLoader loadAwareCacheLoader = getLoadAwareCacheLoader(); + tieredSpilloverCache.computeIfAbsent(key, loadAwareCacheLoader); + } + + for (int iter = 0; iter < numOfItems1; iter++) { + if (randomBoolean()) { + if (randomBoolean()) { + int index = randomIntBetween(0, onHeapKeys.size() - 1); + assertNotNull(tieredSpilloverCache.get(onHeapKeys.get(index))); + } else { + int index = randomIntBetween(0, diskTierKeys.size() - 1); + assertNotNull(tieredSpilloverCache.get(diskTierKeys.get(index))); + } + } else { + assertNull(tieredSpilloverCache.get(UUID.randomUUID().toString())); + } + } + assertEquals(numOfItems1, tieredSpilloverCache.count()); + } + + public void testPut() { + int onHeapCacheSize = randomIntBetween(10, 30); + int diskCacheSize = randomIntBetween(onHeapCacheSize + 1, 100); + int keyValueSize = 50; + + MockCacheRemovalListener removalListener = new MockCacheRemovalListener<>(); + TieredSpilloverCache tieredSpilloverCache = intializeTieredSpilloverCache( + onHeapCacheSize, + diskCacheSize, + removalListener, + Settings.builder() + .put( + OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE) + .get(MAXIMUM_SIZE_IN_BYTES_KEY) + .getKey(), + onHeapCacheSize * keyValueSize + "b" + ) + .build(), + 0 + ); + String key = UUID.randomUUID().toString(); + String value = UUID.randomUUID().toString(); + 
tieredSpilloverCache.put(key, value); + // assertEquals(1, eventListener.enumMap.get(CacheStoreType.ON_HEAP).cachedCount.count()); + assertEquals(1, tieredSpilloverCache.count()); + } + + public void testPutAndVerifyNewItemsArePresentOnHeapCache() throws Exception { + int onHeapCacheSize = randomIntBetween(200, 400); + int diskCacheSize = randomIntBetween(450, 800); + int keyValueSize = 50; + + MockCacheRemovalListener removalListener = new MockCacheRemovalListener<>(); + + TieredSpilloverCache tieredSpilloverCache = intializeTieredSpilloverCache( + keyValueSize, + diskCacheSize, + removalListener, + Settings.builder() + .put( + OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE) + .get(MAXIMUM_SIZE_IN_BYTES_KEY) + .getKey(), + (onHeapCacheSize * keyValueSize) + "b" + ) + .build(), + 0 + ); + + for (int i = 0; i < onHeapCacheSize; i++) { + tieredSpilloverCache.computeIfAbsent(UUID.randomUUID().toString(), new LoadAwareCacheLoader<>() { + @Override + public boolean isLoaded() { + return false; + } + + @Override + public String load(String key) { + return UUID.randomUUID().toString(); + } + }); + } + + assertEquals(onHeapCacheSize, tieredSpilloverCache.getOnHeapCache().count()); + assertEquals(0, tieredSpilloverCache.getDiskCache().count()); + + // Again try to put OnHeap cache capacity amount of new items. + List newKeyList = new ArrayList<>(); + for (int i = 0; i < onHeapCacheSize; i++) { + newKeyList.add(UUID.randomUUID().toString()); + } + + for (int i = 0; i < newKeyList.size(); i++) { + tieredSpilloverCache.computeIfAbsent(newKeyList.get(i), new LoadAwareCacheLoader<>() { + @Override + public boolean isLoaded() { + return false; + } + + @Override + public String load(String key) { + return UUID.randomUUID().toString(); + } + }); + } + + // Verify that new items are part of onHeap cache. 
+ List actualOnHeapCacheKeys = new ArrayList<>(); + tieredSpilloverCache.getOnHeapCache().keys().forEach(actualOnHeapCacheKeys::add); + + assertEquals(newKeyList.size(), actualOnHeapCacheKeys.size()); + for (int i = 0; i < actualOnHeapCacheKeys.size(); i++) { + assertTrue(newKeyList.contains(actualOnHeapCacheKeys.get(i))); + } + assertEquals(onHeapCacheSize, tieredSpilloverCache.getOnHeapCache().count()); + assertEquals(onHeapCacheSize, tieredSpilloverCache.getDiskCache().count()); + } + + public void testInvalidate() { + int onHeapCacheSize = 1; + int diskCacheSize = 10; + int keyValueSize = 20; + + MockCacheRemovalListener removalListener = new MockCacheRemovalListener<>(); + TieredSpilloverCache tieredSpilloverCache = intializeTieredSpilloverCache( + onHeapCacheSize, + diskCacheSize, + removalListener, + Settings.builder() + .put( + OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE) + .get(MAXIMUM_SIZE_IN_BYTES_KEY) + .getKey(), + onHeapCacheSize * keyValueSize + "b" + ) + .build(), + 0 + ); + String key = UUID.randomUUID().toString(); + String value = UUID.randomUUID().toString(); + // First try to invalidate without the key present in cache. + tieredSpilloverCache.invalidate(key); + // assertEquals(0, eventListener.enumMap.get(CacheStoreType.ON_HEAP).invalidationMetric.count()); + + // Now try to invalidate with the key present in onHeap cache. + tieredSpilloverCache.put(key, value); + tieredSpilloverCache.invalidate(key); + // assertEquals(1, eventListener.enumMap.get(CacheStoreType.ON_HEAP).invalidationMetric.count()); + assertEquals(0, tieredSpilloverCache.count()); + + tieredSpilloverCache.put(key, value); + // Put another key/value so that one of the item is evicted to disk cache. 
+ String key2 = UUID.randomUUID().toString(); + tieredSpilloverCache.put(key2, UUID.randomUUID().toString()); + assertEquals(2, tieredSpilloverCache.count()); + // Again invalidate older key + tieredSpilloverCache.invalidate(key); + // assertEquals(1, eventListener.enumMap.get(CacheStoreType.DISK).invalidationMetric.count()); + assertEquals(1, tieredSpilloverCache.count()); + } + + public void testCacheKeys() throws Exception { + int onHeapCacheSize = randomIntBetween(10, 30); + int diskCacheSize = randomIntBetween(60, 100); + int keyValueSize = 50; + + MockCacheRemovalListener removalListener = new MockCacheRemovalListener<>(); + TieredSpilloverCache tieredSpilloverCache = intializeTieredSpilloverCache( + keyValueSize, + diskCacheSize, + removalListener, + Settings.builder() + .put( + OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE) + .get(MAXIMUM_SIZE_IN_BYTES_KEY) + .getKey(), + onHeapCacheSize * keyValueSize + "b" + ) + .build(), + 0 + ); + List onHeapKeys = new ArrayList<>(); + List diskTierKeys = new ArrayList<>(); + // During first round add onHeapCacheSize entries. Will go to onHeap cache initially. + for (int i = 0; i < onHeapCacheSize; i++) { + String key = UUID.randomUUID().toString(); + diskTierKeys.add(key); + tieredSpilloverCache.computeIfAbsent(key, getLoadAwareCacheLoader()); + } + // In another round, add another onHeapCacheSize entries. These will go to onHeap and above ones will be + // evicted to onDisk cache. 
+ for (int i = 0; i < onHeapCacheSize; i++) { + String key = UUID.randomUUID().toString(); + onHeapKeys.add(key); + tieredSpilloverCache.computeIfAbsent(key, getLoadAwareCacheLoader()); + } + + List actualOnHeapKeys = new ArrayList<>(); + List actualOnDiskKeys = new ArrayList<>(); + Iterable onHeapiterable = tieredSpilloverCache.getOnHeapCache().keys(); + Iterable onDiskiterable = tieredSpilloverCache.getDiskCache().keys(); + onHeapiterable.iterator().forEachRemaining(actualOnHeapKeys::add); + onDiskiterable.iterator().forEachRemaining(actualOnDiskKeys::add); + for (String onHeapKey : onHeapKeys) { + assertTrue(actualOnHeapKeys.contains(onHeapKey)); + } + for (String onDiskKey : actualOnDiskKeys) { + assertTrue(actualOnDiskKeys.contains(onDiskKey)); + } + + // Testing keys() which returns all keys. + List actualMergedKeys = new ArrayList<>(); + List expectedMergedKeys = new ArrayList<>(); + expectedMergedKeys.addAll(onHeapKeys); + expectedMergedKeys.addAll(diskTierKeys); + + Iterable mergedIterable = tieredSpilloverCache.keys(); + mergedIterable.iterator().forEachRemaining(actualMergedKeys::add); + + assertEquals(expectedMergedKeys.size(), actualMergedKeys.size()); + for (String key : expectedMergedKeys) { + assertTrue(actualMergedKeys.contains(key)); + } + } + + public void testRefresh() { + int diskCacheSize = randomIntBetween(60, 100); + + MockCacheRemovalListener removalListener = new MockCacheRemovalListener<>(); + TieredSpilloverCache tieredSpilloverCache = intializeTieredSpilloverCache( + 50, + diskCacheSize, + removalListener, + Settings.EMPTY, + 0 + ); + tieredSpilloverCache.refresh(); + } + + public void testInvalidateAll() throws Exception { + int onHeapCacheSize = randomIntBetween(10, 30); + int diskCacheSize = randomIntBetween(60, 100); + int keyValueSize = 50; + int totalSize = onHeapCacheSize + diskCacheSize; + + MockCacheRemovalListener removalListener = new MockCacheRemovalListener<>(); + TieredSpilloverCache tieredSpilloverCache = 
intializeTieredSpilloverCache( + keyValueSize, + diskCacheSize, + removalListener, + Settings.builder() + .put( + OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE) + .get(MAXIMUM_SIZE_IN_BYTES_KEY) + .getKey(), + onHeapCacheSize * keyValueSize + "b" + ) + .build(), + 0 + ); + // Put values in cache more than it's size and cause evictions from onHeap. + int numOfItems1 = randomIntBetween(onHeapCacheSize + 1, totalSize); + List onHeapKeys = new ArrayList<>(); + List diskTierKeys = new ArrayList<>(); + for (int iter = 0; iter < numOfItems1; iter++) { + String key = UUID.randomUUID().toString(); + if (iter > (onHeapCacheSize - 1)) { + // All these are bound to go to disk based cache. + diskTierKeys.add(key); + } else { + onHeapKeys.add(key); + } + LoadAwareCacheLoader tieredCacheLoader = getLoadAwareCacheLoader(); + tieredSpilloverCache.computeIfAbsent(key, tieredCacheLoader); + } + assertEquals(numOfItems1, tieredSpilloverCache.count()); + tieredSpilloverCache.invalidateAll(); + assertEquals(0, tieredSpilloverCache.count()); + } + + public void testComputeIfAbsentConcurrently() throws Exception { + int onHeapCacheSize = randomIntBetween(100, 300); + int diskCacheSize = randomIntBetween(200, 400); + int keyValueSize = 50; + + MockCacheRemovalListener removalListener = new MockCacheRemovalListener<>(); + Settings settings = Settings.builder() + .put( + OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE) + .get(MAXIMUM_SIZE_IN_BYTES_KEY) + .getKey(), + onHeapCacheSize * keyValueSize + "b" + ) + .build(); + + TieredSpilloverCache tieredSpilloverCache = intializeTieredSpilloverCache( + keyValueSize, + diskCacheSize, + removalListener, + settings, + 0 + ); + + int numberOfSameKeys = randomIntBetween(10, onHeapCacheSize - 1); + String key = UUID.randomUUID().toString(); + String value = UUID.randomUUID().toString(); + + Thread[] threads = new Thread[numberOfSameKeys]; + Phaser phaser = new 
Phaser(numberOfSameKeys + 1); + CountDownLatch countDownLatch = new CountDownLatch(numberOfSameKeys); // To wait for all threads to finish. + + List> loadAwareCacheLoaderList = new CopyOnWriteArrayList<>(); + + for (int i = 0; i < numberOfSameKeys; i++) { + threads[i] = new Thread(() -> { + try { + LoadAwareCacheLoader loadAwareCacheLoader = new LoadAwareCacheLoader<>() { + boolean isLoaded = false; + + @Override + public boolean isLoaded() { + return isLoaded; + } + + @Override + public String load(String key) { + isLoaded = true; + return value; + } + }; + loadAwareCacheLoaderList.add(loadAwareCacheLoader); + phaser.arriveAndAwaitAdvance(); + tieredSpilloverCache.computeIfAbsent(key, loadAwareCacheLoader); + } catch (Exception e) { + throw new RuntimeException(e); + } + countDownLatch.countDown(); + }); + threads[i].start(); + } + phaser.arriveAndAwaitAdvance(); + countDownLatch.await(); // Wait for rest of tasks to be cancelled. + int numberOfTimesKeyLoaded = 0; + assertEquals(numberOfSameKeys, loadAwareCacheLoaderList.size()); + for (int i = 0; i < loadAwareCacheLoaderList.size(); i++) { + LoadAwareCacheLoader loader = loadAwareCacheLoaderList.get(i); + if (loader.isLoaded()) { + numberOfTimesKeyLoaded++; + } + } + assertEquals(1, numberOfTimesKeyLoaded); // It should be loaded only once. 
+ } + + public void testConcurrencyForEvictionFlow() throws Exception { + int diskCacheSize = randomIntBetween(450, 800); + + MockCacheRemovalListener removalListener = new MockCacheRemovalListener<>(); + + ICache.Factory onHeapCacheFactory = new OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory(); + ICache.Factory diskCacheFactory = new MockOnDiskCache.MockDiskCacheFactory(500, diskCacheSize); + CacheConfig cacheConfig = new CacheConfig.Builder().setKeyType(String.class) + .setKeyType(String.class) + .setWeigher((k, v) -> 150) + .setRemovalListener(removalListener) + .setSettings( + Settings.builder() + .put( + OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE) + .get(MAXIMUM_SIZE_IN_BYTES_KEY) + .getKey(), + 200 + "b" + ) + .build() + ) + .build(); + TieredSpilloverCache tieredSpilloverCache = new TieredSpilloverCache.Builder() + .setOnHeapCacheFactory(onHeapCacheFactory) + .setDiskCacheFactory(diskCacheFactory) + .setRemovalListener(removalListener) + .setCacheConfig(cacheConfig) + .setCacheType(CacheType.INDICES_REQUEST_CACHE) + .build(); + + String keyToBeEvicted = "key1"; + String secondKey = "key2"; + + // Put first key on tiered cache. Will go into onHeap cache. + tieredSpilloverCache.computeIfAbsent(keyToBeEvicted, new LoadAwareCacheLoader<>() { + @Override + public boolean isLoaded() { + return false; + } + + @Override + public String load(String key) { + return UUID.randomUUID().toString(); + } + }); + CountDownLatch countDownLatch = new CountDownLatch(1); + CountDownLatch countDownLatch1 = new CountDownLatch(1); + // Put second key on tiered cache. Will cause eviction of first key from onHeap cache and should go into + // disk cache. 
+ LoadAwareCacheLoader loadAwareCacheLoader = getLoadAwareCacheLoader(); + Thread thread = new Thread(() -> { + try { + tieredSpilloverCache.computeIfAbsent(secondKey, loadAwareCacheLoader); + countDownLatch1.countDown(); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + thread.start(); + assertBusy(() -> { assertTrue(loadAwareCacheLoader.isLoaded()); }, 100, TimeUnit.MILLISECONDS); // We wait for new key to be loaded + // after which it eviction flow is + // guaranteed to occur. + ICache onDiskCache = tieredSpilloverCache.getDiskCache(); + + // Now on a different thread, try to get key(above one which got evicted) from tiered cache. We expect this + // should return not null value as it should be present on diskCache. + AtomicReference actualValue = new AtomicReference<>(); + Thread thread1 = new Thread(() -> { + try { + actualValue.set(tieredSpilloverCache.get(keyToBeEvicted)); + } catch (Exception e) { + throw new RuntimeException(e); + } + countDownLatch.countDown(); + }); + thread1.start(); + countDownLatch.await(); + assertNotNull(actualValue.get()); + countDownLatch1.await(); + assertEquals(1, removalListener.evictionsMetric.count()); + // assertEquals(1, eventListener.enumMap.get(CacheStoreType.ON_HEAP).evictionsMetric.count()); + assertEquals(1, tieredSpilloverCache.getOnHeapCache().count()); + assertEquals(1, onDiskCache.count()); + assertNotNull(onDiskCache.get(keyToBeEvicted)); + } + + class MockCacheRemovalListener implements RemovalListener { + final CounterMetric evictionsMetric = new CounterMetric(); + + @Override + public void onRemoval(RemovalNotification notification) { + evictionsMetric.inc(); + } + } + + private LoadAwareCacheLoader getLoadAwareCacheLoader() { + return new LoadAwareCacheLoader<>() { + boolean isLoaded = false; + + @Override + public String load(String key) { + isLoaded = true; + return UUID.randomUUID().toString(); + } + + @Override + public boolean isLoaded() { + return isLoaded; + } + }; + } + + private 
TieredSpilloverCache intializeTieredSpilloverCache( + int keyValueSize, + int diskCacheSize, + RemovalListener removalListener, + Settings settings, + long diskDeliberateDelay + ) { + // ICache.Factory onHeapCacheFactory = new OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory(); + ICache.Factory onHeapCacheFactory = new OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory(); + CacheConfig cacheConfig = new CacheConfig.Builder().setKeyType(String.class) + .setKeyType(String.class) + .setWeigher((k, v) -> keyValueSize) + .setRemovalListener(removalListener) + .setSettings(settings) + .build(); + + ICache.Factory mockDiskCacheFactory = new MockOnDiskCache.MockDiskCacheFactory(diskDeliberateDelay, diskCacheSize); + + return new TieredSpilloverCache.Builder().setCacheType(CacheType.INDICES_REQUEST_CACHE) + .setRemovalListener(removalListener) + .setOnHeapCacheFactory(onHeapCacheFactory) + .setDiskCacheFactory(mockDiskCacheFactory) + .setCacheConfig(cacheConfig) + .build(); + } +} + +/** + * Wrapper OpenSearchOnHeap cache which tracks its own stats. 
+ * @param Type of key + * @param Type of value + */ +class OpenSearchOnHeapCacheWrapper extends OpenSearchOnHeapCache { + + StatsHolder statsHolder = new StatsHolder(); + + public OpenSearchOnHeapCacheWrapper(Builder builder) { + super(builder); + } + + @Override + public V get(K key) { + V value = super.get(key); + if (value != null) { + statsHolder.hitCount.inc(); + } else { + statsHolder.missCount.inc(); + } + return value; + } + + @Override + public void put(K key, V value) { + super.put(key, value); + statsHolder.onCachedMetric.inc(); + } + + @Override + public V computeIfAbsent(K key, LoadAwareCacheLoader loader) throws Exception { + V value = super.computeIfAbsent(key, loader); + if (loader.isLoaded()) { + statsHolder.missCount.inc(); + statsHolder.onCachedMetric.inc(); + } else { + statsHolder.hitCount.inc(); + } + return value; + } + + @Override + public void invalidate(K key) { + super.invalidate(key); + } + + @Override + public void invalidateAll() { + super.invalidateAll(); + } + + @Override + public Iterable keys() { + return super.keys(); + } + + @Override + public long count() { + return super.count(); + } + + @Override + public void refresh() { + super.refresh(); + } + + @Override + public void close() {} + + @Override + public void onRemoval(RemovalNotification notification) { + super.onRemoval(notification); + } + + /** + * Factory for the wrapper cache class + */ + static class OpenSearchOnHeapCacheWrapperFactory extends OpenSearchOnHeapCacheFactory { + + @Override + public ICache create(CacheConfig config, CacheType cacheType) { + Map> settingList = OpenSearchOnHeapCacheSettings.getSettingListForCacheType(cacheType); + Settings settings = config.getSettings(); + return new OpenSearchOnHeapCacheWrapper<>( + (Builder) new OpenSearchOnHeapCache.Builder().setMaximumWeightInBytes( + ((ByteSizeValue) settingList.get(MAXIMUM_SIZE_IN_BYTES_KEY).get(settings)).getBytes() + ).setWeigher(config.getWeigher()) + ); + } + + @Override + public String 
getCacheName() { + return super.getCacheName(); + } + } + + class StatsHolder { + CounterMetric hitCount = new CounterMetric(); + CounterMetric missCount = new CounterMetric(); + CounterMetric evictionMetric = new CounterMetric(); + CounterMetric onCachedMetric = new CounterMetric(); + } +} + +class MockOnDiskCache implements ICache { + + Map cache; + int maxSize; + long delay; + + MockOnDiskCache(int maxSize, long delay) { + this.maxSize = maxSize; + this.delay = delay; + this.cache = new ConcurrentHashMap(); + } + + @Override + public V get(K key) { + V value = cache.get(key); + return value; + } + + @Override + public void put(K key, V value) { + if (this.cache.size() >= maxSize) { // For simplification + // eventListener.onRemoval(new StoreAwareCacheRemovalNotification<>(key, value, RemovalReason.EVICTED, + // CacheStoreType.DISK)); + return; + } + try { + Thread.sleep(delay); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + this.cache.put(key, value); + // eventListener.onCached(key, value, CacheStoreType.DISK); + } + + @Override + public V computeIfAbsent(K key, LoadAwareCacheLoader loader) throws Exception { + V value = cache.computeIfAbsent(key, key1 -> { + try { + return loader.load(key); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + // if (!loader.isLoaded()) { + // eventListener.onHit(key, value, CacheStoreType.DISK); + // } else { + // eventListener.onMiss(key, CacheStoreType.DISK); + // eventListener.onCached(key, value, CacheStoreType.DISK); + // } + return value; + } + + @Override + public void invalidate(K key) { + if (this.cache.containsKey(key)) { + // eventListener.onRemoval(new StoreAwareCacheRemovalNotification<>(key, null, RemovalReason.INVALIDATED, CacheStoreType.DISK)); + } + this.cache.remove(key); + } + + @Override + public void invalidateAll() { + this.cache.clear(); + } + + @Override + public Iterable keys() { + return this.cache.keySet(); + } + + @Override + public long count() { + 
return this.cache.size(); + } + + @Override + public void refresh() {} + + @Override + public void close() { + + } + + public static class MockDiskCacheFactory implements Factory { + + static final String NAME = "mockDiskCache"; + final long delay; + final int maxSize; + + MockDiskCacheFactory(long delay, int maxSize) { + this.delay = delay; + this.maxSize = maxSize; + } + + @Override + public ICache create(CacheConfig config, CacheType cacheType) { + return new Builder().setMaxSize(maxSize).setDeliberateDelay(delay).build(); + } + + @Override + public String getCacheName() { + return NAME; + } + } + + public static class Builder extends ICacheBuilder { + + int maxSize; + long delay; + + @Override + public ICache build() { + return new MockOnDiskCache(this.maxSize, this.delay); + } + + public Builder setMaxSize(int maxSize) { + this.maxSize = maxSize; + return this; + } + + public Builder setDeliberateDelay(long millis) { + this.delay = millis; + return this; + } + } +} diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheCachePlugin.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheCachePlugin.java index b82b52806a8b5..648986957e11e 100644 --- a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheCachePlugin.java +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheCachePlugin.java @@ -10,8 +10,8 @@ import org.opensearch.cache.store.disk.EhcacheDiskCache; import org.opensearch.common.cache.CacheType; -import org.opensearch.common.cache.store.StoreAwareCache; -import org.opensearch.common.cache.store.enums.CacheStoreType; +import org.opensearch.common.cache.ICache; +import org.opensearch.common.cache.provider.CacheProvider; import org.opensearch.common.settings.Setting; import org.opensearch.plugins.CachePlugin; import org.opensearch.plugins.Plugin; @@ -20,7 +20,7 @@ import java.util.List; import java.util.Map; -import static org.opensearch.cache.EhcacheSettings.CACHE_TYPE_MAP; +import static 
org.opensearch.cache.EhcacheDiskCacheSettings.CACHE_TYPE_MAP; /** * Ehcache based cache plugin. @@ -35,18 +35,16 @@ public class EhcacheCachePlugin extends Plugin implements CachePlugin { public EhcacheCachePlugin() {} @Override - public Map getCacheStoreTypeMap() { - return Map.of(CacheStoreType.DISK, new EhcacheDiskCache.EhcacheDiskCacheFactory()); + public Map getCacheFactoryMap(CacheProvider cacheProvider) { + return Map.of(EhcacheDiskCache.EhcacheDiskCacheFactory.EHCACHE_DISK_CACHE_NAME, new EhcacheDiskCache.EhcacheDiskCacheFactory()); } @Override public List> getSettings() { List> settingList = new ArrayList<>(); - for (Map.Entry>>> entry : CACHE_TYPE_MAP.entrySet()) { - for (Map.Entry>> cacheStoreTypeMap : entry.getValue().entrySet()) { - for (Map.Entry> entry1 : cacheStoreTypeMap.getValue().entrySet()) { - settingList.add(entry1.getValue()); - } + for (Map.Entry>> entry : CACHE_TYPE_MAP.entrySet()) { + for (Map.Entry> entry1 : entry.getValue().entrySet()) { + settingList.add(entry1.getValue()); } } return settingList; diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheSettings.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheDiskCacheSettings.java similarity index 59% rename from plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheSettings.java rename to plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheDiskCacheSettings.java index 2150c6e598cc2..47bbf2ce6c223 100644 --- a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheSettings.java +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheDiskCacheSettings.java @@ -8,40 +8,39 @@ package org.opensearch.cache; +import org.opensearch.cache.store.disk.EhcacheDiskCache; import org.opensearch.common.cache.CacheType; -import org.opensearch.common.cache.store.enums.CacheStoreType; import org.opensearch.common.settings.Setting; import org.opensearch.common.unit.TimeValue; +import java.util.HashMap; import 
java.util.Map; import static org.opensearch.common.settings.Setting.Property.NodeScope; /** - * Settings related to ehcache. + * Settings related to ehcache disk cache. */ -public class EhcacheSettings { - - static final String DISK_CACHE_SETTING_SUFFIX = "disk.ehcache"; +public class EhcacheDiskCacheSettings { /** * Ehcache disk write minimum threads for its pool * - * Setting pattern: {cache_type}.disk.ehcache.min_threads + * Setting pattern: {cache_type}.ehcache_disk.min_threads */ public static final Setting.AffixSetting DISK_WRITE_MINIMUM_THREADS_SETTING = Setting.suffixKeySetting( - DISK_CACHE_SETTING_SUFFIX + ".min_threads", + EhcacheDiskCache.EhcacheDiskCacheFactory.EHCACHE_DISK_CACHE_NAME + ".min_threads", (key) -> Setting.intSetting(key, 2, 1, 5, NodeScope) ); /** * Ehcache disk write maximum threads for its pool * - * Setting pattern: {cache_type}.disk.ehcache.max_threads + * Setting pattern: {cache_type}.ehcache_disk.max_threads */ public static final Setting.AffixSetting DISK_WRITE_MAXIMUM_THREADS_SETTING = Setting.suffixKeySetting( - DISK_CACHE_SETTING_SUFFIX + ".max_threads", + EhcacheDiskCache.EhcacheDiskCacheFactory.EHCACHE_DISK_CACHE_NAME + ".max_threads", (key) -> Setting.intSetting(key, 2, 1, 20, NodeScope) ); @@ -56,7 +55,7 @@ public class EhcacheSettings { * */ public static final Setting.AffixSetting DISK_WRITE_CONCURRENCY_SETTING = Setting.suffixKeySetting( - DISK_CACHE_SETTING_SUFFIX + ".concurrency", + EhcacheDiskCache.EhcacheDiskCacheFactory.EHCACHE_DISK_CACHE_NAME + ".concurrency", (key) -> Setting.intSetting(key, 1, 1, 3, NodeScope) ); @@ -67,7 +66,7 @@ public class EhcacheSettings { * Default value is 16 within Ehcache. 
*/ public static final Setting.AffixSetting DISK_SEGMENTS_SETTING = Setting.suffixKeySetting( - DISK_CACHE_SETTING_SUFFIX + ".segments", + EhcacheDiskCache.EhcacheDiskCacheFactory.EHCACHE_DISK_CACHE_NAME + ".segments", (key) -> Setting.intSetting(key, 16, 1, 32, NodeScope) ); @@ -75,7 +74,7 @@ public class EhcacheSettings { * Storage path for disk cache. */ public static final Setting.AffixSetting DISK_STORAGE_PATH_SETTING = Setting.suffixKeySetting( - DISK_CACHE_SETTING_SUFFIX + ".storage.path", + EhcacheDiskCache.EhcacheDiskCacheFactory.EHCACHE_DISK_CACHE_NAME + ".storage.path", (key) -> Setting.simpleString(key, "", NodeScope) ); @@ -83,7 +82,7 @@ public class EhcacheSettings { * Disk cache alias. */ public static final Setting.AffixSetting DISK_CACHE_ALIAS_SETTING = Setting.suffixKeySetting( - DISK_CACHE_SETTING_SUFFIX + ".alias", + EhcacheDiskCache.EhcacheDiskCacheFactory.EHCACHE_DISK_CACHE_NAME + ".alias", (key) -> Setting.simpleString(key, "", NodeScope) ); @@ -91,7 +90,7 @@ public class EhcacheSettings { * Disk cache expire after access setting. */ public static final Setting.AffixSetting DISK_CACHE_EXPIRE_AFTER_ACCESS_SETTING = Setting.suffixKeySetting( - DISK_CACHE_SETTING_SUFFIX + ".expire_after_access", + EhcacheDiskCache.EhcacheDiskCacheFactory.EHCACHE_DISK_CACHE_NAME + ".expire_after_access", (key) -> Setting.positiveTimeSetting(key, TimeValue.MAX_VALUE, NodeScope) ); @@ -99,7 +98,7 @@ public class EhcacheSettings { * Disk cache max size setting. */ public static final Setting.AffixSetting DISK_CACHE_MAX_SIZE_IN_BYTES_SETTING = Setting.suffixKeySetting( - DISK_CACHE_SETTING_SUFFIX + ".max_size_in_bytes", + EhcacheDiskCache.EhcacheDiskCacheFactory.EHCACHE_DISK_CACHE_NAME + ".max_size_in_bytes", (key) -> Setting.longSetting(key, 1073741824L, NodeScope) ); @@ -140,58 +139,72 @@ public class EhcacheSettings { */ public static final String DISK_STORAGE_PATH_KEY = "disk_storage_path"; + /** + * Map of key to setting. 
+ */ + private static final Map> KEY_SETTING_MAP = Map.of( + DISK_SEGMENT_KEY, + DISK_SEGMENTS_SETTING, + DISK_CACHE_EXPIRE_AFTER_ACCESS_KEY, + DISK_CACHE_EXPIRE_AFTER_ACCESS_SETTING, + DISK_CACHE_ALIAS_KEY, + DISK_CACHE_ALIAS_SETTING, + DISK_SEGMENTS_KEY, + DISK_SEGMENTS_SETTING, + DISK_WRITE_CONCURRENCY_KEY, + DISK_WRITE_CONCURRENCY_SETTING, + DISK_WRITE_MAXIMUM_THREADS_KEY, + DISK_WRITE_MAXIMUM_THREADS_SETTING, + DISK_WRITE_MIN_THREADS_KEY, + DISK_WRITE_MINIMUM_THREADS_SETTING, + DISK_STORAGE_PATH_KEY, + DISK_STORAGE_PATH_SETTING, + DISK_MAX_SIZE_IN_BYTES_KEY, + DISK_CACHE_MAX_SIZE_IN_BYTES_SETTING + ); + /** * Map to store desired settings for a cache type. */ - public static final Map>>> CACHE_TYPE_MAP = Map.of( - CacheType.INDICES_REQUEST_CACHE, - Map.of( - CacheStoreType.DISK, - Map.of( - DISK_SEGMENT_KEY, - DISK_SEGMENTS_SETTING.getConcreteSettingForNamespace(CacheType.INDICES_REQUEST_CACHE.getSettingPrefix()), - DISK_CACHE_EXPIRE_AFTER_ACCESS_KEY, - DISK_CACHE_EXPIRE_AFTER_ACCESS_SETTING.getConcreteSettingForNamespace(CacheType.INDICES_REQUEST_CACHE.getSettingPrefix()), - DISK_CACHE_ALIAS_KEY, - DISK_CACHE_ALIAS_SETTING.getConcreteSettingForNamespace(CacheType.INDICES_REQUEST_CACHE.getSettingPrefix()), - DISK_SEGMENTS_KEY, - DISK_SEGMENTS_SETTING.getConcreteSettingForNamespace(CacheType.INDICES_REQUEST_CACHE.getSettingPrefix()), - DISK_WRITE_CONCURRENCY_KEY, - DISK_WRITE_CONCURRENCY_SETTING.getConcreteSettingForNamespace(CacheType.INDICES_REQUEST_CACHE.getSettingPrefix()), - DISK_WRITE_MAXIMUM_THREADS_KEY, - DISK_WRITE_MAXIMUM_THREADS_SETTING.getConcreteSettingForNamespace(CacheType.INDICES_REQUEST_CACHE.getSettingPrefix()), - DISK_WRITE_MIN_THREADS_KEY, - DISK_WRITE_MINIMUM_THREADS_SETTING.getConcreteSettingForNamespace(CacheType.INDICES_REQUEST_CACHE.getSettingPrefix()), - DISK_STORAGE_PATH_KEY, - DISK_STORAGE_PATH_SETTING.getConcreteSettingForNamespace(CacheType.INDICES_REQUEST_CACHE.getSettingPrefix()), - DISK_MAX_SIZE_IN_BYTES_KEY, - 
DISK_CACHE_MAX_SIZE_IN_BYTES_SETTING.getConcreteSettingForNamespace(CacheType.INDICES_REQUEST_CACHE.getSettingPrefix()) - ) - ) - ); + public static final Map>> CACHE_TYPE_MAP = getCacheTypeMap(); + + /** + * Used to form concrete setting for cache types and return desired map + * @return map of cacheType and associated settings. + */ + private static final Map>> getCacheTypeMap() { + Map>> cacheTypeMap = new HashMap<>(); + for (CacheType cacheType : CacheType.values()) { + Map> settingMap = new HashMap<>(); + for (Map.Entry> entry : KEY_SETTING_MAP.entrySet()) { + settingMap.put(entry.getKey(), entry.getValue().getConcreteSettingForNamespace(cacheType.getSettingPrefix())); + } + cacheTypeMap.put(cacheType, settingMap); + } + return cacheTypeMap; + } /** * Fetches setting list for a combination of cache type and store name. * @param cacheType cache type - * @param cacheStoreType store type * @return settings */ - public static final Map> getSettingListForCacheTypeAndStore(CacheType cacheType, CacheStoreType cacheStoreType) { - Map>> cacheTypeSettings = CACHE_TYPE_MAP.get(cacheType); + public static final Map> getSettingListForCacheType(CacheType cacheType) { + Map> cacheTypeSettings = CACHE_TYPE_MAP.get(cacheType); if (cacheTypeSettings == null) { - throw new IllegalArgumentException("No settings exist with corresponding cache type: " + cacheType); - } - Map> settingList = cacheTypeSettings.get(cacheStoreType); - if (settingList == null) { throw new IllegalArgumentException( - "No settings exist for cache store name: " + cacheStoreType + " associated with cache type: " + cacheType + "No settings exist for cache store name: " + + EhcacheDiskCache.EhcacheDiskCacheFactory.EHCACHE_DISK_CACHE_NAME + + "associated with " + + "cache type: " + + cacheType ); } - return settingList; + return cacheTypeSettings; } /** * Default constructor. Added to fix javadocs. 
*/ - public EhcacheSettings() {} + public EhcacheDiskCacheSettings() {} } diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java index 91635dda1f668..fc3cb6092c9d6 100644 --- a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java @@ -11,19 +11,18 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.opensearch.OpenSearchException; -import org.opensearch.cache.EhcacheSettings; +import org.opensearch.cache.EhcacheDiskCacheSettings; import org.opensearch.common.SuppressForbidden; import org.opensearch.common.annotation.ExperimentalApi; import org.opensearch.common.cache.CacheType; +import org.opensearch.common.cache.ICache; import org.opensearch.common.cache.LoadAwareCacheLoader; +import org.opensearch.common.cache.RemovalListener; +import org.opensearch.common.cache.RemovalNotification; import org.opensearch.common.cache.RemovalReason; import org.opensearch.common.cache.stats.CacheStats; -import org.opensearch.common.cache.store.StoreAwareCache; -import org.opensearch.common.cache.store.StoreAwareCacheRemovalNotification; -import org.opensearch.common.cache.store.builders.StoreAwareCacheBuilder; -import org.opensearch.common.cache.store.config.StoreAwareCacheConfig; -import org.opensearch.common.cache.store.enums.CacheStoreType; -import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; +import org.opensearch.common.cache.store.builders.ICacheBuilder; +import org.opensearch.common.cache.store.config.CacheConfig; import org.opensearch.common.collect.Tuple; import org.opensearch.common.metrics.CounterMetric; import org.opensearch.common.settings.Setting; @@ -60,15 +59,14 @@ import org.ehcache.spi.loaderwriter.CacheLoadingException; 
import org.ehcache.spi.loaderwriter.CacheWritingException; -import static org.opensearch.cache.EhcacheSettings.DISK_CACHE_ALIAS_KEY; -import static org.opensearch.cache.EhcacheSettings.DISK_CACHE_EXPIRE_AFTER_ACCESS_KEY; -import static org.opensearch.cache.EhcacheSettings.DISK_MAX_SIZE_IN_BYTES_KEY; -import static org.opensearch.cache.EhcacheSettings.DISK_SEGMENT_KEY; -import static org.opensearch.cache.EhcacheSettings.DISK_STORAGE_PATH_KEY; -import static org.opensearch.cache.EhcacheSettings.DISK_STORAGE_PATH_SETTING; -import static org.opensearch.cache.EhcacheSettings.DISK_WRITE_CONCURRENCY_KEY; -import static org.opensearch.cache.EhcacheSettings.DISK_WRITE_MAXIMUM_THREADS_KEY; -import static org.opensearch.cache.EhcacheSettings.DISK_WRITE_MIN_THREADS_KEY; +import static org.opensearch.cache.EhcacheDiskCacheSettings.DISK_CACHE_ALIAS_KEY; +import static org.opensearch.cache.EhcacheDiskCacheSettings.DISK_CACHE_EXPIRE_AFTER_ACCESS_KEY; +import static org.opensearch.cache.EhcacheDiskCacheSettings.DISK_MAX_SIZE_IN_BYTES_KEY; +import static org.opensearch.cache.EhcacheDiskCacheSettings.DISK_SEGMENT_KEY; +import static org.opensearch.cache.EhcacheDiskCacheSettings.DISK_STORAGE_PATH_KEY; +import static org.opensearch.cache.EhcacheDiskCacheSettings.DISK_WRITE_CONCURRENCY_KEY; +import static org.opensearch.cache.EhcacheDiskCacheSettings.DISK_WRITE_MAXIMUM_THREADS_KEY; +import static org.opensearch.cache.EhcacheDiskCacheSettings.DISK_WRITE_MIN_THREADS_KEY; /** * This variant of disk cache uses Ehcache underneath. 
@@ -79,7 +77,7 @@ * */ @ExperimentalApi -public class EhcacheDiskCache implements StoreAwareCache { +public class EhcacheDiskCache implements ICache { private static final Logger logger = LogManager.getLogger(EhcacheDiskCache.class); @@ -102,7 +100,7 @@ public class EhcacheDiskCache implements StoreAwareCache { private final EhCacheEventListener ehCacheEventListener; private final String threadPoolAlias; private final Settings settings; - private final StoreAwareCacheEventListener eventListener; + private final RemovalListener removalListener; private final CacheType cacheType; private final String diskCacheAlias; @@ -137,9 +135,9 @@ private EhcacheDiskCache(Builder builder) { } this.settings = Objects.requireNonNull(builder.getSettings(), "Settings objects shouldn't be null"); this.cacheManager = buildCacheManager(); - Objects.requireNonNull(builder.getEventListener(), "Listener can't be null"); - this.eventListener = builder.getEventListener(); - this.ehCacheEventListener = new EhCacheEventListener(builder.getEventListener()); + Objects.requireNonNull(builder.getRemovalListener(), "Removal listener can't be null"); + this.removalListener = builder.getRemovalListener(); + this.ehCacheEventListener = new EhCacheEventListener(builder.getRemovalListener()); this.cache = buildCache(Duration.ofMillis(expireAfterAccess.getMillis()), builder); } @@ -171,12 +169,10 @@ public Duration getExpiryForUpdate(K key, Supplier oldValue, V newV .withService( new OffHeapDiskStoreConfiguration( this.threadPoolAlias, - (Integer) EhcacheSettings.getSettingListForCacheTypeAndStore(cacheType, CacheStoreType.DISK) + (Integer) EhcacheDiskCacheSettings.getSettingListForCacheType(cacheType) .get(DISK_WRITE_CONCURRENCY_KEY) .get(settings), - (Integer) EhcacheSettings.getSettingListForCacheTypeAndStore(cacheType, CacheStoreType.DISK) - .get(DISK_SEGMENT_KEY) - .get(settings) + (Integer) EhcacheDiskCacheSettings.getSettingListForCacheType(cacheType).get(DISK_SEGMENT_KEY).get(settings) ) ) ); @@ 
-222,10 +218,10 @@ private PersistentCacheManager buildCacheManager() { // like event listeners .pool( this.threadPoolAlias, - (Integer) EhcacheSettings.getSettingListForCacheTypeAndStore(cacheType, CacheStoreType.DISK) + (Integer) EhcacheDiskCacheSettings.getSettingListForCacheType(cacheType) .get(DISK_WRITE_MIN_THREADS_KEY) .get(settings), - (Integer) EhcacheSettings.getSettingListForCacheTypeAndStore(cacheType, CacheStoreType.DISK) + (Integer) EhcacheDiskCacheSettings.getSettingListForCacheType(cacheType) .get(DISK_WRITE_MAXIMUM_THREADS_KEY) .get(settings) ) @@ -245,11 +241,6 @@ public V get(K key) { } catch (CacheLoadingException ex) { throw new OpenSearchException("Exception occurred while trying to fetch item from ehcache disk cache"); } - if (value != null) { - eventListener.onHit(key, value, CacheStoreType.DISK); - } else { - eventListener.onMiss(key, CacheStoreType.DISK); - } return value; } @@ -283,11 +274,6 @@ public V computeIfAbsent(K key, LoadAwareCacheLoader loader) throws Except if (value == null) { value = compute(key, loader); } - if (!loader.isLoaded()) { - eventListener.onHit(key, value, CacheStoreType.DISK); - } else { - eventListener.onMiss(key, CacheStoreType.DISK); - } return value; } @@ -396,26 +382,9 @@ public void close() { } } - /** - * Relevant stats for this cache. - * @return CacheStats - */ - @Override - public CacheStats stats() { - return stats; - } - - /** - * Returns the tier type. - * @return CacheStoreType.DISK - */ - @Override - public CacheStoreType getTierType() { - return CacheStoreType.DISK; - } - /** * Stats related to disk cache. + * TODO: Remove this once cache stats are integrated. 
*/ static class DiskCacheStats implements CacheStats { private final CounterMetric count = new CounterMetric(); @@ -459,10 +428,10 @@ public K next() { */ class EhCacheEventListener implements CacheEventListener { - private final StoreAwareCacheEventListener eventListener; + private final RemovalListener removalListener; - EhCacheEventListener(StoreAwareCacheEventListener eventListener) { - this.eventListener = eventListener; + EhCacheEventListener(RemovalListener removalListener) { + this.removalListener = removalListener; } @Override @@ -470,41 +439,22 @@ public void onEvent(CacheEvent event) { switch (event.getType()) { case CREATED: stats.count.inc(); - this.eventListener.onCached(event.getKey(), event.getNewValue(), CacheStoreType.DISK); + // this.eventListener.onCached(event.getKey(), event.getNewValue(), CacheStoreType.DISK); assert event.getOldValue() == null; break; case EVICTED: - this.eventListener.onRemoval( - new StoreAwareCacheRemovalNotification<>( - event.getKey(), - event.getOldValue(), - RemovalReason.EVICTED, - CacheStoreType.DISK - ) - ); + this.removalListener.onRemoval(new RemovalNotification<>(event.getKey(), event.getOldValue(), RemovalReason.EVICTED)); stats.count.dec(); assert event.getNewValue() == null; break; case REMOVED: stats.count.dec(); - this.eventListener.onRemoval( - new StoreAwareCacheRemovalNotification<>( - event.getKey(), - event.getOldValue(), - RemovalReason.EXPLICIT, - CacheStoreType.DISK - ) - ); + this.removalListener.onRemoval(new RemovalNotification<>(event.getKey(), event.getOldValue(), RemovalReason.EXPLICIT)); assert event.getNewValue() == null; break; case EXPIRED: - this.eventListener.onRemoval( - new StoreAwareCacheRemovalNotification<>( - event.getKey(), - event.getOldValue(), - RemovalReason.INVALIDATED, - CacheStoreType.DISK - ) + this.removalListener.onRemoval( + new RemovalNotification<>(event.getKey(), event.getOldValue(), RemovalReason.INVALIDATED) ); stats.count.dec(); assert event.getNewValue() == null; 
@@ -520,12 +470,12 @@ public void onEvent(CacheEvent event) { /** * Factory to create an ehcache disk cache. */ - public static class EhcacheDiskCacheFactory implements StoreAwareCache.Factory { + public static class EhcacheDiskCacheFactory implements ICache.Factory { /** * Ehcache disk cache name. */ - public static final String EHCACHE_DISK_CACHE_NAME = "ehcacheDiskCache"; + public static final String EHCACHE_DISK_CACHE_NAME = "ehcache_disk"; /** * Default constructor. @@ -533,18 +483,15 @@ public static class EhcacheDiskCacheFactory implements StoreAwareCache.Factory { public EhcacheDiskCacheFactory() {} @Override - public StoreAwareCache create(StoreAwareCacheConfig config, CacheType cacheType) { - Map> settingList = EhcacheSettings.getSettingListForCacheTypeAndStore(cacheType, CacheStoreType.DISK); + public ICache create(CacheConfig config, CacheType cacheType) { + Map> settingList = EhcacheDiskCacheSettings.getSettingListForCacheType(cacheType); Settings settings = config.getSettings(); - Setting stringSetting = DISK_STORAGE_PATH_SETTING.getConcreteSettingForNamespace( - CacheType.INDICES_REQUEST_CACHE.getSettingPrefix() - ); return new Builder().setStoragePath((String) settingList.get(DISK_STORAGE_PATH_KEY).get(settings)) .setDiskCacheAlias((String) settingList.get(DISK_CACHE_ALIAS_KEY).get(settings)) .setCacheType(cacheType) .setKeyType((config.getKeyType())) .setValueType(config.getValueType()) - .setEventListener(config.getEventListener()) + .setRemovalListener(config.getRemovalListener()) .setExpireAfterAccess((TimeValue) settingList.get(DISK_CACHE_EXPIRE_AFTER_ACCESS_KEY).get(settings)) .setMaximumWeightInBytes((Long) settingList.get(DISK_MAX_SIZE_IN_BYTES_KEY).get(settings)) .setSettings(settings) @@ -562,7 +509,7 @@ public String getCacheName() { * @param Type of key * @param Type of value */ - public static class Builder extends StoreAwareCacheBuilder { + public static class Builder extends ICacheBuilder { private CacheType cacheType; private String 
storagePath; diff --git a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/EhcachePluginTests.java b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/EhcachePluginTests.java index 61442c3aa66a5..c92fd7a356d07 100644 --- a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/EhcachePluginTests.java +++ b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/EhcachePluginTests.java @@ -8,19 +8,22 @@ package org.opensearch.cache; -import org.opensearch.common.cache.store.StoreAwareCache; -import org.opensearch.common.cache.store.enums.CacheStoreType; +import org.opensearch.cache.store.disk.EhcacheDiskCache; +import org.opensearch.common.cache.ICache; +import org.opensearch.common.cache.provider.CacheProvider; import org.opensearch.test.OpenSearchTestCase; import java.util.Map; +import static org.mockito.Mockito.mock; + public class EhcachePluginTests extends OpenSearchTestCase { private EhcacheCachePlugin ehcacheCachePlugin = new EhcacheCachePlugin(); public void testGetCacheStoreTypeMap() { - Map factoryMap = ehcacheCachePlugin.getCacheStoreTypeMap(); + Map factoryMap = ehcacheCachePlugin.getCacheFactoryMap(mock(CacheProvider.class)); assertNotNull(factoryMap); - assertNotNull(factoryMap.get(CacheStoreType.DISK)); + assertNotNull(factoryMap.get(EhcacheDiskCache.EhcacheDiskCacheFactory.EHCACHE_DISK_CACHE_NAME)); } } diff --git a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java index 0f687cf923e66..5c21b0b7cf182 100644 --- a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java +++ b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java @@ -8,14 +8,14 @@ package org.opensearch.cache.store.disk; -import org.opensearch.cache.EhcacheSettings; +import org.opensearch.cache.EhcacheDiskCacheSettings; import 
org.opensearch.common.cache.CacheType; +import org.opensearch.common.cache.ICache; import org.opensearch.common.cache.LoadAwareCacheLoader; -import org.opensearch.common.cache.store.StoreAwareCache; -import org.opensearch.common.cache.store.StoreAwareCacheRemovalNotification; -import org.opensearch.common.cache.store.config.StoreAwareCacheConfig; -import org.opensearch.common.cache.store.enums.CacheStoreType; -import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; +import org.opensearch.common.cache.RemovalListener; +import org.opensearch.common.cache.RemovalNotification; +import org.opensearch.common.cache.store.config.CacheConfig; +import org.opensearch.common.metrics.CounterMetric; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.env.NodeEnvironment; @@ -31,10 +31,9 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.Phaser; -import java.util.concurrent.atomic.AtomicInteger; -import static org.opensearch.cache.EhcacheSettings.DISK_MAX_SIZE_IN_BYTES_KEY; -import static org.opensearch.cache.EhcacheSettings.DISK_STORAGE_PATH_KEY; +import static org.opensearch.cache.EhcacheDiskCacheSettings.DISK_MAX_SIZE_IN_BYTES_KEY; +import static org.opensearch.cache.EhcacheDiskCacheSettings.DISK_STORAGE_PATH_KEY; import static org.hamcrest.CoreMatchers.instanceOf; public class EhCacheDiskCacheTests extends OpenSearchSingleNodeTestCase { @@ -43,9 +42,9 @@ public class EhCacheDiskCacheTests extends OpenSearchSingleNodeTestCase { public void testBasicGetAndPut() throws IOException { Settings settings = Settings.builder().build(); - MockEventListener mockEventListener = new MockEventListener<>(); + MockRemovalListener removalListener = new MockRemovalListener<>(); try (NodeEnvironment env = newNodeEnvironment(settings)) { - StoreAwareCache ehcacheTest = new EhcacheDiskCache.Builder().setThreadPoolAlias("ehcacheTest") + 
ICache ehcacheTest = new EhcacheDiskCache.Builder().setThreadPoolAlias("ehcacheTest") .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") .setKeyType(String.class) .setValueType(String.class) @@ -53,7 +52,7 @@ public void testBasicGetAndPut() throws IOException { .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) - .setEventListener(mockEventListener) + .setRemovalListener(removalListener) .build(); int randomKeys = randomIntBetween(10, 100); Map keyValueMap = new HashMap<>(); @@ -67,8 +66,7 @@ public void testBasicGetAndPut() throws IOException { String value = ehcacheTest.get(entry.getKey()); assertEquals(entry.getValue(), value); } - assertEquals(randomKeys, mockEventListener.onCachedCount.get()); - assertEquals(randomKeys, mockEventListener.onHitCount.get()); + assertEquals(randomKeys, ehcacheTest.count()); // Validate misses int expectedNumberOfMisses = randomIntBetween(10, 200); @@ -76,29 +74,28 @@ public void testBasicGetAndPut() throws IOException { ehcacheTest.get(UUID.randomUUID().toString()); } - assertEquals(expectedNumberOfMisses, mockEventListener.onMissCount.get()); ehcacheTest.close(); } } public void testBasicGetAndPutUsingFactory() throws IOException { - MockEventListener mockEventListener = new MockEventListener<>(); + MockRemovalListener removalListener = new MockRemovalListener<>(); try (NodeEnvironment env = newNodeEnvironment(Settings.EMPTY)) { - StoreAwareCache.Factory ehcacheFactory = new EhcacheDiskCache.EhcacheDiskCacheFactory(); - StoreAwareCache ehcacheTest = ehcacheFactory.create( - new StoreAwareCacheConfig.Builder().setValueType(String.class) + ICache.Factory ehcacheFactory = new EhcacheDiskCache.EhcacheDiskCacheFactory(); + ICache ehcacheTest = ehcacheFactory.create( + new CacheConfig.Builder().setValueType(String.class) .setKeyType(String.class) - .setEventListener(mockEventListener) + .setRemovalListener(removalListener) .setSettings( 
Settings.builder() .put( - EhcacheSettings.getSettingListForCacheTypeAndStore(CacheType.INDICES_REQUEST_CACHE, CacheStoreType.DISK) + EhcacheDiskCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE) .get(DISK_MAX_SIZE_IN_BYTES_KEY) .getKey(), CACHE_SIZE_IN_BYTES ) .put( - EhcacheSettings.getSettingListForCacheTypeAndStore(CacheType.INDICES_REQUEST_CACHE, CacheStoreType.DISK) + EhcacheDiskCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE) .get(DISK_STORAGE_PATH_KEY) .getKey(), env.nodePaths()[0].indicesPath.toString() + "/request_cache" @@ -120,8 +117,7 @@ public void testBasicGetAndPutUsingFactory() throws IOException { String value = ehcacheTest.get(entry.getKey()); assertEquals(entry.getValue(), value); } - assertEquals(randomKeys, mockEventListener.onCachedCount.get()); - assertEquals(randomKeys, mockEventListener.onHitCount.get()); + assertEquals(randomKeys, ehcacheTest.count()); // Validate misses int expectedNumberOfMisses = randomIntBetween(10, 200); @@ -129,16 +125,15 @@ public void testBasicGetAndPutUsingFactory() throws IOException { ehcacheTest.get(UUID.randomUUID().toString()); } - assertEquals(expectedNumberOfMisses, mockEventListener.onMissCount.get()); ehcacheTest.close(); } } public void testConcurrentPut() throws Exception { Settings settings = Settings.builder().build(); - MockEventListener mockEventListener = new MockEventListener<>(); + MockRemovalListener removalListener = new MockRemovalListener<>(); try (NodeEnvironment env = newNodeEnvironment(settings)) { - StoreAwareCache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") + ICache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") .setThreadPoolAlias("ehcacheTest") .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") .setKeyType(String.class) @@ -147,7 +142,7 @@ public void testConcurrentPut() throws Exception { .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) 
.setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) - .setEventListener(mockEventListener) + .setRemovalListener(removalListener) .build(); int randomKeys = randomIntBetween(20, 100); Thread[] threads = new Thread[randomKeys]; @@ -173,16 +168,16 @@ public void testConcurrentPut() throws Exception { String value = ehcacheTest.get(entry.getKey()); assertEquals(entry.getValue(), value); } - assertEquals(randomKeys, mockEventListener.onCachedCount.get()); + assertEquals(randomKeys, ehcacheTest.count()); ehcacheTest.close(); } } public void testEhcacheParallelGets() throws Exception { Settings settings = Settings.builder().build(); - MockEventListener mockEventListener = new MockEventListener<>(); + MockRemovalListener removalListener = new MockRemovalListener<>(); try (NodeEnvironment env = newNodeEnvironment(settings)) { - StoreAwareCache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") + ICache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") .setThreadPoolAlias("ehcacheTest") .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") .setIsEventListenerModeSync(true) // For accurate count @@ -192,7 +187,7 @@ public void testEhcacheParallelGets() throws Exception { .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) - .setEventListener(mockEventListener) + .setRemovalListener(removalListener) .build(); int randomKeys = randomIntBetween(20, 100); Thread[] threads = new Thread[randomKeys]; @@ -218,7 +213,6 @@ public void testEhcacheParallelGets() throws Exception { } phaser.arriveAndAwaitAdvance(); // Will trigger parallel puts above. 
countDownLatch.await(); // Wait for all threads to finish - assertEquals(randomKeys, mockEventListener.onHitCount.get()); ehcacheTest.close(); } } @@ -226,7 +220,7 @@ public void testEhcacheParallelGets() throws Exception { public void testEhcacheKeyIterator() throws Exception { Settings settings = Settings.builder().build(); try (NodeEnvironment env = newNodeEnvironment(settings)) { - StoreAwareCache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") + ICache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") .setThreadPoolAlias("ehcacheTest") .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") .setKeyType(String.class) @@ -235,7 +229,7 @@ public void testEhcacheKeyIterator() throws Exception { .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) - .setEventListener(new MockEventListener<>()) + .setRemovalListener(new MockRemovalListener<>()) .build(); int randomKeys = randomIntBetween(2, 100); @@ -253,7 +247,6 @@ public void testEhcacheKeyIterator() throws Exception { keysCount++; assertNotNull(ehcacheTest.get(key)); } - assertEquals(CacheStoreType.DISK, ehcacheTest.getTierType()); assertEquals(keysCount, randomKeys); ehcacheTest.close(); } @@ -261,9 +254,9 @@ public void testEhcacheKeyIterator() throws Exception { public void testEvictions() throws Exception { Settings settings = Settings.builder().build(); - MockEventListener mockEventListener = new MockEventListener<>(); + MockRemovalListener removalListener = new MockRemovalListener<>(); try (NodeEnvironment env = newNodeEnvironment(settings)) { - StoreAwareCache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") + ICache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") .setIsEventListenerModeSync(true) .setThreadPoolAlias("ehcacheTest") @@ -273,7 +266,7 @@ public 
void testEvictions() throws Exception { .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) - .setEventListener(mockEventListener) + .setRemovalListener(removalListener) .build(); // Generate a string with 100 characters @@ -284,16 +277,16 @@ public void testEvictions() throws Exception { String key = "Key" + i; ehcacheTest.put(key, value); } - assertTrue(mockEventListener.onRemovalCount.get() > 0); + assertEquals(660, removalListener.evictionMetric.count()); ehcacheTest.close(); } } public void testComputeIfAbsentConcurrently() throws Exception { Settings settings = Settings.builder().build(); - MockEventListener mockEventListener = new MockEventListener<>(); + MockRemovalListener removalListener = new MockRemovalListener<>(); try (NodeEnvironment env = newNodeEnvironment(settings)) { - StoreAwareCache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") + ICache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") .setIsEventListenerModeSync(true) .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") .setThreadPoolAlias("ehcacheTest") @@ -303,7 +296,7 @@ public void testComputeIfAbsentConcurrently() throws Exception { .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) - .setEventListener(mockEventListener) + .setRemovalListener(removalListener) .build(); int numberOfRequest = 2;// randomIntBetween(200, 400); @@ -353,18 +346,16 @@ public String load(String key) { } assertEquals(1, numberOfTimesValueLoaded); assertEquals(0, ((EhcacheDiskCache) ehcacheTest).getCompletableFutureMap().size()); - assertEquals(1, mockEventListener.onMissCount.get()); - assertEquals(1, mockEventListener.onCachedCount.get()); - assertEquals(numberOfRequest - 1, mockEventListener.onHitCount.get()); + assertEquals(1, ehcacheTest.count()); ehcacheTest.close(); } } public void 
testComputeIfAbsentConcurrentlyAndThrowsException() throws Exception { Settings settings = Settings.builder().build(); - MockEventListener mockEventListener = new MockEventListener<>(); + MockRemovalListener removalListener = new MockRemovalListener<>(); try (NodeEnvironment env = newNodeEnvironment(settings)) { - StoreAwareCache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") + ICache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") .setIsEventListenerModeSync(true) .setThreadPoolAlias("ehcacheTest") @@ -374,7 +365,7 @@ public void testComputeIfAbsentConcurrentlyAndThrowsException() throws Exception .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) - .setEventListener(mockEventListener) + .setRemovalListener(removalListener) .build(); int numberOfRequest = randomIntBetween(200, 400); @@ -419,9 +410,9 @@ public String load(String key) throws Exception { public void testComputeIfAbsentWithNullValueLoading() throws Exception { Settings settings = Settings.builder().build(); - MockEventListener mockEventListener = new MockEventListener<>(); + MockRemovalListener removalListener = new MockRemovalListener<>(); try (NodeEnvironment env = newNodeEnvironment(settings)) { - StoreAwareCache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") + ICache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") .setThreadPoolAlias("ehcacheTest") .setIsEventListenerModeSync(true) .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") @@ -431,7 +422,7 @@ public void testComputeIfAbsentWithNullValueLoading() throws Exception { .setSettings(settings) .setExpireAfterAccess(TimeValue.MAX_VALUE) .setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES) - .setEventListener(mockEventListener) + .setRemovalListener(removalListener) .build(); int numberOfRequest 
= randomIntBetween(200, 400); @@ -491,37 +482,13 @@ private static String generateRandomString(int length) { return randomString.toString(); } - class MockEventListener implements StoreAwareCacheEventListener { + static class MockRemovalListener implements RemovalListener { - AtomicInteger onMissCount = new AtomicInteger(); - AtomicInteger onHitCount = new AtomicInteger(); - AtomicInteger onCachedCount = new AtomicInteger(); - AtomicInteger onRemovalCount = new AtomicInteger(); - - MockEventListener() {} - - @Override - public void onMiss(K key, CacheStoreType cacheStoreType) { - assert cacheStoreType.equals(CacheStoreType.DISK); - onMissCount.incrementAndGet(); - } - - @Override - public void onRemoval(StoreAwareCacheRemovalNotification notification) { - assert notification.getCacheStoreType().equals(CacheStoreType.DISK); - onRemovalCount.incrementAndGet(); - } - - @Override - public void onHit(K key, V value, CacheStoreType cacheStoreType) { - assert cacheStoreType.equals(CacheStoreType.DISK); - onHitCount.incrementAndGet(); - } + CounterMetric evictionMetric = new CounterMetric(); @Override - public void onCached(K key, V value, CacheStoreType cacheStoreType) { - assert cacheStoreType.equals(CacheStoreType.DISK); - onCachedCount.incrementAndGet(); + public void onRemoval(RemovalNotification notification) { + evictionMetric.inc(); } } } diff --git a/server/src/main/java/org/opensearch/common/cache/ICache.java b/server/src/main/java/org/opensearch/common/cache/ICache.java index fad2a31786825..107b4315ea48b 100644 --- a/server/src/main/java/org/opensearch/common/cache/ICache.java +++ b/server/src/main/java/org/opensearch/common/cache/ICache.java @@ -8,7 +8,8 @@ package org.opensearch.common.cache; -import org.opensearch.common.cache.stats.CacheStats; +import org.opensearch.common.annotation.ExperimentalApi; +import org.opensearch.common.cache.store.config.CacheConfig; import java.io.Closeable; @@ -19,6 +20,7 @@ * * @opensearch.experimental */ +@ExperimentalApi 
public interface ICache extends Closeable { V get(K key); @@ -36,5 +38,13 @@ public interface ICache extends Closeable { void refresh(); - CacheStats stats(); + /** + * Factory to create objects. + */ + @ExperimentalApi + interface Factory { + ICache create(CacheConfig config, CacheType cacheType); + + String getCacheName(); + } } diff --git a/server/src/main/java/org/opensearch/common/cache/LoadAwareCacheLoader.java b/server/src/main/java/org/opensearch/common/cache/LoadAwareCacheLoader.java index 57aa4aa39c782..aafd46560021b 100644 --- a/server/src/main/java/org/opensearch/common/cache/LoadAwareCacheLoader.java +++ b/server/src/main/java/org/opensearch/common/cache/LoadAwareCacheLoader.java @@ -8,13 +8,16 @@ package org.opensearch.common.cache; +import org.opensearch.common.annotation.ExperimentalApi; + /** * Extends a cache loader with awareness of whether the data is loaded or not. * @param Type of key. * @param Type of value. * - * @opensearch.internal + * @opensearch.experimental */ +@ExperimentalApi public interface LoadAwareCacheLoader extends CacheLoader { boolean isLoaded(); } diff --git a/server/src/main/java/org/opensearch/common/cache/provider/CacheProvider.java b/server/src/main/java/org/opensearch/common/cache/provider/CacheProvider.java index d9850d7f2d207..5cb4b48ff6922 100644 --- a/server/src/main/java/org/opensearch/common/cache/provider/CacheProvider.java +++ b/server/src/main/java/org/opensearch/common/cache/provider/CacheProvider.java @@ -8,10 +8,11 @@ package org.opensearch.common.cache.provider; +import org.opensearch.common.annotation.ExperimentalApi; import org.opensearch.common.cache.CacheType; +import org.opensearch.common.cache.ICache; import org.opensearch.common.cache.settings.CacheSettings; -import org.opensearch.common.cache.store.StoreAwareCache; -import org.opensearch.common.cache.store.enums.CacheStoreType; +import org.opensearch.common.cache.store.OpenSearchOnHeapCache; import org.opensearch.common.settings.Setting; import 
org.opensearch.common.settings.Settings; import org.opensearch.plugins.CachePlugin; @@ -25,9 +26,10 @@ /** * Holds all the cache factories and provides a way to fetch them when needed. */ +@ExperimentalApi public class CacheProvider { - private final Map> cacheStoreTypeFactories; + private final Map cacheStoreTypeFactories; private final Settings settings; public CacheProvider(List cachePlugins, Settings settings) { @@ -35,38 +37,46 @@ public CacheProvider(List cachePlugins, Settings settings) { this.settings = settings; } - private Map> getCacheStoreTypeFactories(List cachePlugins) { - Map> cacheStoreTypeFactories = new HashMap<>(); + private Map getCacheStoreTypeFactories(List cachePlugins) { + Map cacheStoreTypeFactories = new HashMap<>(); for (CachePlugin cachePlugin : cachePlugins) { - Map factoryMap = cachePlugin.getCacheStoreTypeMap(); - for (Map.Entry entry : factoryMap.entrySet()) { - if (cacheStoreTypeFactories.computeIfAbsent(entry.getKey(), k -> new HashMap<>()) - .putIfAbsent(entry.getValue().getCacheName(), entry.getValue()) != null) { - throw new IllegalArgumentException( - "Cache name: " + entry.getValue().getCacheName() + " is " + "already registered for store type: " + entry.getKey() - ); + Map factoryMap = cachePlugin.getCacheFactoryMap(this); + for (Map.Entry entry : factoryMap.entrySet()) { + if (cacheStoreTypeFactories.put(entry.getKey(), entry.getValue()) != null) { + throw new IllegalArgumentException("Cache name: " + entry.getKey() + " is " + "already registered"); } } } + // Add the core OpenSearchOnHeapCache as well. + cacheStoreTypeFactories.put( + OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory.NAME, + new OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory() + ); return Collections.unmodifiableMap(cacheStoreTypeFactories); } // Package private for testing. 
- protected Map> getCacheStoreTypeFactories() { + protected Map getCacheStoreTypeFactories() { return cacheStoreTypeFactories; } - public Optional getStoreAwareCacheForCacheType(CacheStoreType cacheStoreType, CacheType cacheType) { - if (!cacheStoreTypeFactories.containsKey(cacheStoreType) || cacheStoreTypeFactories.get(cacheStoreType).isEmpty()) { + public Optional getCacheFactoryForCacheStoreName(String cacheStoreName) { + if (cacheStoreName == null || cacheStoreName.isBlank()) { return Optional.empty(); + } else { + return Optional.ofNullable(cacheStoreTypeFactories.get(cacheStoreName)); } + } - Setting cacheSettingForCacheType = CacheSettings.getConcreteSettingForCacheType(cacheType, cacheStoreType); + public Optional getCacheFactoryForCacheType(CacheType cacheType) { + Setting cacheSettingForCacheType = CacheSettings.CACHE_TYPE_STORE_NAME.getConcreteSettingForNamespace( + cacheType.getSettingPrefix() + ); String storeName = cacheSettingForCacheType.get(settings); if (storeName == null || storeName.isBlank()) { return Optional.empty(); } else { - return Optional.ofNullable(cacheStoreTypeFactories.get(cacheStoreType).get(storeName)); + return Optional.ofNullable(cacheStoreTypeFactories.get(storeName)); } } } diff --git a/server/src/main/java/org/opensearch/common/cache/settings/CacheSettings.java b/server/src/main/java/org/opensearch/common/cache/settings/CacheSettings.java index 9125b1f2bb8cf..eb4563fda2275 100644 --- a/server/src/main/java/org/opensearch/common/cache/settings/CacheSettings.java +++ b/server/src/main/java/org/opensearch/common/cache/settings/CacheSettings.java @@ -10,7 +10,6 @@ import org.opensearch.common.annotation.ExperimentalApi; import org.opensearch.common.cache.CacheType; -import org.opensearch.common.cache.store.enums.CacheStoreType; import org.opensearch.common.settings.Setting; /** @@ -20,31 +19,16 @@ public class CacheSettings { /** - * Stores a disk cache store name for cache types within OpenSearch. 
- * Setting pattern: {cache_type}.disk.store.name. Example: indices.request.cache.disk.store.name + * Used to store cache store name for desired cache types within OpenSearch. + * Setting pattern: {cache_type}.store.name + * Example: indices.request.cache.store.name */ - public static final Setting.AffixSetting CACHE_TYPE_DISK_STORE_NAME = Setting.suffixKeySetting( - "disk.store.name", + public static final Setting.AffixSetting CACHE_TYPE_STORE_NAME = Setting.suffixKeySetting( + "store.name", (key) -> Setting.simpleString(key, "", Setting.Property.NodeScope) ); - /** - * Stores an onHeap cache store name for cache types within OpenSearch. - * Setting pattern: {cache_type}.onheap.store.name. - */ - public static final Setting.AffixSetting CACHE_TYPE_ONHEAP_STORE_NAME = Setting.suffixKeySetting( - "onheap.store.name", - (key) -> Setting.simpleString(key, "", Setting.Property.NodeScope) - ); - - public static Setting getConcreteSettingForCacheType(CacheType cacheType, CacheStoreType cacheStoreType) { - switch (cacheStoreType) { - case DISK: - return CACHE_TYPE_DISK_STORE_NAME.getConcreteSettingForNamespace(cacheType.getSettingPrefix()); - case ON_HEAP: - return CACHE_TYPE_ONHEAP_STORE_NAME.getConcreteSettingForNamespace(cacheType.getSettingPrefix()); - default: - throw new IllegalArgumentException("Invalid cache store type: " + cacheStoreType); - } + public static Setting getConcreteSettingForCacheType(CacheType cacheType) { + return CACHE_TYPE_STORE_NAME.getConcreteSettingForNamespace(cacheType.getSettingPrefix()); } } diff --git a/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java b/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java index 5b9ff5921a01c..101c70e478c62 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java +++ b/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java @@ -10,13 +10,21 @@ import 
org.opensearch.common.cache.Cache; import org.opensearch.common.cache.CacheBuilder; +import org.opensearch.common.cache.CacheType; +import org.opensearch.common.cache.ICache; import org.opensearch.common.cache.LoadAwareCacheLoader; import org.opensearch.common.cache.RemovalListener; import org.opensearch.common.cache.RemovalNotification; -import org.opensearch.common.cache.stats.CacheStats; -import org.opensearch.common.cache.store.builders.StoreAwareCacheBuilder; -import org.opensearch.common.cache.store.enums.CacheStoreType; -import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; +import org.opensearch.common.cache.store.builders.ICacheBuilder; +import org.opensearch.common.cache.store.config.CacheConfig; +import org.opensearch.common.cache.store.settings.OpenSearchOnHeapCacheSettings; +import org.opensearch.common.settings.Setting; +import org.opensearch.common.settings.Settings; +import org.opensearch.core.common.unit.ByteSizeValue; + +import java.util.Map; + +import static org.opensearch.common.cache.store.settings.OpenSearchOnHeapCacheSettings.MAXIMUM_SIZE_IN_BYTES_KEY; /** * This variant of on-heap cache uses OpenSearch custom cache implementation. 
@@ -25,13 +33,10 @@ * * @opensearch.experimental */ -public class OpenSearchOnHeapCache implements StoreAwareCache, RemovalListener { +public class OpenSearchOnHeapCache implements ICache, RemovalListener { private final Cache cache; - - private final StoreAwareCacheEventListener eventListener; - - private final CacheStats stats = new OpenSearchOnHeapCacheStats(); + private final RemovalListener removalListener; public OpenSearchOnHeapCache(Builder builder) { CacheBuilder cacheBuilder = CacheBuilder.builder() @@ -42,35 +47,23 @@ public OpenSearchOnHeapCache(Builder builder) { cacheBuilder.setExpireAfterAccess(builder.getExpireAfterAcess()); } cache = cacheBuilder.build(); - this.eventListener = builder.getEventListener(); + this.removalListener = builder.getRemovalListener(); } @Override public V get(K key) { V value = cache.get(key); - if (value != null) { - eventListener.onHit(key, value, CacheStoreType.ON_HEAP); - } else { - eventListener.onMiss(key, CacheStoreType.ON_HEAP); - } return value; } @Override public void put(K key, V value) { cache.put(key, value); - eventListener.onCached(key, value, CacheStoreType.ON_HEAP); } @Override public V computeIfAbsent(K key, LoadAwareCacheLoader loader) throws Exception { V value = cache.computeIfAbsent(key, key1 -> loader.load(key)); - if (!loader.isLoaded()) { - eventListener.onHit(key, value, CacheStoreType.ON_HEAP); - } else { - eventListener.onMiss(key, CacheStoreType.ON_HEAP); - eventListener.onCached(key, value, CacheStoreType.ON_HEAP); - } return value; } @@ -91,7 +84,7 @@ public Iterable keys() { @Override public long count() { - return stats.count(); + return cache.count(); } @Override @@ -102,35 +95,30 @@ public void refresh() { @Override public void close() {} - @Override - public CacheStats stats() { - return stats; - } - - @Override - public CacheStoreType getTierType() { - return CacheStoreType.ON_HEAP; - } - @Override public void onRemoval(RemovalNotification notification) { - eventListener.onRemoval( - new 
StoreAwareCacheRemovalNotification<>( - notification.getKey(), - notification.getValue(), - notification.getRemovalReason(), - CacheStoreType.ON_HEAP - ) - ); + this.removalListener.onRemoval(notification); } /** - * Stats for opensearch on heap cache. + * Factory to create OpenSearchOnheap cache. */ - class OpenSearchOnHeapCacheStats implements CacheStats { + public static class OpenSearchOnHeapCacheFactory implements Factory { + + public static final String NAME = "opensearch_onheap"; + + @Override + public ICache create(CacheConfig config, CacheType cacheType) { + Map> settingList = OpenSearchOnHeapCacheSettings.getSettingListForCacheType(cacheType); + Settings settings = config.getSettings(); + return new Builder().setMaximumWeightInBytes( + ((ByteSizeValue) settingList.get(MAXIMUM_SIZE_IN_BYTES_KEY).get(settings)).getBytes() + ).setWeigher(config.getWeigher()).setRemovalListener(config.getRemovalListener()).build(); + } + @Override - public long count() { - return cache.count(); + public String getCacheName() { + return NAME; } } @@ -139,10 +127,10 @@ public long count() { * @param Type of key * @param Type of value */ - public static class Builder extends StoreAwareCacheBuilder { + public static class Builder extends ICacheBuilder { @Override - public StoreAwareCache build() { + public ICache build() { return new OpenSearchOnHeapCache(this); } } diff --git a/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCache.java b/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCache.java deleted file mode 100644 index 46bb7f1be6986..0000000000000 --- a/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCache.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- */ - -package org.opensearch.common.cache.store; - -import org.opensearch.common.annotation.ExperimentalApi; -import org.opensearch.common.cache.CacheType; -import org.opensearch.common.cache.ICache; -import org.opensearch.common.cache.store.config.StoreAwareCacheConfig; -import org.opensearch.common.cache.store.enums.CacheStoreType; - -/** - * Represents a cache with a specific type of store like onHeap, disk etc. - * @param Type of key. - * @param Type of value. - * - * @opensearch.experimental - */ -@ExperimentalApi -public interface StoreAwareCache extends ICache { - CacheStoreType getTierType(); - - /** - * Provides a way to create a new cache. - */ - @ExperimentalApi - interface Factory { - StoreAwareCache create(StoreAwareCacheConfig config, CacheType cacheType); - - String getCacheName(); - } -} diff --git a/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCacheRemovalNotification.java b/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCacheRemovalNotification.java deleted file mode 100644 index 0c09af3140cf4..0000000000000 --- a/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCacheRemovalNotification.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.common.cache.store; - -import org.opensearch.common.annotation.ExperimentalApi; -import org.opensearch.common.cache.RemovalNotification; -import org.opensearch.common.cache.RemovalReason; -import org.opensearch.common.cache.store.enums.CacheStoreType; - -/** - * Removal notification for store aware cache. - * @param Type of key. - * @param Type of value. 
- * - * @opensearch.experimental - */ -@ExperimentalApi -public class StoreAwareCacheRemovalNotification extends RemovalNotification { - private final CacheStoreType cacheStoreType; - - public StoreAwareCacheRemovalNotification(K key, V value, RemovalReason removalReason, CacheStoreType cacheStoreType) { - super(key, value, removalReason); - this.cacheStoreType = cacheStoreType; - } - - public CacheStoreType getCacheStoreType() { - return cacheStoreType; - } -} diff --git a/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCacheValue.java b/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCacheValue.java deleted file mode 100644 index 4fbbbbfebfaa7..0000000000000 --- a/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCacheValue.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.common.cache.store; - -import org.opensearch.common.cache.store.enums.CacheStoreType; - -/** - * Represents a store aware cache value. - * @param Type of value. 
- * - * @opensearch.internal - */ -public class StoreAwareCacheValue { - private final V value; - private final CacheStoreType source; - - public StoreAwareCacheValue(V value, CacheStoreType source) { - this.value = value; - this.source = source; - } - - public V getValue() { - return value; - } - - public CacheStoreType getCacheStoreType() { - return source; - } -} diff --git a/server/src/main/java/org/opensearch/common/cache/store/builders/StoreAwareCacheBuilder.java b/server/src/main/java/org/opensearch/common/cache/store/builders/ICacheBuilder.java similarity index 57% rename from server/src/main/java/org/opensearch/common/cache/store/builders/StoreAwareCacheBuilder.java rename to server/src/main/java/org/opensearch/common/cache/store/builders/ICacheBuilder.java index 3b52afebf5579..7ca9080ec1aa6 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/builders/StoreAwareCacheBuilder.java +++ b/server/src/main/java/org/opensearch/common/cache/store/builders/ICacheBuilder.java @@ -8,8 +8,9 @@ package org.opensearch.common.cache.store.builders; -import org.opensearch.common.cache.store.StoreAwareCache; -import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; +import org.opensearch.common.annotation.ExperimentalApi; +import org.opensearch.common.cache.ICache; +import org.opensearch.common.cache.RemovalListener; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; @@ -20,9 +21,10 @@ * @param Type of key. * @param Type of value. 
* - * @opensearch.internal + * @opensearch.experimental */ -public abstract class StoreAwareCacheBuilder { +@ExperimentalApi +public abstract class ICacheBuilder { private long maxWeightInBytes; @@ -30,34 +32,34 @@ public abstract class StoreAwareCacheBuilder { private TimeValue expireAfterAcess; - private StoreAwareCacheEventListener eventListener; - private Settings settings; - public StoreAwareCacheBuilder() {} + private RemovalListener removalListener; + + public ICacheBuilder() {} - public StoreAwareCacheBuilder setMaximumWeightInBytes(long sizeInBytes) { + public ICacheBuilder setMaximumWeightInBytes(long sizeInBytes) { this.maxWeightInBytes = sizeInBytes; return this; } - public StoreAwareCacheBuilder setWeigher(ToLongBiFunction weigher) { + public ICacheBuilder setWeigher(ToLongBiFunction weigher) { this.weigher = weigher; return this; } - public StoreAwareCacheBuilder setExpireAfterAccess(TimeValue expireAfterAcess) { + public ICacheBuilder setExpireAfterAccess(TimeValue expireAfterAcess) { this.expireAfterAcess = expireAfterAcess; return this; } - public StoreAwareCacheBuilder setEventListener(StoreAwareCacheEventListener eventListener) { - this.eventListener = eventListener; + public ICacheBuilder setSettings(Settings settings) { + this.settings = settings; return this; } - public StoreAwareCacheBuilder setSettings(Settings settings) { - this.settings = settings; + public ICacheBuilder setRemovalListener(RemovalListener removalListener) { + this.removalListener = removalListener; return this; } @@ -73,13 +75,13 @@ public ToLongBiFunction getWeigher() { return weigher; } - public StoreAwareCacheEventListener getEventListener() { - return this.eventListener; + public RemovalListener getRemovalListener() { + return this.removalListener; } public Settings getSettings() { return settings; } - public abstract StoreAwareCache build(); + public abstract ICache build(); } diff --git 
a/server/src/main/java/org/opensearch/common/cache/store/config/StoreAwareCacheConfig.java b/server/src/main/java/org/opensearch/common/cache/store/config/CacheConfig.java similarity index 54% rename from server/src/main/java/org/opensearch/common/cache/store/config/StoreAwareCacheConfig.java rename to server/src/main/java/org/opensearch/common/cache/store/config/CacheConfig.java index 98aba608e0756..6fefea6578fb9 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/config/StoreAwareCacheConfig.java +++ b/server/src/main/java/org/opensearch/common/cache/store/config/CacheConfig.java @@ -9,34 +9,44 @@ package org.opensearch.common.cache.store.config; import org.opensearch.common.annotation.ExperimentalApi; -import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; +import org.opensearch.common.cache.RemovalListener; import org.opensearch.common.settings.Settings; +import java.util.function.ToLongBiFunction; + /** * Common configurations related to store aware caches. * * @opensearch.experimental */ @ExperimentalApi -public class StoreAwareCacheConfig { +public class CacheConfig { - private StoreAwareCacheEventListener eventListener; + private final Settings settings; - private Settings settings; + /** + * Defines the key type. + */ + private final Class keyType; - private Class keyType; + /** + * Defines the value type. + */ + private final Class valueType; - private Class valueType; + /** + * Represents a function that calculates the size or weight of a key-value pair. 
+ */ + private final ToLongBiFunction weigher; - private StoreAwareCacheConfig(Builder builder) { + private final RemovalListener removalListener; + + private CacheConfig(Builder builder) { this.keyType = builder.keyType; this.valueType = builder.valueType; this.settings = builder.settings; - this.eventListener = builder.eventListener; - } - - public StoreAwareCacheEventListener getEventListener() { - return eventListener; + this.removalListener = builder.removalListener; + this.weigher = builder.weigher; } public Class getKeyType() { @@ -51,6 +61,14 @@ public Settings getSettings() { return settings; } + public RemovalListener getRemovalListener() { + return removalListener; + } + + public ToLongBiFunction getWeigher() { + return weigher; + } + /** * Builder class to build Cache config related parameters. * @param Type of key. @@ -58,20 +76,17 @@ public Settings getSettings() { */ public static class Builder { - private StoreAwareCacheEventListener eventListener; - private Settings settings; private Class keyType; private Class valueType; - public Builder() {} + private RemovalListener removalListener; - public Builder setEventListener(StoreAwareCacheEventListener listener) { - this.eventListener = listener; - return this; - } + private ToLongBiFunction weigher; + + public Builder() {} public Builder setSettings(Settings settings) { this.settings = settings; @@ -88,8 +103,18 @@ public Builder setValueType(Class valueType) { return this; } - public StoreAwareCacheConfig build() { - return new StoreAwareCacheConfig<>(this); + public Builder setRemovalListener(RemovalListener removalListener) { + this.removalListener = removalListener; + return this; + } + + public Builder setWeigher(ToLongBiFunction weigher) { + this.weigher = weigher; + return this; + } + + public CacheConfig build() { + return new CacheConfig<>(this); } } } diff --git a/server/src/main/java/org/opensearch/common/cache/store/enums/CacheStoreType.java 
b/server/src/main/java/org/opensearch/common/cache/store/enums/CacheStoreType.java deleted file mode 100644 index db37e20c29c9b..0000000000000 --- a/server/src/main/java/org/opensearch/common/cache/store/enums/CacheStoreType.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.common.cache.store.enums; - -import org.opensearch.common.annotation.ExperimentalApi; - -/** - * Cache store types in tiered cache. - * - * @opensearch.internal - */ -@ExperimentalApi -public enum CacheStoreType { - - ON_HEAP, - DISK -} diff --git a/server/src/main/java/org/opensearch/common/cache/store/listeners/StoreAwareCacheEventListener.java b/server/src/main/java/org/opensearch/common/cache/store/listeners/StoreAwareCacheEventListener.java deleted file mode 100644 index bc6e9e10b5f1b..0000000000000 --- a/server/src/main/java/org/opensearch/common/cache/store/listeners/StoreAwareCacheEventListener.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- */ - -package org.opensearch.common.cache.store.listeners; - -import org.opensearch.common.annotation.ExperimentalApi; -import org.opensearch.common.cache.store.StoreAwareCacheRemovalNotification; -import org.opensearch.common.cache.store.enums.CacheStoreType; - -/** - * This can be used to listen to tiered caching events - * @param Type of key - * @param Type of value - * - * @opensearch.experimental - */ -@ExperimentalApi -public interface StoreAwareCacheEventListener { - - void onMiss(K key, CacheStoreType cacheStoreType); - - void onRemoval(StoreAwareCacheRemovalNotification notification); - - void onHit(K key, V value, CacheStoreType cacheStoreType); - - void onCached(K key, V value, CacheStoreType cacheStoreType); -} diff --git a/server/src/main/java/org/opensearch/common/cache/store/settings/OpenSearchOnHeapCacheSettings.java b/server/src/main/java/org/opensearch/common/cache/store/settings/OpenSearchOnHeapCacheSettings.java new file mode 100644 index 0000000000000..bfd2d937fb430 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/store/settings/OpenSearchOnHeapCacheSettings.java @@ -0,0 +1,67 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.cache.store.settings; + +import org.opensearch.common.cache.CacheType; +import org.opensearch.common.cache.store.OpenSearchOnHeapCache; +import org.opensearch.common.settings.Setting; +import org.opensearch.core.common.unit.ByteSizeValue; + +import java.util.HashMap; +import java.util.Map; + +import static org.opensearch.common.settings.Setting.Property.NodeScope; + +/** + * Settings for OpenSearchOnHeap + */ +public class OpenSearchOnHeapCacheSettings { + + /** + * Setting to define maximum size for the cache as a percentage of heap memory available. 
+ * + * Setting pattern: {cache_type}.opensearch_onheap.size + */ + public static final Setting.AffixSetting MAXIMUM_SIZE_IN_BYTES = Setting.suffixKeySetting( + OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory.NAME + ".size", + (key) -> Setting.memorySizeSetting(key, "1%", NodeScope) + ); + + public static final String MAXIMUM_SIZE_IN_BYTES_KEY = "maximum_size_in_bytes"; + + private static final Map> KEY_SETTING_MAP = Map.of(MAXIMUM_SIZE_IN_BYTES_KEY, MAXIMUM_SIZE_IN_BYTES); + + public static final Map>> CACHE_TYPE_MAP = getCacheTypeMap(); + + private static Map>> getCacheTypeMap() { + Map>> cacheTypeMap = new HashMap<>(); + for (CacheType cacheType : CacheType.values()) { + Map> settingMap = new HashMap<>(); + for (Map.Entry> entry : KEY_SETTING_MAP.entrySet()) { + settingMap.put(entry.getKey(), entry.getValue().getConcreteSettingForNamespace(cacheType.getSettingPrefix())); + } + cacheTypeMap.put(cacheType, settingMap); + } + return cacheTypeMap; + } + + public static Map> getSettingListForCacheType(CacheType cacheType) { + Map> cacheTypeSettings = CACHE_TYPE_MAP.get(cacheType); + if (cacheTypeSettings == null) { + throw new IllegalArgumentException( + "No settings exist for cache store name: " + + OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory.NAME + + "associated with " + + "cache type: " + + cacheType + ); + } + return cacheTypeSettings; + } +} diff --git a/server/src/main/java/org/opensearch/common/cache/store/enums/package-info.java b/server/src/main/java/org/opensearch/common/cache/store/settings/package-info.java similarity index 68% rename from server/src/main/java/org/opensearch/common/cache/store/enums/package-info.java rename to server/src/main/java/org/opensearch/common/cache/store/settings/package-info.java index 7a4e0fa7201fd..91613876a5f31 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/enums/package-info.java +++ b/server/src/main/java/org/opensearch/common/cache/store/settings/package-info.java @@ -6,5 +6,5 @@ * 
compatible open source license. */ -/** Package related to tiered cache enums */ -package org.opensearch.common.cache.store.enums; +/** Base package for cache setting **/ +package org.opensearch.common.cache.store.settings; diff --git a/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCache.java b/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCache.java deleted file mode 100644 index 027eef358c2fa..0000000000000 --- a/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCache.java +++ /dev/null @@ -1,294 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.common.cache.tier; - -import org.opensearch.common.cache.ICache; -import org.opensearch.common.cache.LoadAwareCacheLoader; -import org.opensearch.common.cache.RemovalReason; -import org.opensearch.common.cache.stats.CacheStats; -import org.opensearch.common.cache.store.StoreAwareCache; -import org.opensearch.common.cache.store.StoreAwareCacheRemovalNotification; -import org.opensearch.common.cache.store.StoreAwareCacheValue; -import org.opensearch.common.cache.store.builders.StoreAwareCacheBuilder; -import org.opensearch.common.cache.store.enums.CacheStoreType; -import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; -import org.opensearch.common.util.concurrent.ReleasableLock; -import org.opensearch.common.util.iterable.Iterables; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Objects; -import java.util.Optional; -import java.util.concurrent.locks.ReadWriteLock; -import java.util.concurrent.locks.ReentrantReadWriteLock; -import java.util.function.Function; - -/** - * This cache spillover the evicted items from heap tier to disk tier. 
All the new items are first cached on heap - * and the items evicted from on heap cache are moved to disk based cache. If disk based cache also gets full, - * then items are eventually evicted from it and removed which will result in cache miss. - * - * @param Type of key - * @param Type of value - * - * @opensearch.experimental - */ -public class TieredSpilloverCache implements ICache, StoreAwareCacheEventListener { - - // TODO: Remove optional when diskCache implementation is integrated. - private final Optional> onDiskCache; - private final StoreAwareCache onHeapCache; - private final StoreAwareCacheEventListener listener; - private final CacheStats stats = new TieredSpillOverCacheStats(); - ReadWriteLock readWriteLock = new ReentrantReadWriteLock(); - ReleasableLock readLock = new ReleasableLock(readWriteLock.readLock()); - ReleasableLock writeLock = new ReleasableLock(readWriteLock.writeLock()); - - /** - * Maintains caching tiers in ascending order of cache latency. - */ - private final List> cacheList; - - TieredSpilloverCache(Builder builder) { - Objects.requireNonNull(builder.onHeapCacheBuilder, "onHeap cache builder can't be null"); - this.onHeapCache = builder.onHeapCacheBuilder.setEventListener(this).build(); - if (builder.onDiskCacheBuilder != null) { - this.onDiskCache = Optional.of(builder.onDiskCacheBuilder.setEventListener(this).build()); - } else { - this.onDiskCache = Optional.empty(); - } - this.listener = builder.listener; - this.cacheList = this.onDiskCache.map(diskTier -> Arrays.asList(this.onHeapCache, diskTier)).orElse(List.of(this.onHeapCache)); - } - - // Package private for testing - StoreAwareCache getOnHeapCache() { - return onHeapCache; - } - - // Package private for testing - Optional> getOnDiskCache() { - return onDiskCache; - } - - @Override - public V get(K key) { - StoreAwareCacheValue cacheValue = getValueFromTieredCache(true).apply(key); - if (cacheValue == null) { - return null; - } - return cacheValue.getValue(); - } - - 
@Override - public void put(K key, V value) { - try (ReleasableLock ignore = writeLock.acquire()) { - onHeapCache.put(key, value); - listener.onCached(key, value, CacheStoreType.ON_HEAP); - } - } - - @Override - public V computeIfAbsent(K key, LoadAwareCacheLoader loader) throws Exception { - // We are skipping calling event listeners at this step as we do another get inside below computeIfAbsent. - // Where we might end up calling onMiss twice for a key not present in onHeap cache. - // Similary we might end up calling both onMiss and onHit for a key, in case we are receiving concurrent - // requests for the same key which requires loading only once. - StoreAwareCacheValue cacheValue = getValueFromTieredCache(false).apply(key); - if (cacheValue == null) { - // Add the value to the onHeap cache. We are calling computeIfAbsent which does another get inside. - // This is needed as there can be many requests for the same key at the same time and we only want to load - // the value once. - V value = null; - try (ReleasableLock ignore = writeLock.acquire()) { - value = onHeapCache.computeIfAbsent(key, loader); - } - if (loader.isLoaded()) { - listener.onMiss(key, CacheStoreType.ON_HEAP); - onDiskCache.ifPresent(diskTier -> listener.onMiss(key, CacheStoreType.DISK)); - listener.onCached(key, value, CacheStoreType.ON_HEAP); - } else { - listener.onHit(key, value, CacheStoreType.ON_HEAP); - } - return value; - } - listener.onHit(key, cacheValue.getValue(), cacheValue.getCacheStoreType()); - if (cacheValue.getCacheStoreType().equals(CacheStoreType.DISK)) { - listener.onMiss(key, CacheStoreType.ON_HEAP); - } - return cacheValue.getValue(); - } - - @Override - public void invalidate(K key) { - // We are trying to invalidate the key from all caches though it would be present in only of them. - // Doing this as we don't know where it is located. We could do a get from both and check that, but what will - // also trigger a hit/miss listener event, so ignoring it for now. 
- try (ReleasableLock ignore = writeLock.acquire()) { - for (StoreAwareCache storeAwareCache : cacheList) { - storeAwareCache.invalidate(key); - } - } - } - - @Override - public void invalidateAll() { - try (ReleasableLock ignore = writeLock.acquire()) { - for (StoreAwareCache storeAwareCache : cacheList) { - storeAwareCache.invalidateAll(); - } - } - } - - /** - * Provides an iteration over both onHeap and disk keys. This is not protected from any mutations to the cache. - * @return An iterable over (onHeap + disk) keys - */ - @Override - public Iterable keys() { - Iterable onDiskKeysIterable; - if (onDiskCache.isPresent()) { - onDiskKeysIterable = onDiskCache.get().keys(); - } else { - onDiskKeysIterable = Collections::emptyIterator; - } - return Iterables.concat(onHeapCache.keys(), onDiskKeysIterable); - } - - @Override - public long count() { - return stats.count(); - } - - @Override - public void refresh() { - try (ReleasableLock ignore = writeLock.acquire()) { - for (StoreAwareCache storeAwareCache : cacheList) { - storeAwareCache.refresh(); - } - } - } - - @Override - public void close() throws IOException { - for (StoreAwareCache storeAwareCache : cacheList) { - storeAwareCache.close(); - } - } - - @Override - public CacheStats stats() { - return stats; - } - - @Override - public void onMiss(K key, CacheStoreType cacheStoreType) { - // Misses for tiered cache are tracked here itself. 
- } - - @Override - public void onRemoval(StoreAwareCacheRemovalNotification notification) { - if (RemovalReason.EVICTED.equals(notification.getRemovalReason()) - || RemovalReason.CAPACITY.equals(notification.getRemovalReason())) { - switch (notification.getCacheStoreType()) { - case ON_HEAP: - try (ReleasableLock ignore = writeLock.acquire()) { - onDiskCache.ifPresent(diskTier -> { diskTier.put(notification.getKey(), notification.getValue()); }); - } - onDiskCache.ifPresent( - diskTier -> listener.onCached(notification.getKey(), notification.getValue(), CacheStoreType.DISK) - ); - break; - default: - break; - } - } - listener.onRemoval(notification); - } - - @Override - public void onHit(K key, V value, CacheStoreType cacheStoreType) { - // Hits for tiered cache are tracked here itself. - } - - @Override - public void onCached(K key, V value, CacheStoreType cacheStoreType) { - // onCached events for tiered cache are tracked here itself. - } - - private Function> getValueFromTieredCache(boolean triggerEventListener) { - return key -> { - try (ReleasableLock ignore = readLock.acquire()) { - for (StoreAwareCache storeAwareCache : cacheList) { - V value = storeAwareCache.get(key); - if (value != null) { - if (triggerEventListener) { - listener.onHit(key, value, storeAwareCache.getTierType()); - } - return new StoreAwareCacheValue<>(value, storeAwareCache.getTierType()); - } else { - if (triggerEventListener) { - listener.onMiss(key, storeAwareCache.getTierType()); - } - } - } - } - return null; - }; - } - - /** - * Stats for tiered spillover cache. - */ - class TieredSpillOverCacheStats implements CacheStats { - - @Override - public long count() { - long totalCount = 0; - for (StoreAwareCache storeAwareCache : cacheList) { - totalCount += storeAwareCache.count(); - } - return totalCount; - } - } - - /** - * Builder object for tiered spillover cache. 
- * @param Type of key - * @param Type of value - */ - public static class Builder { - private StoreAwareCacheBuilder onHeapCacheBuilder; - private StoreAwareCacheBuilder onDiskCacheBuilder; - private StoreAwareCacheEventListener listener; - - public Builder() {} - - public Builder setOnHeapCacheBuilder(StoreAwareCacheBuilder onHeapCacheBuilder) { - this.onHeapCacheBuilder = onHeapCacheBuilder; - return this; - } - - public Builder setOnDiskCacheBuilder(StoreAwareCacheBuilder onDiskCacheBuilder) { - this.onDiskCacheBuilder = onDiskCacheBuilder; - return this; - } - - public Builder setListener(StoreAwareCacheEventListener listener) { - this.listener = listener; - return this; - } - - public TieredSpilloverCache build() { - return new TieredSpilloverCache<>(this); - } - } -} diff --git a/server/src/main/java/org/opensearch/plugins/CachePlugin.java b/server/src/main/java/org/opensearch/plugins/CachePlugin.java index a45c87913c3fd..2560f7044e715 100644 --- a/server/src/main/java/org/opensearch/plugins/CachePlugin.java +++ b/server/src/main/java/org/opensearch/plugins/CachePlugin.java @@ -9,8 +9,8 @@ package org.opensearch.plugins; import org.opensearch.common.annotation.ExperimentalApi; -import org.opensearch.common.cache.store.StoreAwareCache; -import org.opensearch.common.cache.store.enums.CacheStoreType; +import org.opensearch.common.cache.ICache; +import org.opensearch.common.cache.provider.CacheProvider; import java.util.Map; @@ -35,7 +35,7 @@ public interface CachePlugin { * * @return Map of cacheStoreType and an associated factory. 
*/ - Map getCacheStoreTypeMap(); + Map getCacheFactoryMap(CacheProvider cacheProvider); String getName(); } diff --git a/server/src/test/java/org/opensearch/common/cache/provider/CacheProviderTests.java b/server/src/test/java/org/opensearch/common/cache/provider/CacheProviderTests.java index f25bf39c93a18..112a9c0c57d50 100644 --- a/server/src/test/java/org/opensearch/common/cache/provider/CacheProviderTests.java +++ b/server/src/test/java/org/opensearch/common/cache/provider/CacheProviderTests.java @@ -9,8 +9,9 @@ package org.opensearch.common.cache.provider; import org.opensearch.common.cache.CacheType; -import org.opensearch.common.cache.store.StoreAwareCache; -import org.opensearch.common.cache.store.enums.CacheStoreType; +import org.opensearch.common.cache.ICache; +import org.opensearch.common.cache.settings.CacheSettings; +import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; import org.opensearch.plugins.CachePlugin; import org.opensearch.test.OpenSearchTestCase; @@ -18,6 +19,7 @@ import java.util.List; import java.util.Map; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -25,75 +27,72 @@ public class CacheProviderTests extends OpenSearchTestCase { public void testWithMultiplePlugins() { CachePlugin mockPlugin1 = mock(CachePlugin.class); - StoreAwareCache.Factory factory1 = mock(StoreAwareCache.Factory.class); + ICache.Factory factory1 = mock(ICache.Factory.class); CachePlugin mockPlugin2 = mock(CachePlugin.class); - StoreAwareCache.Factory factory2 = mock(StoreAwareCache.Factory.class); - when(factory1.getCacheName()).thenReturn("cache1"); - when(factory2.getCacheName()).thenReturn("cache2"); - when(mockPlugin1.getCacheStoreTypeMap()).thenReturn(Map.of(CacheStoreType.DISK, factory1, CacheStoreType.ON_HEAP, factory1)); - when(mockPlugin2.getCacheStoreTypeMap()).thenReturn(Map.of(CacheStoreType.DISK, factory2, CacheStoreType.ON_HEAP, 
factory2)); + ICache.Factory factory2 = mock(ICache.Factory.class); + when(mockPlugin1.getCacheFactoryMap(any(CacheProvider.class))).thenReturn(Map.of("cache1", factory1)); + when(mockPlugin2.getCacheFactoryMap(any(CacheProvider.class))).thenReturn(Map.of("cache2", factory2)); CacheProvider cacheProvider = new CacheProvider(List.of(mockPlugin1, mockPlugin2), Settings.EMPTY); - Map> cacheStoreTypeListMap = cacheProvider.getCacheStoreTypeFactories(); - assertNotNull(cacheStoreTypeListMap.get(CacheStoreType.DISK).get("cache1")); - assertNotNull(cacheStoreTypeListMap.get(CacheStoreType.DISK).get("cache2")); - assertNotNull(cacheStoreTypeListMap.get(CacheStoreType.ON_HEAP).get("cache1")); - assertNotNull(cacheStoreTypeListMap.get(CacheStoreType.ON_HEAP).get("cache2")); + Map factoryMap = cacheProvider.getCacheStoreTypeFactories(); + assertEquals(factoryMap.get("cache1"), factory1); + assertEquals(factoryMap.get("cache2"), factory2); } public void testWithSameCacheStoreTypeAndName() { CachePlugin mockPlugin1 = mock(CachePlugin.class); - StoreAwareCache.Factory factory1 = mock(StoreAwareCache.Factory.class); + ICache.Factory factory1 = mock(ICache.Factory.class); CachePlugin mockPlugin2 = mock(CachePlugin.class); - StoreAwareCache.Factory factory2 = mock(StoreAwareCache.Factory.class); + ICache.Factory factory2 = mock(ICache.Factory.class); when(factory1.getCacheName()).thenReturn("cache"); when(factory2.getCacheName()).thenReturn("cache"); - when(mockPlugin1.getCacheStoreTypeMap()).thenReturn(Map.of(CacheStoreType.DISK, factory1)); - when(mockPlugin2.getCacheStoreTypeMap()).thenReturn(Map.of(CacheStoreType.DISK, factory2)); + when(mockPlugin1.getCacheFactoryMap(any(CacheProvider.class))).thenReturn(Map.of("cache", factory1)); + when(mockPlugin2.getCacheFactoryMap(any(CacheProvider.class))).thenReturn(Map.of("cache", factory2)); IllegalArgumentException ex = assertThrows( IllegalArgumentException.class, () -> new CacheProvider(List.of(mockPlugin1, mockPlugin2), 
Settings.EMPTY) ); - assertEquals("Cache name: cache is already registered for store type: DISK", ex.getMessage()); + assertEquals("Cache name: cache is already registered", ex.getMessage()); } - public void testWithCacheFactoryPresentForCacheType() { + public void testWithCacheFactoryPresentForIndicesRequestCacheType() { CachePlugin mockPlugin1 = mock(CachePlugin.class); - StoreAwareCache.Factory factory1 = mock(StoreAwareCache.Factory.class); - when(factory1.getCacheName()).thenReturn("cache1"); - when(mockPlugin1.getCacheStoreTypeMap()).thenReturn(Map.of(CacheStoreType.DISK, factory1)); + ICache.Factory factory1 = mock(ICache.Factory.class); + when(mockPlugin1.getCacheFactoryMap(any(CacheProvider.class))).thenReturn(Map.of("cache1", factory1)); + + Setting indicesRequestCacheSetting = CacheSettings.getConcreteSettingForCacheType(CacheType.INDICES_REQUEST_CACHE); CacheProvider cacheProvider = new CacheProvider( List.of(mockPlugin1), - Settings.builder().put(CacheType.INDICES_REQUEST_CACHE.getSettingPrefix() + ".disk.store.name", "cache1").build() + Settings.builder().put(indicesRequestCacheSetting.getKey(), "cache1").build() ); - assertTrue(cacheProvider.getStoreAwareCacheForCacheType(CacheStoreType.DISK, CacheType.INDICES_REQUEST_CACHE).isPresent()); + assertTrue(cacheProvider.getCacheFactoryForCacheType(CacheType.INDICES_REQUEST_CACHE).isPresent()); + assertEquals(cacheProvider.getCacheFactoryForCacheType(CacheType.INDICES_REQUEST_CACHE).get(), factory1); } - public void testWithCacheFactoryNotPresentForCacheType() { + public void testWithCacheFactoryNotPresentForIndicesRequestCacheType() { CachePlugin mockPlugin1 = mock(CachePlugin.class); - StoreAwareCache.Factory factory1 = mock(StoreAwareCache.Factory.class); - when(factory1.getCacheName()).thenReturn("cache1"); - when(mockPlugin1.getCacheStoreTypeMap()).thenReturn(Map.of(CacheStoreType.DISK, factory1)); + ICache.Factory factory1 = mock(ICache.Factory.class); + 
when(mockPlugin1.getCacheFactoryMap(any(CacheProvider.class))).thenReturn(Map.of("cache1", factory1)); + + Setting indicesRequestCacheSetting = CacheSettings.getConcreteSettingForCacheType(CacheType.INDICES_REQUEST_CACHE); CacheProvider cacheProvider = new CacheProvider( List.of(mockPlugin1), - Settings.builder().put(CacheType.INDICES_REQUEST_CACHE.getSettingPrefix() + ".disk.store.name", "cache2").build() + Settings.builder().put(indicesRequestCacheSetting.getKey(), "cache").build() ); - assertFalse(cacheProvider.getStoreAwareCacheForCacheType(CacheStoreType.DISK, CacheType.INDICES_REQUEST_CACHE).isPresent()); - - assertFalse(cacheProvider.getStoreAwareCacheForCacheType(CacheStoreType.ON_HEAP, CacheType.INDICES_REQUEST_CACHE).isPresent()); + assertFalse(cacheProvider.getCacheFactoryForCacheType(CacheType.INDICES_REQUEST_CACHE).isPresent()); } - public void testWithNoStoreNameForCacheType() { + public void testGetCacheFactoryForCacheStoreName() { CachePlugin mockPlugin1 = mock(CachePlugin.class); - StoreAwareCache.Factory factory1 = mock(StoreAwareCache.Factory.class); - when(factory1.getCacheName()).thenReturn("cache1"); - when(mockPlugin1.getCacheStoreTypeMap()).thenReturn(Map.of(CacheStoreType.DISK, factory1)); + ICache.Factory factory1 = mock(ICache.Factory.class); + when(mockPlugin1.getCacheFactoryMap(any(CacheProvider.class))).thenReturn(Map.of("cache1", factory1)); CacheProvider cacheProvider = new CacheProvider(List.of(mockPlugin1), Settings.EMPTY); - assertFalse(cacheProvider.getStoreAwareCacheForCacheType(CacheStoreType.DISK, CacheType.INDICES_REQUEST_CACHE).isPresent()); + assertTrue(cacheProvider.getCacheFactoryForCacheStoreName("cache1").isPresent()); + assertFalse(cacheProvider.getCacheFactoryForCacheStoreName("cache").isPresent()); } } diff --git a/server/src/test/java/org/opensearch/common/cache/tier/TieredSpilloverCacheTests.java b/server/src/test/java/org/opensearch/common/cache/tier/TieredSpilloverCacheTests.java deleted file mode 100644 index 
cce0449dc88b8..0000000000000 --- a/server/src/test/java/org/opensearch/common/cache/tier/TieredSpilloverCacheTests.java +++ /dev/null @@ -1,797 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.common.cache.tier; - -import org.opensearch.common.cache.LoadAwareCacheLoader; -import org.opensearch.common.cache.RemovalReason; -import org.opensearch.common.cache.stats.CacheStats; -import org.opensearch.common.cache.store.OpenSearchOnHeapCache; -import org.opensearch.common.cache.store.StoreAwareCache; -import org.opensearch.common.cache.store.StoreAwareCacheRemovalNotification; -import org.opensearch.common.cache.store.builders.StoreAwareCacheBuilder; -import org.opensearch.common.cache.store.enums.CacheStoreType; -import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; -import org.opensearch.common.metrics.CounterMetric; -import org.opensearch.test.OpenSearchTestCase; - -import java.util.ArrayList; -import java.util.EnumMap; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.CopyOnWriteArrayList; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.Phaser; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicReference; - -public class TieredSpilloverCacheTests extends OpenSearchTestCase { - - public void testComputeIfAbsentWithoutAnyOnHeapCacheEviction() throws Exception { - int onHeapCacheSize = randomIntBetween(10, 30); - MockCacheEventListener eventListener = new MockCacheEventListener(); - TieredSpilloverCache tieredSpilloverCache = intializeTieredSpilloverCache( - onHeapCacheSize, - randomIntBetween(1, 4), - eventListener, - 0 - ); - int numOfItems1 = randomIntBetween(1, onHeapCacheSize / 2 - 1); - List 
keys = new ArrayList<>(); - // Put values in cache. - for (int iter = 0; iter < numOfItems1; iter++) { - String key = UUID.randomUUID().toString(); - keys.add(key); - LoadAwareCacheLoader tieredCacheLoader = getLoadAwareCacheLoader(); - tieredSpilloverCache.computeIfAbsent(key, tieredCacheLoader); - } - assertEquals(numOfItems1, eventListener.enumMap.get(CacheStoreType.ON_HEAP).missCount.count()); - assertEquals(0, eventListener.enumMap.get(CacheStoreType.ON_HEAP).hitCount.count()); - assertEquals(0, eventListener.enumMap.get(CacheStoreType.ON_HEAP).evictionsMetric.count()); - - // Try to hit cache again with some randomization. - int numOfItems2 = randomIntBetween(1, onHeapCacheSize / 2 - 1); - int cacheHit = 0; - int cacheMiss = 0; - for (int iter = 0; iter < numOfItems2; iter++) { - if (randomBoolean()) { - // Hit cache with stored key - cacheHit++; - int index = randomIntBetween(0, keys.size() - 1); - tieredSpilloverCache.computeIfAbsent(keys.get(index), getLoadAwareCacheLoader()); - } else { - // Hit cache with randomized key which is expected to miss cache always. 
- tieredSpilloverCache.computeIfAbsent(UUID.randomUUID().toString(), getLoadAwareCacheLoader()); - cacheMiss++; - } - } - assertEquals(cacheHit, eventListener.enumMap.get(CacheStoreType.ON_HEAP).hitCount.count()); - assertEquals(numOfItems1 + cacheMiss, eventListener.enumMap.get(CacheStoreType.ON_HEAP).missCount.count()); - assertEquals(0, eventListener.enumMap.get(CacheStoreType.ON_HEAP).evictionsMetric.count()); - } - - public void testComputeIfAbsentWithEvictionsFromOnHeapCache() throws Exception { - int onHeapCacheSize = randomIntBetween(10, 30); - int diskCacheSize = randomIntBetween(60, 100); - int totalSize = onHeapCacheSize + diskCacheSize; - MockCacheEventListener eventListener = new MockCacheEventListener(); - StoreAwareCacheBuilder cacheBuilder = new OpenSearchOnHeapCache.Builder().setMaximumWeightInBytes( - onHeapCacheSize * 50 - ).setWeigher((k, v) -> 50); // Will support onHeapCacheSize entries. - - StoreAwareCacheBuilder diskCacheBuilder = new MockOnDiskCache.Builder().setMaxSize(diskCacheSize) - .setDeliberateDelay(0); - - TieredSpilloverCache tieredSpilloverCache = new TieredSpilloverCache.Builder() - .setOnHeapCacheBuilder(cacheBuilder) - .setOnDiskCacheBuilder(diskCacheBuilder) - .setListener(eventListener) - .build(); - - // Put values in cache more than it's size and cause evictions from onHeap. 
- int numOfItems1 = randomIntBetween(onHeapCacheSize + 1, totalSize); - List onHeapKeys = new ArrayList<>(); - List diskTierKeys = new ArrayList<>(); - for (int iter = 0; iter < numOfItems1; iter++) { - String key = UUID.randomUUID().toString(); - LoadAwareCacheLoader tieredCacheLoader = getLoadAwareCacheLoader(); - tieredSpilloverCache.computeIfAbsent(key, tieredCacheLoader); - } - long actualDiskCacheSize = tieredSpilloverCache.getOnDiskCache().get().count(); - assertEquals(numOfItems1, eventListener.enumMap.get(CacheStoreType.ON_HEAP).missCount.count()); - assertEquals(0, eventListener.enumMap.get(CacheStoreType.ON_HEAP).hitCount.count()); - assertEquals(actualDiskCacheSize, eventListener.enumMap.get(CacheStoreType.ON_HEAP).evictionsMetric.count()); - - assertEquals( - eventListener.enumMap.get(CacheStoreType.ON_HEAP).evictionsMetric.count(), - eventListener.enumMap.get(CacheStoreType.DISK).cachedCount.count() - ); - assertEquals(actualDiskCacheSize, eventListener.enumMap.get(CacheStoreType.DISK).cachedCount.count()); - - tieredSpilloverCache.getOnHeapCache().keys().forEach(onHeapKeys::add); - tieredSpilloverCache.getOnDiskCache().get().keys().forEach(diskTierKeys::add); - - assertEquals(tieredSpilloverCache.getOnHeapCache().count(), onHeapKeys.size()); - assertEquals(tieredSpilloverCache.getOnDiskCache().get().count(), diskTierKeys.size()); - - // Try to hit cache again with some randomization. - int numOfItems2 = randomIntBetween(50, 200); - int onHeapCacheHit = 0; - int diskCacheHit = 0; - int cacheMiss = 0; - for (int iter = 0; iter < numOfItems2; iter++) { - if (randomBoolean()) { // Hit cache with key stored in onHeap cache. 
- onHeapCacheHit++; - int index = randomIntBetween(0, onHeapKeys.size() - 1); - LoadAwareCacheLoader loadAwareCacheLoader = getLoadAwareCacheLoader(); - tieredSpilloverCache.computeIfAbsent(onHeapKeys.get(index), loadAwareCacheLoader); - assertFalse(loadAwareCacheLoader.isLoaded()); - } else { // Hit cache with key stored in disk cache. - diskCacheHit++; - int index = randomIntBetween(0, diskTierKeys.size() - 1); - LoadAwareCacheLoader loadAwareCacheLoader = getLoadAwareCacheLoader(); - tieredSpilloverCache.computeIfAbsent(diskTierKeys.get(index), loadAwareCacheLoader); - assertFalse(loadAwareCacheLoader.isLoaded()); - } - } - for (int iter = 0; iter < randomIntBetween(50, 200); iter++) { - // Hit cache with randomized key which is expected to miss cache always. - LoadAwareCacheLoader tieredCacheLoader = getLoadAwareCacheLoader(); - tieredSpilloverCache.computeIfAbsent(UUID.randomUUID().toString(), tieredCacheLoader); - cacheMiss++; - } - // On heap cache misses would also include diskCacheHits as it means it missed onHeap cache. 
- assertEquals(numOfItems1 + cacheMiss + diskCacheHit, eventListener.enumMap.get(CacheStoreType.ON_HEAP).missCount.count()); - assertEquals(onHeapCacheHit, eventListener.enumMap.get(CacheStoreType.ON_HEAP).hitCount.count()); - assertEquals(cacheMiss + numOfItems1, eventListener.enumMap.get(CacheStoreType.DISK).missCount.count()); - assertEquals(diskCacheHit, eventListener.enumMap.get(CacheStoreType.DISK).hitCount.count()); - } - - public void testComputeIfAbsentWithEvictionsFromBothTier() throws Exception { - int onHeapCacheSize = randomIntBetween(10, 30); - int diskCacheSize = randomIntBetween(onHeapCacheSize + 1, 100); - int totalSize = onHeapCacheSize + diskCacheSize; - - MockCacheEventListener eventListener = new MockCacheEventListener(); - TieredSpilloverCache tieredSpilloverCache = intializeTieredSpilloverCache( - onHeapCacheSize, - diskCacheSize, - eventListener, - 0 - ); - - int numOfItems = randomIntBetween(totalSize + 1, totalSize * 3); - for (int iter = 0; iter < numOfItems; iter++) { - LoadAwareCacheLoader tieredCacheLoader = getLoadAwareCacheLoader(); - tieredSpilloverCache.computeIfAbsent(UUID.randomUUID().toString(), tieredCacheLoader); - } - assertTrue(eventListener.enumMap.get(CacheStoreType.ON_HEAP).evictionsMetric.count() > 0); - assertTrue(eventListener.enumMap.get(CacheStoreType.DISK).evictionsMetric.count() > 0); - } - - public void testGetAndCount() throws Exception { - int onHeapCacheSize = randomIntBetween(10, 30); - int diskCacheSize = randomIntBetween(onHeapCacheSize + 1, 100); - int totalSize = onHeapCacheSize + diskCacheSize; - - MockCacheEventListener eventListener = new MockCacheEventListener(); - TieredSpilloverCache tieredSpilloverCache = intializeTieredSpilloverCache( - onHeapCacheSize, - diskCacheSize, - eventListener, - 0 - ); - - int numOfItems1 = randomIntBetween(onHeapCacheSize + 1, totalSize); - List onHeapKeys = new ArrayList<>(); - List diskTierKeys = new ArrayList<>(); - for (int iter = 0; iter < numOfItems1; iter++) { - 
String key = UUID.randomUUID().toString(); - if (iter > (onHeapCacheSize - 1)) { - // All these are bound to go to disk based cache. - diskTierKeys.add(key); - } else { - onHeapKeys.add(key); - } - LoadAwareCacheLoader loadAwareCacheLoader = getLoadAwareCacheLoader(); - tieredSpilloverCache.computeIfAbsent(key, loadAwareCacheLoader); - } - - for (int iter = 0; iter < numOfItems1; iter++) { - if (randomBoolean()) { - if (randomBoolean()) { - int index = randomIntBetween(0, onHeapKeys.size() - 1); - assertNotNull(tieredSpilloverCache.get(onHeapKeys.get(index))); - } else { - int index = randomIntBetween(0, diskTierKeys.size() - 1); - assertNotNull(tieredSpilloverCache.get(diskTierKeys.get(index))); - } - } else { - assertNull(tieredSpilloverCache.get(UUID.randomUUID().toString())); - } - } - assertEquals(numOfItems1, tieredSpilloverCache.count()); - } - - public void testWithDiskTierNull() throws Exception { - int onHeapCacheSize = randomIntBetween(10, 30); - MockCacheEventListener eventListener = new MockCacheEventListener(); - - StoreAwareCacheBuilder onHeapCacheBuilder = new OpenSearchOnHeapCache.Builder() - .setMaximumWeightInBytes(onHeapCacheSize * 20) - .setWeigher((k, v) -> 20); // Will support upto onHeapCacheSize entries - TieredSpilloverCache tieredSpilloverCache = new TieredSpilloverCache.Builder() - .setOnHeapCacheBuilder(onHeapCacheBuilder) - .setListener(eventListener) - .build(); - - int numOfItems = randomIntBetween(onHeapCacheSize + 1, onHeapCacheSize * 3); - for (int iter = 0; iter < numOfItems; iter++) { - LoadAwareCacheLoader loadAwareCacheLoader = getLoadAwareCacheLoader(); - tieredSpilloverCache.computeIfAbsent(UUID.randomUUID().toString(), loadAwareCacheLoader); - } - assertTrue(eventListener.enumMap.get(CacheStoreType.ON_HEAP).evictionsMetric.count() > 0); - assertEquals(0, eventListener.enumMap.get(CacheStoreType.DISK).cachedCount.count()); - assertEquals(0, eventListener.enumMap.get(CacheStoreType.DISK).evictionsMetric.count()); - 
assertEquals(0, eventListener.enumMap.get(CacheStoreType.DISK).missCount.count()); - } - - public void testPut() { - int onHeapCacheSize = randomIntBetween(10, 30); - int diskCacheSize = randomIntBetween(onHeapCacheSize + 1, 100); - - MockCacheEventListener eventListener = new MockCacheEventListener<>(); - TieredSpilloverCache tieredSpilloverCache = intializeTieredSpilloverCache( - onHeapCacheSize, - diskCacheSize, - eventListener, - 0 - ); - String key = UUID.randomUUID().toString(); - String value = UUID.randomUUID().toString(); - tieredSpilloverCache.put(key, value); - assertEquals(1, eventListener.enumMap.get(CacheStoreType.ON_HEAP).cachedCount.count()); - assertEquals(1, tieredSpilloverCache.count()); - } - - public void testPutAndVerifyNewItemsArePresentOnHeapCache() throws Exception { - int onHeapCacheSize = randomIntBetween(200, 400); - int diskCacheSize = randomIntBetween(450, 800); - - MockCacheEventListener eventListener = new MockCacheEventListener<>(); - - TieredSpilloverCache tieredSpilloverCache = intializeTieredSpilloverCache( - onHeapCacheSize, - diskCacheSize, - eventListener, - 0 - ); - - for (int i = 0; i < onHeapCacheSize; i++) { - tieredSpilloverCache.computeIfAbsent(UUID.randomUUID().toString(), new LoadAwareCacheLoader<>() { - @Override - public boolean isLoaded() { - return false; - } - - @Override - public String load(String key) throws Exception { - return UUID.randomUUID().toString(); - } - }); - } - - assertEquals(onHeapCacheSize, tieredSpilloverCache.getOnHeapCache().count()); - assertEquals(0, tieredSpilloverCache.getOnDiskCache().get().count()); - - // Again try to put OnHeap cache capacity amount of new items. 
- List newKeyList = new ArrayList<>(); - for (int i = 0; i < onHeapCacheSize; i++) { - newKeyList.add(UUID.randomUUID().toString()); - } - - for (int i = 0; i < newKeyList.size(); i++) { - tieredSpilloverCache.computeIfAbsent(newKeyList.get(i), new LoadAwareCacheLoader<>() { - @Override - public boolean isLoaded() { - return false; - } - - @Override - public String load(String key) { - return UUID.randomUUID().toString(); - } - }); - } - - // Verify that new items are part of onHeap cache. - List actualOnHeapCacheKeys = new ArrayList<>(); - tieredSpilloverCache.getOnHeapCache().keys().forEach(actualOnHeapCacheKeys::add); - - assertEquals(newKeyList.size(), actualOnHeapCacheKeys.size()); - for (int i = 0; i < actualOnHeapCacheKeys.size(); i++) { - assertTrue(newKeyList.contains(actualOnHeapCacheKeys.get(i))); - } - - assertEquals(onHeapCacheSize, tieredSpilloverCache.getOnHeapCache().count()); - assertEquals(onHeapCacheSize, tieredSpilloverCache.getOnDiskCache().get().count()); - } - - public void testInvalidate() { - int onHeapCacheSize = 1; - int diskCacheSize = 10; - - MockCacheEventListener eventListener = new MockCacheEventListener<>(); - TieredSpilloverCache tieredSpilloverCache = intializeTieredSpilloverCache( - onHeapCacheSize, - diskCacheSize, - eventListener, - 0 - ); - String key = UUID.randomUUID().toString(); - String value = UUID.randomUUID().toString(); - // First try to invalidate without the key present in cache. - tieredSpilloverCache.invalidate(key); - assertEquals(0, eventListener.enumMap.get(CacheStoreType.ON_HEAP).invalidationMetric.count()); - - // Now try to invalidate with the key present in onHeap cache. 
- tieredSpilloverCache.put(key, value); - tieredSpilloverCache.invalidate(key); - assertEquals(1, eventListener.enumMap.get(CacheStoreType.ON_HEAP).invalidationMetric.count()); - assertEquals(0, tieredSpilloverCache.count()); - - tieredSpilloverCache.put(key, value); - // Put another key/value so that one of the item is evicted to disk cache. - String key2 = UUID.randomUUID().toString(); - tieredSpilloverCache.put(key2, UUID.randomUUID().toString()); - assertEquals(2, tieredSpilloverCache.count()); - // Again invalidate older key - tieredSpilloverCache.invalidate(key); - assertEquals(1, eventListener.enumMap.get(CacheStoreType.DISK).invalidationMetric.count()); - assertEquals(1, tieredSpilloverCache.count()); - } - - public void testCacheKeys() throws Exception { - int onHeapCacheSize = randomIntBetween(10, 30); - int diskCacheSize = randomIntBetween(60, 100); - int totalSize = onHeapCacheSize + diskCacheSize; - - MockCacheEventListener eventListener = new MockCacheEventListener<>(); - TieredSpilloverCache tieredSpilloverCache = intializeTieredSpilloverCache( - onHeapCacheSize, - diskCacheSize, - eventListener, - 0 - ); - List onHeapKeys = new ArrayList<>(); - List diskTierKeys = new ArrayList<>(); - // During first round add onHeapCacheSize entries. Will go to onHeap cache initially. - for (int i = 0; i < onHeapCacheSize; i++) { - String key = UUID.randomUUID().toString(); - diskTierKeys.add(key); - tieredSpilloverCache.computeIfAbsent(key, getLoadAwareCacheLoader()); - } - // In another round, add another onHeapCacheSize entries. These will go to onHeap and above ones will be - // evicted to onDisk cache. 
- for (int i = 0; i < onHeapCacheSize; i++) { - String key = UUID.randomUUID().toString(); - onHeapKeys.add(key); - tieredSpilloverCache.computeIfAbsent(key, getLoadAwareCacheLoader()); - } - - List actualOnHeapKeys = new ArrayList<>(); - List actualOnDiskKeys = new ArrayList<>(); - Iterable onHeapiterable = tieredSpilloverCache.getOnHeapCache().keys(); - Iterable onDiskiterable = tieredSpilloverCache.getOnDiskCache().get().keys(); - onHeapiterable.iterator().forEachRemaining(actualOnHeapKeys::add); - onDiskiterable.iterator().forEachRemaining(actualOnDiskKeys::add); - for (String onHeapKey : onHeapKeys) { - assertTrue(actualOnHeapKeys.contains(onHeapKey)); - } - for (String onDiskKey : actualOnDiskKeys) { - assertTrue(actualOnDiskKeys.contains(onDiskKey)); - } - - // Testing keys() which returns all keys. - List actualMergedKeys = new ArrayList<>(); - List expectedMergedKeys = new ArrayList<>(); - expectedMergedKeys.addAll(onHeapKeys); - expectedMergedKeys.addAll(diskTierKeys); - - Iterable mergedIterable = tieredSpilloverCache.keys(); - mergedIterable.iterator().forEachRemaining(actualMergedKeys::add); - - assertEquals(expectedMergedKeys.size(), actualMergedKeys.size()); - for (String key : expectedMergedKeys) { - assertTrue(actualMergedKeys.contains(key)); - } - } - - public void testRefresh() { - int onHeapCacheSize = randomIntBetween(10, 30); - int diskCacheSize = randomIntBetween(60, 100); - - MockCacheEventListener eventListener = new MockCacheEventListener<>(); - TieredSpilloverCache tieredSpilloverCache = intializeTieredSpilloverCache( - onHeapCacheSize, - diskCacheSize, - eventListener, - 0 - ); - tieredSpilloverCache.refresh(); - } - - public void testInvalidateAll() throws Exception { - int onHeapCacheSize = randomIntBetween(10, 30); - int diskCacheSize = randomIntBetween(60, 100); - int totalSize = onHeapCacheSize + diskCacheSize; - - MockCacheEventListener eventListener = new MockCacheEventListener<>(); - TieredSpilloverCache tieredSpilloverCache = 
intializeTieredSpilloverCache( - onHeapCacheSize, - diskCacheSize, - eventListener, - 0 - ); - // Put values in cache more than it's size and cause evictions from onHeap. - int numOfItems1 = randomIntBetween(onHeapCacheSize + 1, totalSize); - List onHeapKeys = new ArrayList<>(); - List diskTierKeys = new ArrayList<>(); - for (int iter = 0; iter < numOfItems1; iter++) { - String key = UUID.randomUUID().toString(); - if (iter > (onHeapCacheSize - 1)) { - // All these are bound to go to disk based cache. - diskTierKeys.add(key); - } else { - onHeapKeys.add(key); - } - LoadAwareCacheLoader tieredCacheLoader = getLoadAwareCacheLoader(); - tieredSpilloverCache.computeIfAbsent(key, tieredCacheLoader); - } - assertEquals(numOfItems1, tieredSpilloverCache.count()); - tieredSpilloverCache.invalidateAll(); - assertEquals(0, tieredSpilloverCache.count()); - } - - public void testComputeIfAbsentConcurrently() throws Exception { - int onHeapCacheSize = randomIntBetween(100, 300); - int diskCacheSize = randomIntBetween(200, 400); - - MockCacheEventListener eventListener = new MockCacheEventListener<>(); - - TieredSpilloverCache tieredSpilloverCache = intializeTieredSpilloverCache( - onHeapCacheSize, - diskCacheSize, - eventListener, - 0 - ); - - int numberOfSameKeys = randomIntBetween(10, onHeapCacheSize - 1); - String key = UUID.randomUUID().toString(); - String value = UUID.randomUUID().toString(); - - Thread[] threads = new Thread[numberOfSameKeys]; - Phaser phaser = new Phaser(numberOfSameKeys + 1); - CountDownLatch countDownLatch = new CountDownLatch(numberOfSameKeys); // To wait for all threads to finish. 
- - List> loadAwareCacheLoaderList = new CopyOnWriteArrayList<>(); - - for (int i = 0; i < numberOfSameKeys; i++) { - threads[i] = new Thread(() -> { - try { - LoadAwareCacheLoader loadAwareCacheLoader = new LoadAwareCacheLoader() { - boolean isLoaded = false; - - @Override - public boolean isLoaded() { - return isLoaded; - } - - @Override - public Object load(Object key) throws Exception { - isLoaded = true; - return value; - } - }; - loadAwareCacheLoaderList.add(loadAwareCacheLoader); - phaser.arriveAndAwaitAdvance(); - tieredSpilloverCache.computeIfAbsent(key, loadAwareCacheLoader); - } catch (Exception e) { - throw new RuntimeException(e); - } - countDownLatch.countDown(); - }); - threads[i].start(); - } - phaser.arriveAndAwaitAdvance(); - countDownLatch.await(); // Wait for rest of tasks to be cancelled. - int numberOfTimesKeyLoaded = 0; - assertEquals(numberOfSameKeys, loadAwareCacheLoaderList.size()); - for (int i = 0; i < loadAwareCacheLoaderList.size(); i++) { - LoadAwareCacheLoader loader = loadAwareCacheLoaderList.get(i); - if (loader.isLoaded()) { - numberOfTimesKeyLoaded++; - } - } - assertEquals(1, numberOfTimesKeyLoaded); // It should be loaded only once. - } - - public void testConcurrencyForEvictionFlow() throws Exception { - int diskCacheSize = randomIntBetween(450, 800); - - MockCacheEventListener eventListener = new MockCacheEventListener<>(); - - StoreAwareCacheBuilder cacheBuilder = new OpenSearchOnHeapCache.Builder().setMaximumWeightInBytes( - 200 - ).setWeigher((k, v) -> 150); - - StoreAwareCacheBuilder diskCacheBuilder = new MockOnDiskCache.Builder().setMaxSize(diskCacheSize) - .setDeliberateDelay(500); - - TieredSpilloverCache tieredSpilloverCache = new TieredSpilloverCache.Builder() - .setOnHeapCacheBuilder(cacheBuilder) - .setOnDiskCacheBuilder(diskCacheBuilder) - .setListener(eventListener) - .build(); - - String keyToBeEvicted = "key1"; - String secondKey = "key2"; - - // Put first key on tiered cache. Will go into onHeap cache. 
- tieredSpilloverCache.computeIfAbsent(keyToBeEvicted, new LoadAwareCacheLoader<>() { - @Override - public boolean isLoaded() { - return false; - } - - @Override - public String load(String key) { - return UUID.randomUUID().toString(); - } - }); - CountDownLatch countDownLatch = new CountDownLatch(1); - CountDownLatch countDownLatch1 = new CountDownLatch(1); - // Put second key on tiered cache. Will cause eviction of first key from onHeap cache and should go into - // disk cache. - LoadAwareCacheLoader loadAwareCacheLoader = getLoadAwareCacheLoader(); - Thread thread = new Thread(() -> { - try { - tieredSpilloverCache.computeIfAbsent(secondKey, loadAwareCacheLoader); - countDownLatch1.countDown(); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - thread.start(); - assertBusy(() -> { assertTrue(loadAwareCacheLoader.isLoaded()); }, 100, TimeUnit.MILLISECONDS); // We wait for new key to be loaded - // after which it eviction flow is - // guaranteed to occur. - StoreAwareCache onDiskCache = tieredSpilloverCache.getOnDiskCache().get(); - - // Now on a different thread, try to get key(above one which got evicted) from tiered cache. We expect this - // should return not null value as it should be present on diskCache. 
- AtomicReference actualValue = new AtomicReference<>(); - Thread thread1 = new Thread(() -> { - try { - actualValue.set(tieredSpilloverCache.get(keyToBeEvicted)); - } catch (Exception e) { - throw new RuntimeException(e); - } - countDownLatch.countDown(); - }); - thread1.start(); - countDownLatch.await(); - assertNotNull(actualValue.get()); - countDownLatch1.await(); - assertEquals(1, eventListener.enumMap.get(CacheStoreType.ON_HEAP).evictionsMetric.count()); - assertEquals(1, tieredSpilloverCache.getOnHeapCache().count()); - assertEquals(1, onDiskCache.count()); - assertNotNull(onDiskCache.get(keyToBeEvicted)); - } - - class MockCacheEventListener implements StoreAwareCacheEventListener { - - EnumMap enumMap = new EnumMap<>(CacheStoreType.class); - - MockCacheEventListener() { - for (CacheStoreType cacheStoreType : CacheStoreType.values()) { - enumMap.put(cacheStoreType, new TestStatsHolder()); - } - } - - @Override - public void onMiss(K key, CacheStoreType cacheStoreType) { - enumMap.get(cacheStoreType).missCount.inc(); - } - - @Override - public void onRemoval(StoreAwareCacheRemovalNotification notification) { - if (notification.getRemovalReason().equals(RemovalReason.EVICTED)) { - enumMap.get(notification.getCacheStoreType()).evictionsMetric.inc(); - } else if (notification.getRemovalReason().equals(RemovalReason.INVALIDATED)) { - enumMap.get(notification.getCacheStoreType()).invalidationMetric.inc(); - } - } - - @Override - public void onHit(K key, V value, CacheStoreType cacheStoreType) { - enumMap.get(cacheStoreType).hitCount.inc(); - } - - @Override - public void onCached(K key, V value, CacheStoreType cacheStoreType) { - enumMap.get(cacheStoreType).cachedCount.inc(); - } - - class TestStatsHolder { - final CounterMetric evictionsMetric = new CounterMetric(); - final CounterMetric hitCount = new CounterMetric(); - final CounterMetric missCount = new CounterMetric(); - final CounterMetric cachedCount = new CounterMetric(); - final CounterMetric 
invalidationMetric = new CounterMetric(); - } - } - - private LoadAwareCacheLoader getLoadAwareCacheLoader() { - return new LoadAwareCacheLoader() { - boolean isLoaded = false; - - @Override - public String load(String key) { - isLoaded = true; - return UUID.randomUUID().toString(); - } - - @Override - public boolean isLoaded() { - return isLoaded; - } - }; - } - - private TieredSpilloverCache intializeTieredSpilloverCache( - int onHeapCacheSize, - int diksCacheSize, - StoreAwareCacheEventListener eventListener, - long diskDeliberateDelay - ) { - StoreAwareCacheBuilder diskCacheBuilder = new MockOnDiskCache.Builder().setMaxSize(diksCacheSize) - .setDeliberateDelay(diskDeliberateDelay); - StoreAwareCacheBuilder onHeapCacheBuilder = new OpenSearchOnHeapCache.Builder() - .setMaximumWeightInBytes(onHeapCacheSize * 20) - .setWeigher((k, v) -> 20); // Will support upto onHeapCacheSize entries - return new TieredSpilloverCache.Builder().setOnHeapCacheBuilder(onHeapCacheBuilder) - .setOnDiskCacheBuilder(diskCacheBuilder) - .setListener(eventListener) - .build(); - } -} - -class MockOnDiskCache implements StoreAwareCache { - - Map cache; - int maxSize; - - long delay; - StoreAwareCacheEventListener eventListener; - - MockOnDiskCache(int maxSize, StoreAwareCacheEventListener eventListener, long delay) { - this.maxSize = maxSize; - this.eventListener = eventListener; - this.delay = delay; - this.cache = new ConcurrentHashMap(); - } - - @Override - public V get(K key) { - V value = cache.get(key); - if (value != null) { - eventListener.onHit(key, value, CacheStoreType.DISK); - } else { - eventListener.onMiss(key, CacheStoreType.DISK); - } - return value; - } - - @Override - public void put(K key, V value) { - if (this.cache.size() >= maxSize) { // For simplification - eventListener.onRemoval(new StoreAwareCacheRemovalNotification<>(key, value, RemovalReason.EVICTED, CacheStoreType.DISK)); - return; - } - try { - Thread.sleep(delay); - } catch (InterruptedException e) { - throw 
new RuntimeException(e); - } - this.cache.put(key, value); - eventListener.onCached(key, value, CacheStoreType.DISK); - } - - @Override - public V computeIfAbsent(K key, LoadAwareCacheLoader loader) throws Exception { - V value = cache.computeIfAbsent(key, key1 -> { - try { - return loader.load(key); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - if (!loader.isLoaded()) { - eventListener.onHit(key, value, CacheStoreType.DISK); - } else { - eventListener.onMiss(key, CacheStoreType.DISK); - eventListener.onCached(key, value, CacheStoreType.DISK); - } - return value; - } - - @Override - public void invalidate(K key) { - if (this.cache.containsKey(key)) { - eventListener.onRemoval(new StoreAwareCacheRemovalNotification<>(key, null, RemovalReason.INVALIDATED, CacheStoreType.DISK)); - } - this.cache.remove(key); - } - - @Override - public void invalidateAll() { - this.cache.clear(); - } - - @Override - public Iterable keys() { - return this.cache.keySet(); - } - - @Override - public long count() { - return this.cache.size(); - } - - @Override - public void refresh() {} - - @Override - public void close() { - - } - - @Override - public CacheStats stats() { - return null; - } - - @Override - public CacheStoreType getTierType() { - return CacheStoreType.DISK; - } - - public static class Builder extends StoreAwareCacheBuilder { - - int maxSize; - long delay; - - @Override - public StoreAwareCache build() { - return new MockOnDiskCache(maxSize, this.getEventListener(), delay); - } - - public Builder setMaxSize(int maxSize) { - this.maxSize = maxSize; - return this; - } - - public Builder setDeliberateDelay(long millis) { - this.delay = millis; - return this; - } - } -} From 7f204056452e1b1ff220bc2da0b09e2dae1bc586 Mon Sep 17 00:00:00 2001 From: Sagar Upadhyaya Date: Fri, 23 Feb 2024 10:56:14 -0800 Subject: [PATCH 15/22] Introducing CacheService to create caches Signed-off-by: Sagar Upadhyaya --- CHANGELOG.md | 2 +- 
.../common/tier/TieredSpilloverCache.java | 51 +++++----- .../tier/TieredSpilloverCachePlugin.java | 5 +- .../tier/TieredSpilloverCachePluginTests.java | 5 +- .../tier/TieredSpilloverCacheTests.java | 66 +++++-------- plugins/cache-ehcache/build.gradle | 1 - .../opensearch/cache/EhcacheCachePlugin.java | 3 +- .../cache/store/disk/EhcacheDiskCache.java | 29 ++---- .../opensearch/cache/EhcachePluginTests.java | 5 +- .../store/disk/EhCacheDiskCacheTests.java | 3 +- .../org/opensearch/common/cache/ICache.java | 3 +- .../CacheModule.java} | 54 ++++------ .../{provider => module}/package-info.java | 2 +- .../common/cache/service/CacheService.java | 56 +++++++++++ .../{stats => service}/package-info.java | 4 +- .../common/cache/stats/CacheStats.java | 18 ---- .../cache/store/OpenSearchOnHeapCache.java | 2 +- .../main/java/org/opensearch/node/Node.java | 6 +- .../org/opensearch/plugins/CachePlugin.java | 3 +- .../common/cache/module/CacheModuleTests.java | 55 +++++++++++ .../cache/provider/CacheProviderTests.java | 98 ------------------- .../cache/service/CacheServiceTests.java | 84 ++++++++++++++++ 22 files changed, 296 insertions(+), 259 deletions(-) rename server/src/main/java/org/opensearch/common/cache/{provider/CacheProvider.java => module/CacheModule.java} (57%) rename server/src/main/java/org/opensearch/common/cache/{provider => module}/package-info.java (84%) create mode 100644 server/src/main/java/org/opensearch/common/cache/service/CacheService.java rename server/src/main/java/org/opensearch/common/cache/{stats => service}/package-info.java (69%) delete mode 100644 server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java create mode 100644 server/src/test/java/org/opensearch/common/cache/module/CacheModuleTests.java delete mode 100644 server/src/test/java/org/opensearch/common/cache/provider/CacheProviderTests.java create mode 100644 server/src/test/java/org/opensearch/common/cache/service/CacheServiceTests.java diff --git a/CHANGELOG.md 
b/CHANGELOG.md index 8532d56998359..8a1eac061699d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -92,7 +92,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), ## [Unreleased 2.x] ### Added -- [Tiered caching] Integrating ehcache as a disk cache option ([#11874](https://github.com/opensearch-project/OpenSearch/pull/11874)) +- [Tiered caching] Introducing cache plugins and exposing Ehcache as one of the pluggable disk cache option ([#11874](https://github.com/opensearch-project/OpenSearch/pull/11874)) - Add support for dependencies in plugin descriptor properties with semver range ([#11441](https://github.com/opensearch-project/OpenSearch/pull/11441)) - Add community_id ingest processor ([#12121](https://github.com/opensearch-project/OpenSearch/pull/12121)) - Introduce query level setting `index.query.max_nested_depth` limiting nested queries ([#3268](https://github.com/opensearch-project/OpenSearch/issues/3268) diff --git a/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCache.java b/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCache.java index 0c8003ac7b365..7b64a7e93fe27 100644 --- a/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCache.java +++ b/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCache.java @@ -14,7 +14,6 @@ import org.opensearch.common.cache.LoadAwareCacheLoader; import org.opensearch.common.cache.RemovalListener; import org.opensearch.common.cache.RemovalNotification; -import org.opensearch.common.cache.provider.CacheProvider; import org.opensearch.common.cache.store.config.CacheConfig; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; @@ -24,8 +23,8 @@ import java.io.IOException; import java.util.Arrays; import java.util.List; +import java.util.Map; import java.util.Objects; -import java.util.Optional; import 
java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.function.Function; @@ -49,7 +48,6 @@ public class TieredSpilloverCache implements ICache { ReadWriteLock readWriteLock = new ReentrantReadWriteLock(); ReleasableLock readLock = new ReleasableLock(readWriteLock.readLock()); ReleasableLock writeLock = new ReleasableLock(readWriteLock.writeLock()); - /** * Maintains caching tiers in ascending order of cache latency. */ @@ -75,9 +73,11 @@ public void onRemoval(RemovalNotification notification) { .setSettings(builder.cacheConfig.getSettings()) .setWeigher(builder.cacheConfig.getWeigher()) .build(), - builder.cacheType + builder.cacheType, + builder.cacheFactories + ); - this.diskCache = builder.diskCacheFactory.create(builder.cacheConfig, builder.cacheType); + this.diskCache = builder.diskCacheFactory.create(builder.cacheConfig, builder.cacheType, builder.cacheFactories); this.cacheList = Arrays.asList(onHeapCache, diskCache); } @@ -105,6 +105,7 @@ public void put(K key, V value) { @Override public V computeIfAbsent(K key, LoadAwareCacheLoader loader) throws Exception { + V cacheValue = getValueFromTieredCache().apply(key); if (cacheValue == null) { // Add the value to the onHeap cache. We are calling computeIfAbsent which does another get inside. @@ -203,41 +204,36 @@ public static class TieredSpilloverCacheFactory implements ICache.Factory { public static final String TIERED_SPILLOVER_CACHE_NAME = "tiered_spillover"; /** - * Cache provider which is needed to extract factories for desired cache store. - */ - private final CacheProvider cacheProvider; - - /** - * Parameterized constructor - * @param cacheProvider Contains info about various caches. 
+ * Default constructor */ - public TieredSpilloverCacheFactory(CacheProvider cacheProvider) { - this.cacheProvider = cacheProvider; - } + public TieredSpilloverCacheFactory() {} @Override - public ICache create(CacheConfig config, CacheType cacheType) { + public ICache create(CacheConfig config, CacheType cacheType, Map cacheFactories) { Settings settings = config.getSettings(); Setting onHeapSetting = TieredSpilloverCacheSettings.TIERED_SPILLOVER_ONHEAP_STORE_NAME.getConcreteSettingForNamespace( cacheType.getSettingPrefix() ); - Optional onHeapCacheFactory = cacheProvider.getCacheFactoryForCacheStoreName(onHeapSetting.get(settings)); - if (onHeapCacheFactory.isEmpty()) { + String onHeapCacheStoreName = onHeapSetting.get(settings); + if (!cacheFactories.containsKey(onHeapCacheStoreName)) { throw new IllegalArgumentException( "No associated onHeapCache found for tieredSpilloverCache for " + "cacheType:" + cacheType ); } + ICache.Factory onHeapCacheFactory = cacheFactories.get(onHeapCacheStoreName); + Setting onDiskSetting = TieredSpilloverCacheSettings.TIERED_SPILLOVER_DISK_STORE_NAME.getConcreteSettingForNamespace( cacheType.getSettingPrefix() ); - Optional diskCacheFactory = cacheProvider.getCacheFactoryForCacheStoreName(onDiskSetting.get(settings)); - if (diskCacheFactory.isEmpty()) { + String diskCacheStoreName = onDiskSetting.get(settings); + if (!cacheFactories.containsKey(diskCacheStoreName)) { throw new IllegalArgumentException( "No associated diskCache found for tieredSpilloverCache for " + "cacheType:" + cacheType ); } - return new Builder().setDiskCacheFactory(diskCacheFactory.get()) - .setOnHeapCacheFactory(onHeapCacheFactory.get()) + ICache.Factory diskCacheFactory = cacheFactories.get(diskCacheStoreName); + return new Builder().setDiskCacheFactory(diskCacheFactory) + .setOnHeapCacheFactory(onHeapCacheFactory) .setRemovalListener(config.getRemovalListener()) .setCacheConfig(config) .setCacheType(cacheType) @@ -261,6 +257,7 @@ public static class 
Builder { private RemovalListener removalListener; private CacheConfig cacheConfig; private CacheType cacheType; + private Map cacheFactories; /** * Default constructor @@ -317,6 +314,16 @@ public Builder setCacheType(CacheType cacheType) { return this; } + /** + * Set cache factories + * @param cacheFactories cache factories + * @return builder + */ + public Builder setCacheFactories(Map cacheFactories) { + this.cacheFactories = cacheFactories; + return this; + } + /** * Build tiered spillover cache. * @return TieredSpilloverCache diff --git a/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCachePlugin.java b/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCachePlugin.java index f3e0265ab92d4..19abf8ae63c28 100644 --- a/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCachePlugin.java +++ b/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCachePlugin.java @@ -10,7 +10,6 @@ import org.opensearch.common.cache.CacheType; import org.opensearch.common.cache.ICache; -import org.opensearch.common.cache.provider.CacheProvider; import org.opensearch.common.settings.Setting; import org.opensearch.plugins.CachePlugin; import org.opensearch.plugins.Plugin; @@ -35,10 +34,10 @@ public class TieredSpilloverCachePlugin extends Plugin implements CachePlugin { TieredSpilloverCachePlugin() {} @Override - public Map getCacheFactoryMap(CacheProvider cacheProvider) { + public Map getCacheFactoryMap() { return Map.of( TieredSpilloverCache.TieredSpilloverCacheFactory.TIERED_SPILLOVER_CACHE_NAME, - new TieredSpilloverCache.TieredSpilloverCacheFactory(cacheProvider) + new TieredSpilloverCache.TieredSpilloverCacheFactory() ); } diff --git a/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCachePluginTests.java b/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCachePluginTests.java index 
1775e70faf1fe..1172a48e97c6a 100644 --- a/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCachePluginTests.java +++ b/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCachePluginTests.java @@ -9,18 +9,15 @@ package org.opensearch.cache.common.tier; import org.opensearch.common.cache.ICache; -import org.opensearch.common.cache.provider.CacheProvider; import org.opensearch.test.OpenSearchTestCase; import java.util.Map; -import static org.mockito.Mockito.mock; - public class TieredSpilloverCachePluginTests extends OpenSearchTestCase { public void testGetCacheFactoryMap() { TieredSpilloverCachePlugin tieredSpilloverCachePlugin = new TieredSpilloverCachePlugin(); - Map map = tieredSpilloverCachePlugin.getCacheFactoryMap(mock(CacheProvider.class)); + Map map = tieredSpilloverCachePlugin.getCacheFactoryMap(); assertNotNull(map.get(TieredSpilloverCache.TieredSpilloverCacheFactory.TIERED_SPILLOVER_CACHE_NAME)); assertEquals(TieredSpilloverCachePlugin.TIERED_CACHE_SPILLOVER_PLUGIN_NAME, tieredSpilloverCachePlugin.getName()); } diff --git a/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCacheTests.java b/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCacheTests.java index 5ae4491c53891..87851ca69dcae 100644 --- a/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCacheTests.java +++ b/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCacheTests.java @@ -13,7 +13,6 @@ import org.opensearch.common.cache.LoadAwareCacheLoader; import org.opensearch.common.cache.RemovalListener; import org.opensearch.common.cache.RemovalNotification; -import org.opensearch.common.cache.provider.CacheProvider; import org.opensearch.common.cache.store.OpenSearchOnHeapCache; import org.opensearch.common.cache.store.builders.ICacheBuilder; import org.opensearch.common.cache.store.config.CacheConfig; 
@@ -22,7 +21,6 @@ import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; import org.opensearch.core.common.unit.ByteSizeValue; -import org.opensearch.plugins.CachePlugin; import org.opensearch.test.OpenSearchTestCase; import java.util.ArrayList; @@ -37,9 +35,6 @@ import java.util.concurrent.atomic.AtomicReference; import static org.opensearch.common.cache.store.settings.OpenSearchOnHeapCacheSettings.MAXIMUM_SIZE_IN_BYTES_KEY; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; public class TieredSpilloverCacheTests extends OpenSearchTestCase { @@ -121,24 +116,21 @@ public void testComputeIfAbsentWithFactoryBasedCacheCreation() throws Exception onHeapCacheSize * keyValueSize + "b" ) .build(); - CachePlugin onHeapCachePlugin = mock(CachePlugin.class); - CachePlugin diskCachePlugin = mock(CachePlugin.class); - when(onHeapCachePlugin.getCacheFactoryMap(any(CacheProvider.class))).thenReturn( - Map.of(OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory.NAME, new OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory()) - ); - when(diskCachePlugin.getCacheFactoryMap(any(CacheProvider.class))).thenReturn( - Map.of(MockOnDiskCache.MockDiskCacheFactory.NAME, new MockOnDiskCache.MockDiskCacheFactory(0, randomIntBetween(100, 300))) - ); - CacheProvider cacheProvider = new CacheProvider(List.of(onHeapCachePlugin, diskCachePlugin), settings); - ICache tieredSpilloverICache = new TieredSpilloverCache.TieredSpilloverCacheFactory(cacheProvider).create( + ICache tieredSpilloverICache = new TieredSpilloverCache.TieredSpilloverCacheFactory().create( new CacheConfig.Builder().setKeyType(String.class) .setKeyType(String.class) .setWeigher((k, v) -> keyValueSize) .setRemovalListener(removalListener) .setSettings(settings) .build(), - CacheType.INDICES_REQUEST_CACHE + CacheType.INDICES_REQUEST_CACHE, + Map.of( + OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory.NAME, + new 
OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory(), + MockOnDiskCache.MockDiskCacheFactory.NAME, + new MockOnDiskCache.MockDiskCacheFactory(0, randomIntBetween(100, 300)) + ) ); TieredSpilloverCache tieredSpilloverCache = (TieredSpilloverCache) tieredSpilloverICache; @@ -183,26 +175,23 @@ public void testWithFactoryCreationWithOnHeapCacheNotPresent() { onHeapCacheSize * keyValueSize + "b" ) .build(); - CachePlugin onHeapCachePlugin = mock(CachePlugin.class); - CachePlugin diskCachePlugin = mock(CachePlugin.class); - when(onHeapCachePlugin.getCacheFactoryMap(any(CacheProvider.class))).thenReturn( - Map.of(OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory.NAME, new OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory()) - ); - when(diskCachePlugin.getCacheFactoryMap(any(CacheProvider.class))).thenReturn( - Map.of(MockOnDiskCache.MockDiskCacheFactory.NAME, new MockOnDiskCache.MockDiskCacheFactory(0, randomIntBetween(100, 300))) - ); - CacheProvider cacheProvider = new CacheProvider(List.of(onHeapCachePlugin, diskCachePlugin), settings); IllegalArgumentException ex = assertThrows( IllegalArgumentException.class, - () -> new TieredSpilloverCache.TieredSpilloverCacheFactory(cacheProvider).create( + () -> new TieredSpilloverCache.TieredSpilloverCacheFactory().create( new CacheConfig.Builder().setKeyType(String.class) .setKeyType(String.class) .setWeigher((k, v) -> keyValueSize) .setRemovalListener(removalListener) .setSettings(settings) .build(), - CacheType.INDICES_REQUEST_CACHE + CacheType.INDICES_REQUEST_CACHE, + Map.of( + OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory.NAME, + new OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory(), + MockOnDiskCache.MockDiskCacheFactory.NAME, + new MockOnDiskCache.MockDiskCacheFactory(0, randomIntBetween(100, 300)) + ) ) ); assertEquals( @@ -231,26 +220,23 @@ public void testWithFactoryCreationWithDiskCacheNotPresent() { onHeapCacheSize * keyValueSize + "b" ) .build(); - CachePlugin onHeapCachePlugin = mock(CachePlugin.class); - 
CachePlugin diskCachePlugin = mock(CachePlugin.class); - when(onHeapCachePlugin.getCacheFactoryMap(any(CacheProvider.class))).thenReturn( - Map.of(OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory.NAME, new OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory()) - ); - when(diskCachePlugin.getCacheFactoryMap(any(CacheProvider.class))).thenReturn( - Map.of(MockOnDiskCache.MockDiskCacheFactory.NAME, new MockOnDiskCache.MockDiskCacheFactory(0, randomIntBetween(100, 300))) - ); - CacheProvider cacheProvider = new CacheProvider(List.of(onHeapCachePlugin, diskCachePlugin), settings); IllegalArgumentException ex = assertThrows( IllegalArgumentException.class, - () -> new TieredSpilloverCache.TieredSpilloverCacheFactory(cacheProvider).create( + () -> new TieredSpilloverCache.TieredSpilloverCacheFactory().create( new CacheConfig.Builder().setKeyType(String.class) .setKeyType(String.class) .setWeigher((k, v) -> keyValueSize) .setRemovalListener(removalListener) .setSettings(settings) .build(), - CacheType.INDICES_REQUEST_CACHE + CacheType.INDICES_REQUEST_CACHE, + Map.of( + OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory.NAME, + new OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory(), + MockOnDiskCache.MockDiskCacheFactory.NAME, + new MockOnDiskCache.MockDiskCacheFactory(0, randomIntBetween(100, 300)) + ) ) ); assertEquals( @@ -977,7 +963,7 @@ public void onRemoval(RemovalNotification notification) { static class OpenSearchOnHeapCacheWrapperFactory extends OpenSearchOnHeapCacheFactory { @Override - public ICache create(CacheConfig config, CacheType cacheType) { + public ICache create(CacheConfig config, CacheType cacheType, Map cacheFactories) { Map> settingList = OpenSearchOnHeapCacheSettings.getSettingListForCacheType(cacheType); Settings settings = config.getSettings(); return new OpenSearchOnHeapCacheWrapper<>( @@ -1096,7 +1082,7 @@ public static class MockDiskCacheFactory implements Factory { } @Override - public ICache create(CacheConfig config, CacheType cacheType) 
{ + public ICache create(CacheConfig config, CacheType cacheType, Map cacheFactories) { return new Builder().setMaxSize(maxSize).setDeliberateDelay(delay).build(); } diff --git a/plugins/cache-ehcache/build.gradle b/plugins/cache-ehcache/build.gradle index 5c36563cfd9ab..65e7daaaacf26 100644 --- a/plugins/cache-ehcache/build.gradle +++ b/plugins/cache-ehcache/build.gradle @@ -16,7 +16,6 @@ apply plugin: 'opensearch.internal-cluster-test' opensearchplugin { description 'Ehcache based cache implementation.' classname 'org.opensearch.cache.EhcacheCachePlugin' - hasClientJar = true } versions << [ diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheCachePlugin.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheCachePlugin.java index 648986957e11e..ceda96e4a7d7d 100644 --- a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheCachePlugin.java +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheCachePlugin.java @@ -11,7 +11,6 @@ import org.opensearch.cache.store.disk.EhcacheDiskCache; import org.opensearch.common.cache.CacheType; import org.opensearch.common.cache.ICache; -import org.opensearch.common.cache.provider.CacheProvider; import org.opensearch.common.settings.Setting; import org.opensearch.plugins.CachePlugin; import org.opensearch.plugins.Plugin; @@ -35,7 +34,7 @@ public class EhcacheCachePlugin extends Plugin implements CachePlugin { public EhcacheCachePlugin() {} @Override - public Map getCacheFactoryMap(CacheProvider cacheProvider) { + public Map getCacheFactoryMap() { return Map.of(EhcacheDiskCache.EhcacheDiskCacheFactory.EHCACHE_DISK_CACHE_NAME, new EhcacheDiskCache.EhcacheDiskCacheFactory()); } diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java index fc3cb6092c9d6..666d89e98127f 100644 --- 
a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java @@ -20,7 +20,6 @@ import org.opensearch.common.cache.RemovalListener; import org.opensearch.common.cache.RemovalNotification; import org.opensearch.common.cache.RemovalReason; -import org.opensearch.common.cache.stats.CacheStats; import org.opensearch.common.cache.store.builders.ICacheBuilder; import org.opensearch.common.cache.store.config.CacheConfig; import org.opensearch.common.collect.Tuple; @@ -96,13 +95,14 @@ public class EhcacheDiskCache implements ICache { private final Class keyType; private final Class valueType; private final TimeValue expireAfterAccess; - private final DiskCacheStats stats = new DiskCacheStats(); private final EhCacheEventListener ehCacheEventListener; private final String threadPoolAlias; private final Settings settings; private final RemovalListener removalListener; private final CacheType cacheType; private final String diskCacheAlias; + // TODO: Move count to stats once those changes are ready. + private final CounterMetric entries = new CounterMetric(); /** * Used in computeIfAbsent to synchronize loading of a given key. This is needed as ehcache doesn't provide a @@ -363,7 +363,7 @@ public Iterable keys() { */ @Override public long count() { - return stats.count(); + return entries.count(); } @Override @@ -382,19 +382,6 @@ public void close() { } } - /** - * Stats related to disk cache. - * TODO: Remove this once cache stats are integrated. - */ - static class DiskCacheStats implements CacheStats { - private final CounterMetric count = new CounterMetric(); - - @Override - public long count() { - return count.count(); - } - } - /** * This iterator wraps ehCache iterator and only iterates over its keys. 
* @param Type of key @@ -438,17 +425,17 @@ class EhCacheEventListener implements CacheEventListener { public void onEvent(CacheEvent event) { switch (event.getType()) { case CREATED: - stats.count.inc(); + entries.inc(); // this.eventListener.onCached(event.getKey(), event.getNewValue(), CacheStoreType.DISK); assert event.getOldValue() == null; break; case EVICTED: this.removalListener.onRemoval(new RemovalNotification<>(event.getKey(), event.getOldValue(), RemovalReason.EVICTED)); - stats.count.dec(); + entries.dec(); assert event.getNewValue() == null; break; case REMOVED: - stats.count.dec(); + entries.dec(); this.removalListener.onRemoval(new RemovalNotification<>(event.getKey(), event.getOldValue(), RemovalReason.EXPLICIT)); assert event.getNewValue() == null; break; @@ -456,7 +443,7 @@ public void onEvent(CacheEvent event) { this.removalListener.onRemoval( new RemovalNotification<>(event.getKey(), event.getOldValue(), RemovalReason.INVALIDATED) ); - stats.count.dec(); + entries.dec(); assert event.getNewValue() == null; break; case UPDATED: @@ -483,7 +470,7 @@ public static class EhcacheDiskCacheFactory implements ICache.Factory { public EhcacheDiskCacheFactory() {} @Override - public ICache create(CacheConfig config, CacheType cacheType) { + public ICache create(CacheConfig config, CacheType cacheType, Map cacheFactories) { Map> settingList = EhcacheDiskCacheSettings.getSettingListForCacheType(cacheType); Settings settings = config.getSettings(); return new Builder().setStoragePath((String) settingList.get(DISK_STORAGE_PATH_KEY).get(settings)) diff --git a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/EhcachePluginTests.java b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/EhcachePluginTests.java index c92fd7a356d07..538a45456ddc3 100644 --- a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/EhcachePluginTests.java +++ b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/EhcachePluginTests.java @@ -10,19 +10,16 @@ import 
org.opensearch.cache.store.disk.EhcacheDiskCache; import org.opensearch.common.cache.ICache; -import org.opensearch.common.cache.provider.CacheProvider; import org.opensearch.test.OpenSearchTestCase; import java.util.Map; -import static org.mockito.Mockito.mock; - public class EhcachePluginTests extends OpenSearchTestCase { private EhcacheCachePlugin ehcacheCachePlugin = new EhcacheCachePlugin(); public void testGetCacheStoreTypeMap() { - Map factoryMap = ehcacheCachePlugin.getCacheFactoryMap(mock(CacheProvider.class)); + Map factoryMap = ehcacheCachePlugin.getCacheFactoryMap(); assertNotNull(factoryMap); assertNotNull(factoryMap.get(EhcacheDiskCache.EhcacheDiskCacheFactory.EHCACHE_DISK_CACHE_NAME)); } diff --git a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java index 5c21b0b7cf182..862bebba7e628 100644 --- a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java +++ b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java @@ -103,7 +103,8 @@ public void testBasicGetAndPutUsingFactory() throws IOException { .build() ) .build(), - CacheType.INDICES_REQUEST_CACHE + CacheType.INDICES_REQUEST_CACHE, + Map.of() ); int randomKeys = randomIntBetween(10, 100); Map keyValueMap = new HashMap<>(); diff --git a/server/src/main/java/org/opensearch/common/cache/ICache.java b/server/src/main/java/org/opensearch/common/cache/ICache.java index 107b4315ea48b..f7be46a852631 100644 --- a/server/src/main/java/org/opensearch/common/cache/ICache.java +++ b/server/src/main/java/org/opensearch/common/cache/ICache.java @@ -12,6 +12,7 @@ import org.opensearch.common.cache.store.config.CacheConfig; import java.io.Closeable; +import java.util.Map; /** * Represents a cache interface. 
@@ -43,7 +44,7 @@ public interface ICache extends Closeable { */ @ExperimentalApi interface Factory { - ICache create(CacheConfig config, CacheType cacheType); + ICache create(CacheConfig config, CacheType cacheType, Map cacheFactories); String getCacheName(); } diff --git a/server/src/main/java/org/opensearch/common/cache/provider/CacheProvider.java b/server/src/main/java/org/opensearch/common/cache/module/CacheModule.java similarity index 57% rename from server/src/main/java/org/opensearch/common/cache/provider/CacheProvider.java rename to server/src/main/java/org/opensearch/common/cache/module/CacheModule.java index 5cb4b48ff6922..832a65b573aec 100644 --- a/server/src/main/java/org/opensearch/common/cache/provider/CacheProvider.java +++ b/server/src/main/java/org/opensearch/common/cache/module/CacheModule.java @@ -6,14 +6,12 @@ * compatible open source license. */ -package org.opensearch.common.cache.provider; +package org.opensearch.common.cache.module; import org.opensearch.common.annotation.ExperimentalApi; -import org.opensearch.common.cache.CacheType; import org.opensearch.common.cache.ICache; -import org.opensearch.common.cache.settings.CacheSettings; +import org.opensearch.common.cache.service.CacheService; import org.opensearch.common.cache.store.OpenSearchOnHeapCache; -import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; import org.opensearch.plugins.CachePlugin; @@ -21,62 +19,48 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Optional; /** * Holds all the cache factories and provides a way to fetch them when needed. 
*/ @ExperimentalApi -public class CacheProvider { +public class CacheModule { private final Map cacheStoreTypeFactories; + + private final CacheService cacheService; private final Settings settings; - public CacheProvider(List cachePlugins, Settings settings) { + public CacheModule(List cachePlugins, Settings settings) { this.cacheStoreTypeFactories = getCacheStoreTypeFactories(cachePlugins); this.settings = settings; + this.cacheService = new CacheService(cacheStoreTypeFactories, settings); } - private Map getCacheStoreTypeFactories(List cachePlugins) { + private static Map getCacheStoreTypeFactories(List cachePlugins) { Map cacheStoreTypeFactories = new HashMap<>(); + // Add the core OpenSearchOnHeapCache as well. + cacheStoreTypeFactories.put( + OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory.NAME, + new OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory() + ); for (CachePlugin cachePlugin : cachePlugins) { - Map factoryMap = cachePlugin.getCacheFactoryMap(this); + Map factoryMap = cachePlugin.getCacheFactoryMap(); for (Map.Entry entry : factoryMap.entrySet()) { if (cacheStoreTypeFactories.put(entry.getKey(), entry.getValue()) != null) { throw new IllegalArgumentException("Cache name: " + entry.getKey() + " is " + "already registered"); } } } - // Add the core OpenSearchOnHeapCache as well. - cacheStoreTypeFactories.put( - OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory.NAME, - new OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory() - ); return Collections.unmodifiableMap(cacheStoreTypeFactories); } - // Package private for testing. 
- protected Map getCacheStoreTypeFactories() { - return cacheStoreTypeFactories; - } - - public Optional getCacheFactoryForCacheStoreName(String cacheStoreName) { - if (cacheStoreName == null || cacheStoreName.isBlank()) { - return Optional.empty(); - } else { - return Optional.ofNullable(cacheStoreTypeFactories.get(cacheStoreName)); - } + public CacheService getCacheService() { + return this.cacheService; } - public Optional getCacheFactoryForCacheType(CacheType cacheType) { - Setting cacheSettingForCacheType = CacheSettings.CACHE_TYPE_STORE_NAME.getConcreteSettingForNamespace( - cacheType.getSettingPrefix() - ); - String storeName = cacheSettingForCacheType.get(settings); - if (storeName == null || storeName.isBlank()) { - return Optional.empty(); - } else { - return Optional.ofNullable(cacheStoreTypeFactories.get(storeName)); - } + // Package private for testing. + Map getCacheStoreTypeFactories() { + return cacheStoreTypeFactories; } } diff --git a/server/src/main/java/org/opensearch/common/cache/provider/package-info.java b/server/src/main/java/org/opensearch/common/cache/module/package-info.java similarity index 84% rename from server/src/main/java/org/opensearch/common/cache/provider/package-info.java rename to server/src/main/java/org/opensearch/common/cache/module/package-info.java index 24221f222f93f..95ed25ca21643 100644 --- a/server/src/main/java/org/opensearch/common/cache/provider/package-info.java +++ b/server/src/main/java/org/opensearch/common/cache/module/package-info.java @@ -7,4 +7,4 @@ */ /** Base package for cache providers. 
*/ -package org.opensearch.common.cache.provider; +package org.opensearch.common.cache.module; diff --git a/server/src/main/java/org/opensearch/common/cache/service/CacheService.java b/server/src/main/java/org/opensearch/common/cache/service/CacheService.java new file mode 100644 index 0000000000000..c6e970b58ea08 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/service/CacheService.java @@ -0,0 +1,56 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.cache.service; + +import org.opensearch.common.cache.CacheType; +import org.opensearch.common.cache.ICache; +import org.opensearch.common.cache.settings.CacheSettings; +import org.opensearch.common.cache.store.config.CacheConfig; +import org.opensearch.common.settings.Setting; +import org.opensearch.common.settings.Settings; + +import java.util.HashMap; +import java.util.Map; + +/** + * Service responsible to create caches. 
+ */ +public class CacheService { + + private final Map cacheStoreTypeFactories; + private final Settings settings; + private Map> cacheTypeMap; + + public CacheService(Map cacheStoreTypeFactories, Settings settings) { + this.cacheStoreTypeFactories = cacheStoreTypeFactories; + this.settings = settings; + this.cacheTypeMap = new HashMap<>(); + } + + public Map> getCacheTypeMap() { + return this.cacheTypeMap; + } + + public ICache createCache(CacheConfig config, CacheType cacheType) { + Setting cacheSettingForCacheType = CacheSettings.CACHE_TYPE_STORE_NAME.getConcreteSettingForNamespace( + cacheType.getSettingPrefix() + ); + String storeName = cacheSettingForCacheType.get(settings); + if (storeName == null || storeName.isBlank()) { + throw new IllegalArgumentException("No configuration exists for cache type: " + cacheType); + } + if (!cacheStoreTypeFactories.containsKey(storeName)) { + throw new IllegalArgumentException("No store name: [" + storeName + "] is registered for cache type: " + cacheType); + } + ICache.Factory factory = cacheStoreTypeFactories.get(storeName); + ICache iCache = factory.create(config, cacheType, cacheStoreTypeFactories); + cacheTypeMap.put(cacheType, iCache); + return iCache; + } +} diff --git a/server/src/main/java/org/opensearch/common/cache/stats/package-info.java b/server/src/main/java/org/opensearch/common/cache/service/package-info.java similarity index 69% rename from server/src/main/java/org/opensearch/common/cache/stats/package-info.java rename to server/src/main/java/org/opensearch/common/cache/service/package-info.java index 08aef5a9b3e88..5fb87f7613627 100644 --- a/server/src/main/java/org/opensearch/common/cache/stats/package-info.java +++ b/server/src/main/java/org/opensearch/common/cache/service/package-info.java @@ -5,5 +5,5 @@ * this file be licensed under the Apache-2.0 license or a * compatible open source license. 
*/ -/** Base package for stats related classes */ -package org.opensearch.common.cache.stats; +/** Package related to cache service **/ +package org.opensearch.common.cache.service; diff --git a/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java b/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java deleted file mode 100644 index cf84f296916fb..0000000000000 --- a/server/src/main/java/org/opensearch/common/cache/stats/CacheStats.java +++ /dev/null @@ -1,18 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.common.cache.stats; - -/** - * Interface for any cache specific stats. - * TODO: Add rest of stats like hits/misses. - */ -public interface CacheStats { - // Provides the current number of entries in cache. - long count(); -} diff --git a/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java b/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java index 101c70e478c62..d218903de5b6d 100644 --- a/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java +++ b/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java @@ -108,7 +108,7 @@ public static class OpenSearchOnHeapCacheFactory implements Factory { public static final String NAME = "opensearch_onheap"; @Override - public ICache create(CacheConfig config, CacheType cacheType) { + public ICache create(CacheConfig config, CacheType cacheType, Map cacheFactories) { Map> settingList = OpenSearchOnHeapCacheSettings.getSettingListForCacheType(cacheType); Settings settings = config.getSettings(); return new Builder().setMaximumWeightInBytes( diff --git a/server/src/main/java/org/opensearch/node/Node.java b/server/src/main/java/org/opensearch/node/Node.java index 
50adaaaff9313..12ebc142d37a6 100644 --- a/server/src/main/java/org/opensearch/node/Node.java +++ b/server/src/main/java/org/opensearch/node/Node.java @@ -83,7 +83,8 @@ import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.SetOnce; import org.opensearch.common.StopWatch; -import org.opensearch.common.cache.provider.CacheProvider; +import org.opensearch.common.cache.module.CacheModule; +import org.opensearch.common.cache.service.CacheService; import org.opensearch.common.inject.Injector; import org.opensearch.common.inject.Key; import org.opensearch.common.inject.Module; @@ -794,7 +795,8 @@ protected Node( final SearchRequestSlowLog searchRequestSlowLog = new SearchRequestSlowLog(clusterService); remoteStoreStatsTrackerFactory = new RemoteStoreStatsTrackerFactory(clusterService, settings); - CacheProvider cacheProvider = new CacheProvider(pluginsService.filterPlugins(CachePlugin.class), settings); + CacheModule cacheModule = new CacheModule(pluginsService.filterPlugins(CachePlugin.class), settings); + CacheService cacheService = cacheModule.getCacheService(); final IndicesService indicesService = new IndicesService( settings, pluginsService, diff --git a/server/src/main/java/org/opensearch/plugins/CachePlugin.java b/server/src/main/java/org/opensearch/plugins/CachePlugin.java index 2560f7044e715..d962ed1db14bf 100644 --- a/server/src/main/java/org/opensearch/plugins/CachePlugin.java +++ b/server/src/main/java/org/opensearch/plugins/CachePlugin.java @@ -10,7 +10,6 @@ import org.opensearch.common.annotation.ExperimentalApi; import org.opensearch.common.cache.ICache; -import org.opensearch.common.cache.provider.CacheProvider; import java.util.Map; @@ -35,7 +34,7 @@ public interface CachePlugin { * * @return Map of cacheStoreType and an associated factory. 
*/ - Map getCacheFactoryMap(CacheProvider cacheProvider); + Map getCacheFactoryMap(); String getName(); } diff --git a/server/src/test/java/org/opensearch/common/cache/module/CacheModuleTests.java b/server/src/test/java/org/opensearch/common/cache/module/CacheModuleTests.java new file mode 100644 index 0000000000000..35d7877343909 --- /dev/null +++ b/server/src/test/java/org/opensearch/common/cache/module/CacheModuleTests.java @@ -0,0 +1,55 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.cache.module; + +import org.opensearch.common.cache.ICache; +import org.opensearch.common.settings.Settings; +import org.opensearch.plugins.CachePlugin; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.List; +import java.util.Map; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class CacheModuleTests extends OpenSearchTestCase { + + public void testWithMultiplePlugins() { + CachePlugin mockPlugin1 = mock(CachePlugin.class); + ICache.Factory factory1 = mock(ICache.Factory.class); + CachePlugin mockPlugin2 = mock(CachePlugin.class); + ICache.Factory factory2 = mock(ICache.Factory.class); + when(mockPlugin1.getCacheFactoryMap()).thenReturn(Map.of("cache1", factory1)); + when(mockPlugin2.getCacheFactoryMap()).thenReturn(Map.of("cache2", factory2)); + + CacheModule cacheModule = new CacheModule(List.of(mockPlugin1, mockPlugin2), Settings.EMPTY); + + Map factoryMap = cacheModule.getCacheStoreTypeFactories(); + assertEquals(factoryMap.get("cache1"), factory1); + assertEquals(factoryMap.get("cache2"), factory2); + } + + public void testWithSameCacheStoreTypeAndName() { + CachePlugin mockPlugin1 = mock(CachePlugin.class); + ICache.Factory factory1 = mock(ICache.Factory.class); + CachePlugin mockPlugin2 = mock(CachePlugin.class); + 
ICache.Factory factory2 = mock(ICache.Factory.class); + when(factory1.getCacheName()).thenReturn("cache"); + when(factory2.getCacheName()).thenReturn("cache"); + when(mockPlugin1.getCacheFactoryMap()).thenReturn(Map.of("cache", factory1)); + when(mockPlugin2.getCacheFactoryMap()).thenReturn(Map.of("cache", factory2)); + + IllegalArgumentException ex = assertThrows( + IllegalArgumentException.class, + () -> new CacheModule(List.of(mockPlugin1, mockPlugin2), Settings.EMPTY) + ); + assertEquals("Cache name: cache is already registered", ex.getMessage()); + } +} diff --git a/server/src/test/java/org/opensearch/common/cache/provider/CacheProviderTests.java b/server/src/test/java/org/opensearch/common/cache/provider/CacheProviderTests.java deleted file mode 100644 index 112a9c0c57d50..0000000000000 --- a/server/src/test/java/org/opensearch/common/cache/provider/CacheProviderTests.java +++ /dev/null @@ -1,98 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- */ - -package org.opensearch.common.cache.provider; - -import org.opensearch.common.cache.CacheType; -import org.opensearch.common.cache.ICache; -import org.opensearch.common.cache.settings.CacheSettings; -import org.opensearch.common.settings.Setting; -import org.opensearch.common.settings.Settings; -import org.opensearch.plugins.CachePlugin; -import org.opensearch.test.OpenSearchTestCase; - -import java.util.List; -import java.util.Map; - -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class CacheProviderTests extends OpenSearchTestCase { - - public void testWithMultiplePlugins() { - CachePlugin mockPlugin1 = mock(CachePlugin.class); - ICache.Factory factory1 = mock(ICache.Factory.class); - CachePlugin mockPlugin2 = mock(CachePlugin.class); - ICache.Factory factory2 = mock(ICache.Factory.class); - when(mockPlugin1.getCacheFactoryMap(any(CacheProvider.class))).thenReturn(Map.of("cache1", factory1)); - when(mockPlugin2.getCacheFactoryMap(any(CacheProvider.class))).thenReturn(Map.of("cache2", factory2)); - - CacheProvider cacheProvider = new CacheProvider(List.of(mockPlugin1, mockPlugin2), Settings.EMPTY); - - Map factoryMap = cacheProvider.getCacheStoreTypeFactories(); - assertEquals(factoryMap.get("cache1"), factory1); - assertEquals(factoryMap.get("cache2"), factory2); - } - - public void testWithSameCacheStoreTypeAndName() { - CachePlugin mockPlugin1 = mock(CachePlugin.class); - ICache.Factory factory1 = mock(ICache.Factory.class); - CachePlugin mockPlugin2 = mock(CachePlugin.class); - ICache.Factory factory2 = mock(ICache.Factory.class); - when(factory1.getCacheName()).thenReturn("cache"); - when(factory2.getCacheName()).thenReturn("cache"); - when(mockPlugin1.getCacheFactoryMap(any(CacheProvider.class))).thenReturn(Map.of("cache", factory1)); - when(mockPlugin2.getCacheFactoryMap(any(CacheProvider.class))).thenReturn(Map.of("cache", factory2)); - - 
IllegalArgumentException ex = assertThrows( - IllegalArgumentException.class, - () -> new CacheProvider(List.of(mockPlugin1, mockPlugin2), Settings.EMPTY) - ); - assertEquals("Cache name: cache is already registered", ex.getMessage()); - } - - public void testWithCacheFactoryPresentForIndicesRequestCacheType() { - CachePlugin mockPlugin1 = mock(CachePlugin.class); - ICache.Factory factory1 = mock(ICache.Factory.class); - when(mockPlugin1.getCacheFactoryMap(any(CacheProvider.class))).thenReturn(Map.of("cache1", factory1)); - - Setting indicesRequestCacheSetting = CacheSettings.getConcreteSettingForCacheType(CacheType.INDICES_REQUEST_CACHE); - - CacheProvider cacheProvider = new CacheProvider( - List.of(mockPlugin1), - Settings.builder().put(indicesRequestCacheSetting.getKey(), "cache1").build() - ); - assertTrue(cacheProvider.getCacheFactoryForCacheType(CacheType.INDICES_REQUEST_CACHE).isPresent()); - assertEquals(cacheProvider.getCacheFactoryForCacheType(CacheType.INDICES_REQUEST_CACHE).get(), factory1); - } - - public void testWithCacheFactoryNotPresentForIndicesRequestCacheType() { - CachePlugin mockPlugin1 = mock(CachePlugin.class); - ICache.Factory factory1 = mock(ICache.Factory.class); - when(mockPlugin1.getCacheFactoryMap(any(CacheProvider.class))).thenReturn(Map.of("cache1", factory1)); - - Setting indicesRequestCacheSetting = CacheSettings.getConcreteSettingForCacheType(CacheType.INDICES_REQUEST_CACHE); - - CacheProvider cacheProvider = new CacheProvider( - List.of(mockPlugin1), - Settings.builder().put(indicesRequestCacheSetting.getKey(), "cache").build() - ); - assertFalse(cacheProvider.getCacheFactoryForCacheType(CacheType.INDICES_REQUEST_CACHE).isPresent()); - } - - public void testGetCacheFactoryForCacheStoreName() { - CachePlugin mockPlugin1 = mock(CachePlugin.class); - ICache.Factory factory1 = mock(ICache.Factory.class); - when(mockPlugin1.getCacheFactoryMap(any(CacheProvider.class))).thenReturn(Map.of("cache1", factory1)); - - CacheProvider 
cacheProvider = new CacheProvider(List.of(mockPlugin1), Settings.EMPTY); - assertTrue(cacheProvider.getCacheFactoryForCacheStoreName("cache1").isPresent()); - assertFalse(cacheProvider.getCacheFactoryForCacheStoreName("cache").isPresent()); - } -} diff --git a/server/src/test/java/org/opensearch/common/cache/service/CacheServiceTests.java b/server/src/test/java/org/opensearch/common/cache/service/CacheServiceTests.java new file mode 100644 index 0000000000000..9b821a3b2a9cb --- /dev/null +++ b/server/src/test/java/org/opensearch/common/cache/service/CacheServiceTests.java @@ -0,0 +1,84 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.cache.service; + +import org.opensearch.common.cache.CacheType; +import org.opensearch.common.cache.ICache; +import org.opensearch.common.cache.module.CacheModule; +import org.opensearch.common.cache.settings.CacheSettings; +import org.opensearch.common.cache.store.OpenSearchOnHeapCache; +import org.opensearch.common.cache.store.config.CacheConfig; +import org.opensearch.common.settings.Setting; +import org.opensearch.common.settings.Settings; +import org.opensearch.plugins.CachePlugin; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.List; +import java.util.Map; + +import static junit.framework.TestCase.assertEquals; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class CacheServiceTests extends OpenSearchTestCase { + + public void testWithCreateCacheForIndicesRequestCacheType() { + CachePlugin mockPlugin1 = mock(CachePlugin.class); + ICache.Factory factory1 = mock(ICache.Factory.class); + Map factoryMap = Map.of("cache1", factory1); + when(mockPlugin1.getCacheFactoryMap()).thenReturn(factoryMap); + + Setting indicesRequestCacheSetting = 
CacheSettings.getConcreteSettingForCacheType(CacheType.INDICES_REQUEST_CACHE); + + CacheModule cacheModule = new CacheModule( + List.of(mockPlugin1), + Settings.builder().put(indicesRequestCacheSetting.getKey(), "cache1").build() + ); + CacheConfig config = mock(CacheConfig.class); + ICache onHeapCache = mock(OpenSearchOnHeapCache.class); + when(factory1.create(config, CacheType.INDICES_REQUEST_CACHE, factoryMap)).thenReturn(onHeapCache); + + CacheService cacheService = cacheModule.getCacheService(); + ICache ircCache = cacheService.createCache(config, CacheType.INDICES_REQUEST_CACHE); + assertEquals(onHeapCache, ircCache); + } + + public void testWithCreateCacheWithNoStoreNamePresentForCacheType() { + ICache.Factory factory1 = mock(ICache.Factory.class); + Map factoryMap = Map.of("cache1", factory1); + CacheService cacheService = new CacheService(factoryMap, Settings.builder().build()); + + CacheConfig config = mock(CacheConfig.class); + IllegalArgumentException ex = assertThrows( + IllegalArgumentException.class, + () -> cacheService.createCache(config, CacheType.INDICES_REQUEST_CACHE) + ); + assertEquals("No configuration exists for cache type: INDICES_REQUEST_CACHE", ex.getMessage()); + } + + public void testWithCreateCacheWithInvalidStoreNameAssociatedForCacheType() { + ICache.Factory factory1 = mock(ICache.Factory.class); + Setting indicesRequestCacheSetting = CacheSettings.getConcreteSettingForCacheType(CacheType.INDICES_REQUEST_CACHE); + Map factoryMap = Map.of("cache1", factory1); + CacheService cacheService = new CacheService( + factoryMap, + Settings.builder().put(indicesRequestCacheSetting.getKey(), "cache").build() + ); + + CacheConfig config = mock(CacheConfig.class); + ICache onHeapCache = mock(OpenSearchOnHeapCache.class); + when(factory1.create(config, CacheType.INDICES_REQUEST_CACHE, factoryMap)).thenReturn(onHeapCache); + + IllegalArgumentException ex = assertThrows( + IllegalArgumentException.class, + () -> cacheService.createCache(config, 
CacheType.INDICES_REQUEST_CACHE) + ); + assertEquals("No store name: [cache] is registered for cache type: INDICES_REQUEST_CACHE", ex.getMessage()); + } +} From f625b51fa3a1e2a4c5e7c4c46b61b953be15ad23 Mon Sep 17 00:00:00 2001 From: Sagar Upadhyaya Date: Fri, 23 Feb 2024 11:07:11 -0800 Subject: [PATCH 16/22] Updating cache common build.gradle Signed-off-by: Sagar Upadhyaya --- modules/cache-common/build.gradle | 22 ---------------------- 1 file changed, 22 deletions(-) diff --git a/modules/cache-common/build.gradle b/modules/cache-common/build.gradle index 75931ac55ceae..c7052896e609b 100644 --- a/modules/cache-common/build.gradle +++ b/modules/cache-common/build.gradle @@ -4,28 +4,6 @@ * The OpenSearch Contributors require contributions made to * this file be licensed under the Apache-2.0 license or a * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
*/ opensearchplugin { From 2586fa1f82332e7b649428662e0c57c68c6a7e85 Mon Sep 17 00:00:00 2001 From: Sagar Upadhyaya Date: Tue, 27 Feb 2024 10:23:32 -0800 Subject: [PATCH 17/22] Making TieredCachePlugin constructor public Signed-off-by: Sagar Upadhyaya --- .../cache/common/tier/TieredSpilloverCachePlugin.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCachePlugin.java b/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCachePlugin.java index 19abf8ae63c28..6b0620c5fbede 100644 --- a/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCachePlugin.java +++ b/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCachePlugin.java @@ -31,7 +31,7 @@ public class TieredSpilloverCachePlugin extends Plugin implements CachePlugin { /** * Default constructor */ - TieredSpilloverCachePlugin() {} + public TieredSpilloverCachePlugin() {} @Override public Map getCacheFactoryMap() { From 6a2b374acfe737e34eb41cdc842496170f5c1932 Mon Sep 17 00:00:00 2001 From: Sagar Upadhyaya Date: Tue, 27 Feb 2024 12:10:32 -0800 Subject: [PATCH 18/22] Fixing CacheService unit test Signed-off-by: Sagar Upadhyaya --- .../opensearch/common/cache/service/CacheServiceTests.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/server/src/test/java/org/opensearch/common/cache/service/CacheServiceTests.java b/server/src/test/java/org/opensearch/common/cache/service/CacheServiceTests.java index 9b821a3b2a9cb..9d39f8a43ea58 100644 --- a/server/src/test/java/org/opensearch/common/cache/service/CacheServiceTests.java +++ b/server/src/test/java/org/opensearch/common/cache/service/CacheServiceTests.java @@ -23,6 +23,8 @@ import java.util.Map; import static junit.framework.TestCase.assertEquals; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; import 
static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -42,7 +44,7 @@ public void testWithCreateCacheForIndicesRequestCacheType() { ); CacheConfig config = mock(CacheConfig.class); ICache onHeapCache = mock(OpenSearchOnHeapCache.class); - when(factory1.create(config, CacheType.INDICES_REQUEST_CACHE, factoryMap)).thenReturn(onHeapCache); + when(factory1.create(eq(config), eq(CacheType.INDICES_REQUEST_CACHE), any(Map.class))).thenReturn(onHeapCache); CacheService cacheService = cacheModule.getCacheService(); ICache ircCache = cacheService.createCache(config, CacheType.INDICES_REQUEST_CACHE); From a8ea4c2b3c6b9175a38e2c5859c0f294c29bf0b3 Mon Sep 17 00:00:00 2001 From: Sagar Upadhyaya Date: Thu, 29 Feb 2024 14:09:12 -0800 Subject: [PATCH 19/22] Removing unnecessary comments from Tiered cache test Signed-off-by: Sagar Upadhyaya --- .../tier/TieredSpilloverCacheTests.java | 129 +----------------- 1 file changed, 1 insertion(+), 128 deletions(-) diff --git a/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCacheTests.java b/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCacheTests.java index 87851ca69dcae..7c9569f5defe2 100644 --- a/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCacheTests.java +++ b/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCacheTests.java @@ -18,9 +18,7 @@ import org.opensearch.common.cache.store.config.CacheConfig; import org.opensearch.common.cache.store.settings.OpenSearchOnHeapCacheSettings; import org.opensearch.common.metrics.CounterMetric; -import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; -import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.test.OpenSearchTestCase; import java.util.ArrayList; @@ -353,8 +351,6 @@ public void testComputeIfAbsentWithEvictionsFromBothTier() throws Exception { 
tieredSpilloverCache.computeIfAbsent(UUID.randomUUID().toString(), tieredCacheLoader); } assertTrue(removalListener.evictionsMetric.count() > 0); - // assertTrue(eventListener.enumMap.get(CacheStoreType.ON_HEAP).evictionsMetric.count() > 0); - // assertTrue(eventListener.enumMap.get(CacheStoreType.DISK).evictionsMetric.count() > 0); } public void testGetAndCount() throws Exception { @@ -433,7 +429,6 @@ public void testPut() { String key = UUID.randomUUID().toString(); String value = UUID.randomUUID().toString(); tieredSpilloverCache.put(key, value); - // assertEquals(1, eventListener.enumMap.get(CacheStoreType.ON_HEAP).cachedCount.count()); assertEquals(1, tieredSpilloverCache.count()); } @@ -532,12 +527,10 @@ public void testInvalidate() { String value = UUID.randomUUID().toString(); // First try to invalidate without the key present in cache. tieredSpilloverCache.invalidate(key); - // assertEquals(0, eventListener.enumMap.get(CacheStoreType.ON_HEAP).invalidationMetric.count()); // Now try to invalidate with the key present in onHeap cache. 
tieredSpilloverCache.put(key, value); tieredSpilloverCache.invalidate(key); - // assertEquals(1, eventListener.enumMap.get(CacheStoreType.ON_HEAP).invalidationMetric.count()); assertEquals(0, tieredSpilloverCache.count()); tieredSpilloverCache.put(key, value); @@ -547,7 +540,6 @@ public void testInvalidate() { assertEquals(2, tieredSpilloverCache.count()); // Again invalidate older key tieredSpilloverCache.invalidate(key); - // assertEquals(1, eventListener.enumMap.get(CacheStoreType.DISK).invalidationMetric.count()); assertEquals(1, tieredSpilloverCache.count()); } @@ -823,7 +815,6 @@ public String load(String key) { assertNotNull(actualValue.get()); countDownLatch1.await(); assertEquals(1, removalListener.evictionsMetric.count()); - // assertEquals(1, eventListener.enumMap.get(CacheStoreType.ON_HEAP).evictionsMetric.count()); assertEquals(1, tieredSpilloverCache.getOnHeapCache().count()); assertEquals(1, onDiskCache.count()); assertNotNull(onDiskCache.get(keyToBeEvicted)); @@ -862,7 +853,6 @@ private TieredSpilloverCache intializeTieredSpilloverCache( Settings settings, long diskDeliberateDelay ) { - // ICache.Factory onHeapCacheFactory = new OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory(); ICache.Factory onHeapCacheFactory = new OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory(); CacheConfig cacheConfig = new CacheConfig.Builder().setKeyType(String.class) .setKeyType(String.class) @@ -882,111 +872,6 @@ private TieredSpilloverCache intializeTieredSpilloverCache( } } -/** - * Wrapper OpenSearchOnHeap cache which tracks its own stats. 
- * @param Type of key - * @param Type of value - */ -class OpenSearchOnHeapCacheWrapper extends OpenSearchOnHeapCache { - - StatsHolder statsHolder = new StatsHolder(); - - public OpenSearchOnHeapCacheWrapper(Builder builder) { - super(builder); - } - - @Override - public V get(K key) { - V value = super.get(key); - if (value != null) { - statsHolder.hitCount.inc(); - } else { - statsHolder.missCount.inc(); - } - return value; - } - - @Override - public void put(K key, V value) { - super.put(key, value); - statsHolder.onCachedMetric.inc(); - } - - @Override - public V computeIfAbsent(K key, LoadAwareCacheLoader loader) throws Exception { - V value = super.computeIfAbsent(key, loader); - if (loader.isLoaded()) { - statsHolder.missCount.inc(); - statsHolder.onCachedMetric.inc(); - } else { - statsHolder.hitCount.inc(); - } - return value; - } - - @Override - public void invalidate(K key) { - super.invalidate(key); - } - - @Override - public void invalidateAll() { - super.invalidateAll(); - } - - @Override - public Iterable keys() { - return super.keys(); - } - - @Override - public long count() { - return super.count(); - } - - @Override - public void refresh() { - super.refresh(); - } - - @Override - public void close() {} - - @Override - public void onRemoval(RemovalNotification notification) { - super.onRemoval(notification); - } - - /** - * Factory for the wrapper cache class - */ - static class OpenSearchOnHeapCacheWrapperFactory extends OpenSearchOnHeapCacheFactory { - - @Override - public ICache create(CacheConfig config, CacheType cacheType, Map cacheFactories) { - Map> settingList = OpenSearchOnHeapCacheSettings.getSettingListForCacheType(cacheType); - Settings settings = config.getSettings(); - return new OpenSearchOnHeapCacheWrapper<>( - (Builder) new OpenSearchOnHeapCache.Builder().setMaximumWeightInBytes( - ((ByteSizeValue) settingList.get(MAXIMUM_SIZE_IN_BYTES_KEY).get(settings)).getBytes() - ).setWeigher(config.getWeigher()) - ); - } - - @Override - 
public String getCacheName() { - return super.getCacheName(); - } - } - - class StatsHolder { - CounterMetric hitCount = new CounterMetric(); - CounterMetric missCount = new CounterMetric(); - CounterMetric evictionMetric = new CounterMetric(); - CounterMetric onCachedMetric = new CounterMetric(); - } -} - class MockOnDiskCache implements ICache { Map cache; @@ -1008,8 +893,6 @@ public V get(K key) { @Override public void put(K key, V value) { if (this.cache.size() >= maxSize) { // For simplification - // eventListener.onRemoval(new StoreAwareCacheRemovalNotification<>(key, value, RemovalReason.EVICTED, - // CacheStoreType.DISK)); return; } try { @@ -1018,11 +901,10 @@ public void put(K key, V value) { throw new RuntimeException(e); } this.cache.put(key, value); - // eventListener.onCached(key, value, CacheStoreType.DISK); } @Override - public V computeIfAbsent(K key, LoadAwareCacheLoader loader) throws Exception { + public V computeIfAbsent(K key, LoadAwareCacheLoader loader) { V value = cache.computeIfAbsent(key, key1 -> { try { return loader.load(key); @@ -1030,20 +912,11 @@ public V computeIfAbsent(K key, LoadAwareCacheLoader loader) throws Except throw new RuntimeException(e); } }); - // if (!loader.isLoaded()) { - // eventListener.onHit(key, value, CacheStoreType.DISK); - // } else { - // eventListener.onMiss(key, CacheStoreType.DISK); - // eventListener.onCached(key, value, CacheStoreType.DISK); - // } return value; } @Override public void invalidate(K key) { - if (this.cache.containsKey(key)) { - // eventListener.onRemoval(new StoreAwareCacheRemovalNotification<>(key, null, RemovalReason.INVALIDATED, CacheStoreType.DISK)); - } this.cache.remove(key); } From e6ae90e1ea7d3fa3b644046b3f355eb3f7d87e37 Mon Sep 17 00:00:00 2001 From: Sagar Upadhyaya Date: Thu, 29 Feb 2024 15:27:05 -0800 Subject: [PATCH 20/22] Fixing ehcache test concurrentPut Signed-off-by: Sagar Upadhyaya --- .../org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java | 1 + 1 file changed, 1 
insertion(+) diff --git a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java index 862bebba7e628..f00bca0285a29 100644 --- a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java +++ b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java @@ -137,6 +137,7 @@ public void testConcurrentPut() throws Exception { ICache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") .setThreadPoolAlias("ehcacheTest") .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") + .setIsEventListenerModeSync(true) // For accurate count .setKeyType(String.class) .setValueType(String.class) .setCacheType(CacheType.INDICES_REQUEST_CACHE) From b866ed29475fc6703941c8b1039ffea059806386 Mon Sep 17 00:00:00 2001 From: Sagar Upadhyaya Date: Thu, 29 Feb 2024 19:05:42 -0800 Subject: [PATCH 21/22] Fixing flaky ehcache test Signed-off-by: Sagar Upadhyaya --- .../cache/EhcacheDiskCacheSettings.java | 16 +++++++++++++++- .../cache/store/disk/EhcacheDiskCache.java | 2 ++ .../cache/store/disk/EhCacheDiskCacheTests.java | 9 +++++++++ 3 files changed, 26 insertions(+), 1 deletion(-) diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheDiskCacheSettings.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheDiskCacheSettings.java index 47bbf2ce6c223..14670579f56e3 100644 --- a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheDiskCacheSettings.java +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheDiskCacheSettings.java @@ -102,6 +102,14 @@ public class EhcacheDiskCacheSettings { (key) -> Setting.longSetting(key, 1073741824L, NodeScope) ); + /** + * Disk cache listener mode setting. 
+ */ + public static final Setting.AffixSetting DISK_CACHE_LISTENER_MODE_SYNC_SETTING = Setting.suffixKeySetting( + EhcacheDiskCache.EhcacheDiskCacheFactory.EHCACHE_DISK_CACHE_NAME + ".is_event_listener_sync", + (key) -> Setting.boolSetting(key, false, NodeScope) + ); + /** * Key for disk segment. */ @@ -138,6 +146,10 @@ public class EhcacheDiskCacheSettings { * Key for storage path. */ public static final String DISK_STORAGE_PATH_KEY = "disk_storage_path"; + /** + * Key for listener mode + */ + public static final String DISK_LISTENER_MODE_SYNC_KEY = "disk_listener_mode"; /** * Map of key to setting. @@ -160,7 +172,9 @@ public class EhcacheDiskCacheSettings { DISK_STORAGE_PATH_KEY, DISK_STORAGE_PATH_SETTING, DISK_MAX_SIZE_IN_BYTES_KEY, - DISK_CACHE_MAX_SIZE_IN_BYTES_SETTING + DISK_CACHE_MAX_SIZE_IN_BYTES_SETTING, + DISK_LISTENER_MODE_SYNC_KEY, + DISK_CACHE_LISTENER_MODE_SYNC_SETTING ); /** diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java index 666d89e98127f..ddfd5b838e927 100644 --- a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java @@ -60,6 +60,7 @@ import static org.opensearch.cache.EhcacheDiskCacheSettings.DISK_CACHE_ALIAS_KEY; import static org.opensearch.cache.EhcacheDiskCacheSettings.DISK_CACHE_EXPIRE_AFTER_ACCESS_KEY; +import static org.opensearch.cache.EhcacheDiskCacheSettings.DISK_LISTENER_MODE_SYNC_KEY; import static org.opensearch.cache.EhcacheDiskCacheSettings.DISK_MAX_SIZE_IN_BYTES_KEY; import static org.opensearch.cache.EhcacheDiskCacheSettings.DISK_SEGMENT_KEY; import static org.opensearch.cache.EhcacheDiskCacheSettings.DISK_STORAGE_PATH_KEY; @@ -475,6 +476,7 @@ public ICache create(CacheConfig config, CacheType cacheType, Settings settings = config.getSettings(); return 
new Builder().setStoragePath((String) settingList.get(DISK_STORAGE_PATH_KEY).get(settings)) .setDiskCacheAlias((String) settingList.get(DISK_CACHE_ALIAS_KEY).get(settings)) + .setIsEventListenerModeSync((Boolean) settingList.get(DISK_LISTENER_MODE_SYNC_KEY).get(settings)) .setCacheType(cacheType) .setKeyType((config.getKeyType())) .setValueType(config.getValueType()) diff --git a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java index f00bca0285a29..d5f5fbb9293bc 100644 --- a/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java +++ b/plugins/cache-ehcache/src/test/java/org/opensearch/cache/store/disk/EhCacheDiskCacheTests.java @@ -32,6 +32,7 @@ import java.util.concurrent.ExecutionException; import java.util.concurrent.Phaser; +import static org.opensearch.cache.EhcacheDiskCacheSettings.DISK_LISTENER_MODE_SYNC_KEY; import static org.opensearch.cache.EhcacheDiskCacheSettings.DISK_MAX_SIZE_IN_BYTES_KEY; import static org.opensearch.cache.EhcacheDiskCacheSettings.DISK_STORAGE_PATH_KEY; import static org.hamcrest.CoreMatchers.instanceOf; @@ -46,6 +47,7 @@ public void testBasicGetAndPut() throws IOException { try (NodeEnvironment env = newNodeEnvironment(settings)) { ICache ehcacheTest = new EhcacheDiskCache.Builder().setThreadPoolAlias("ehcacheTest") .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") + .setIsEventListenerModeSync(true) .setKeyType(String.class) .setValueType(String.class) .setCacheType(CacheType.INDICES_REQUEST_CACHE) @@ -100,6 +102,12 @@ public void testBasicGetAndPutUsingFactory() throws IOException { .getKey(), env.nodePaths()[0].indicesPath.toString() + "/request_cache" ) + .put( + EhcacheDiskCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE) + .get(DISK_LISTENER_MODE_SYNC_KEY) + .getKey(), + true + ) .build() ) 
.build(), @@ -225,6 +233,7 @@ public void testEhcacheKeyIterator() throws Exception { ICache ehcacheTest = new EhcacheDiskCache.Builder().setDiskCacheAlias("test1") .setThreadPoolAlias("ehcacheTest") .setStoragePath(env.nodePaths()[0].indicesPath.toString() + "/request_cache") + .setIsEventListenerModeSync(true) .setKeyType(String.class) .setValueType(String.class) .setCacheType(CacheType.INDICES_REQUEST_CACHE) From 081e269adcbb95be7932ce541b3313c39e50c879 Mon Sep 17 00:00:00 2001 From: Sagar Upadhyaya Date: Thu, 29 Feb 2024 19:45:15 -0800 Subject: [PATCH 22/22] Fixing twice registration of same setting Signed-off-by: Sagar Upadhyaya --- .../java/org/opensearch/cache/EhcacheDiskCacheSettings.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheDiskCacheSettings.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheDiskCacheSettings.java index 14670579f56e3..837fd6b268ce6 100644 --- a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheDiskCacheSettings.java +++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheDiskCacheSettings.java @@ -161,8 +161,6 @@ public class EhcacheDiskCacheSettings { DISK_CACHE_EXPIRE_AFTER_ACCESS_SETTING, DISK_CACHE_ALIAS_KEY, DISK_CACHE_ALIAS_SETTING, - DISK_SEGMENTS_KEY, - DISK_SEGMENTS_SETTING, DISK_WRITE_CONCURRENCY_KEY, DISK_WRITE_CONCURRENCY_SETTING, DISK_WRITE_MAXIMUM_THREADS_KEY,