/*
 * SPDX-License-Identifier: Apache-2.0
 *
 * The OpenSearch Contributors require contributions made to
 * this file be licensed under the Apache-2.0 license or a
 * compatible open source license.
 */

/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

/*
 * Modifications Copyright OpenSearch Contributors. See
 * GitHub history for details.
 */

package org.opensearch.index.codec.customcodecs;

import org.apache.logging.log4j.LogManager;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.SegmentReader;
import org.apache.lucene.store.Directory;
import org.apache.lucene.tests.util.LuceneTestCase.SuppressCodecs;
import org.opensearch.common.settings.Settings;
import org.opensearch.env.Environment;
import org.opensearch.index.IndexSettings;
import org.opensearch.index.analysis.IndexAnalyzers;
import org.opensearch.index.codec.CodecService;
import org.opensearch.index.codec.CodecServiceConfig;
import org.opensearch.index.codec.CodecServiceFactory;
import org.opensearch.index.codec.CodecSettings;
import org.opensearch.index.mapper.MapperService;
import org.opensearch.index.similarity.SimilarityService;
import org.opensearch.indices.mapper.MapperRegistry;
import org.opensearch.plugins.MapperPlugin;
import org.opensearch.test.IndexSettingsModule;
import org.opensearch.test.OpenSearchTestCase;
import org.junit.Before;

import java.io.IOException;
import java.util.Collections;
import java.util.Optional;

import static org.opensearch.index.engine.EngineConfig.INDEX_CODEC_COMPRESSION_LEVEL_SETTING;
import static org.hamcrest.Matchers.is;
import static org.junit.Assume.assumeThat;

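/**
 * Unit tests for the QAT-backed stored fields codecs ({@code qat_lz4} and {@code qat_deflate}):
 * verifies codec registration, compression-level handling, and the mode attribute recorded in
 * segment info. All tests are skipped when the QAT library is not available.
 */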
@SuppressCodecs("*") // we test against specific codecs, so we must never get a random one here!
public class QatCodecTests extends OpenSearchTestCase {

    private CustomCodecPlugin plugin;

    @Before
    public void setup() {
        plugin = new CustomCodecPlugin();
    }

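    /** The {@code qat_lz4} codec should use the QAT_LZ4 stored fields mode with the default compression level. */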
    public void testQatLz4() throws Exception {
        assumeThat("Qat library is available", QatZipperFactory.isQatAvailable(), is(true));
        Codec codec = createCodecService(false).codec("qat_lz4");
        assertStoredFieldsCompressionEquals(Lucene99QatCodec.Mode.QAT_LZ4, codec);
        Lucene99QatStoredFieldsFormat storedFieldsFormat = (Lucene99QatStoredFieldsFormat) codec.storedFieldsFormat();
        assertEquals(Lucene99CustomCodec.DEFAULT_COMPRESSION_LEVEL, storedFieldsFormat.getCompressionMode().getCompressionLevel());
    }

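    /** The {@code qat_deflate} codec should use the QAT_DEFLATE stored fields mode with the default compression level. */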
    public void testQatDeflate() throws Exception {
        assumeThat("Qat library is available", QatZipperFactory.isQatAvailable(), is(true));
        Codec codec = createCodecService(false).codec("qat_deflate");
        assertStoredFieldsCompressionEquals(Lucene99QatCodec.Mode.QAT_DEFLATE, codec);
        Lucene99QatStoredFieldsFormat storedFieldsFormat = (Lucene99QatStoredFieldsFormat) codec.storedFieldsFormat();
        assertEquals(Lucene99CustomCodec.DEFAULT_COMPRESSION_LEVEL, storedFieldsFormat.getCompressionMode().getCompressionLevel());
    }

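    /** An explicit {@code index.codec.compression_level} should be reflected by the qat_lz4 stored fields format. */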
    public void testQatLz4WithCompressionLevel() throws Exception {
        assumeThat("Qat library is available", QatZipperFactory.isQatAvailable(), is(true));
        int randomCompressionLevel = randomIntBetween(1, 6);
        Codec codec = createCodecService(randomCompressionLevel, "qat_lz4").codec("qat_lz4");
        assertStoredFieldsCompressionEquals(Lucene99QatCodec.Mode.QAT_LZ4, codec);
        Lucene99QatStoredFieldsFormat storedFieldsFormat = (Lucene99QatStoredFieldsFormat) codec.storedFieldsFormat();
        assertEquals(randomCompressionLevel, storedFieldsFormat.getCompressionMode().getCompressionLevel());
    }

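    /** An explicit {@code index.codec.compression_level} should be reflected by the qat_deflate stored fields format. */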
    public void testQatDeflateWithCompressionLevel() throws Exception {
        assumeThat("Qat library is available", QatZipperFactory.isQatAvailable(), is(true));
        int randomCompressionLevel = randomIntBetween(1, 6);
        Codec codec = createCodecService(randomCompressionLevel, "qat_deflate").codec("qat_deflate");
        assertStoredFieldsCompressionEquals(Lucene99QatCodec.Mode.QAT_DEFLATE, codec);
        Lucene99QatStoredFieldsFormat storedFieldsFormat = (Lucene99QatStoredFieldsFormat) codec.storedFieldsFormat();
        assertEquals(randomCompressionLevel, storedFieldsFormat.getCompressionMode().getCompressionLevel());
    }

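    /** Both QAT codecs should report support for the compression level index setting. */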
    public void testQatCompressionLevelSupport() throws Exception {
        assumeThat("Qat library is available", QatZipperFactory.isQatAvailable(), is(true));
        CodecService codecService = createCodecService(false);
        CodecSettings qatDeflateCodec = (CodecSettings) codecService.codec("qat_deflate");
        CodecSettings qatLz4Codec = (CodecSettings) codecService.codec("qat_lz4");
        assertTrue(qatDeflateCodec.supports(INDEX_CODEC_COMPRESSION_LEVEL_SETTING));
        assertTrue(qatLz4Codec.supports(INDEX_CODEC_COMPRESSION_LEVEL_SETTING));
    }

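    /** qat_lz4 should still resolve with the default compression level when the MapperService is null. */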
    public void testQatLz4MapperServiceNull() throws Exception {
        assumeThat("Qat library is available", QatZipperFactory.isQatAvailable(), is(true));
        Codec codec = createCodecService(true).codec("qat_lz4");
        assertStoredFieldsCompressionEquals(Lucene99QatCodec.Mode.QAT_LZ4, codec);
        Lucene99QatStoredFieldsFormat storedFieldsFormat = (Lucene99QatStoredFieldsFormat) codec.storedFieldsFormat();
        assertEquals(Lucene99QatCodec.DEFAULT_COMPRESSION_LEVEL, storedFieldsFormat.getCompressionMode().getCompressionLevel());
    }

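    /** qat_deflate should still resolve with the default compression level when the MapperService is null. */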
    public void testQatDeflateMapperServiceNull() throws Exception {
        assumeThat("Qat library is available", QatZipperFactory.isQatAvailable(), is(true));
        Codec codec = createCodecService(true).codec("qat_deflate");
        assertStoredFieldsCompressionEquals(Lucene99QatCodec.Mode.QAT_DEFLATE, codec);
        Lucene99QatStoredFieldsFormat storedFieldsFormat = (Lucene99QatStoredFieldsFormat) codec.storedFieldsFormat();
        assertEquals(Lucene99QatCodec.DEFAULT_COMPRESSION_LEVEL, storedFieldsFormat.getCompressionMode().getCompressionLevel());
    }

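    /** Writes a single-document segment with the given codec and asserts the stored fields mode recorded in the segment attributes. */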
    private void assertStoredFieldsCompressionEquals(Lucene99QatCodec.Mode expected, Codec actual) throws Exception {
        assumeThat("Qat library is available", QatZipperFactory.isQatAvailable(), is(true));
        SegmentReader sr = getSegmentReader(actual);
        String v = sr.getSegmentInfo().info.getAttribute(Lucene99QatStoredFieldsFormat.MODE_KEY);
        assertNotNull(v);
        assertEquals(expected, Lucene99QatCodec.Mode.valueOf(v));
    }

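    /** Creates a CodecService with default node settings, optionally without a MapperService. */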
    private CodecService createCodecService(boolean isMapperServiceNull) throws IOException {
        Settings nodeSettings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build();
        if (isMapperServiceNull) {
            return new CustomCodecService(
                null,
                IndexSettingsModule.newIndexSettings("_na", nodeSettings, Lucene99QatCodec.INDEX_CODEC_QAT_MODE_SETTING),
                LogManager.getLogger("test")
            );
        }
        return buildCodecService(nodeSettings);
    }

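    /** Creates a CodecService with an explicit {@code index.codec} and {@code index.codec.compression_level}. */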
    private CodecService createCodecService(int randomCompressionLevel, String codec) throws IOException {
        Settings nodeSettings = Settings.builder()
            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
            .put("index.codec", codec)
            .put("index.codec.compression_level", randomCompressionLevel)
            .build();
        return buildCodecService(nodeSettings);
    }

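    /** Builds a CodecService backed by a real MapperService, preferring the plugin's custom codec service factory when present. */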
    private CodecService buildCodecService(Settings nodeSettings) throws IOException {
        IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(
            "_na",
            nodeSettings,
            Lucene99QatCodec.INDEX_CODEC_QAT_MODE_SETTING
        );
        SimilarityService similarityService = new SimilarityService(indexSettings, null, Collections.emptyMap());
        IndexAnalyzers indexAnalyzers = createTestAnalysis(indexSettings, nodeSettings).indexAnalyzers;
        MapperRegistry mapperRegistry = new MapperRegistry(Collections.emptyMap(), Collections.emptyMap(), MapperPlugin.NOOP_FIELD_FILTER);
        MapperService service = new MapperService(
            indexSettings,
            indexAnalyzers,
            xContentRegistry(),
            similarityService,
            mapperRegistry,
            () -> null,
            () -> false,
            null
        );

        Optional<CodecServiceFactory> customCodecServiceFactory = plugin.getCustomCodecServiceFactory(indexSettings);
        if (customCodecServiceFactory.isPresent()) {
            return customCodecServiceFactory.get().createCodecService(new CodecServiceConfig(indexSettings, service, logger));
        }
        return new CustomCodecService(service, indexSettings, LogManager.getLogger("test"));
    }

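    /** Indexes one empty document with the given codec and returns the resulting SegmentReader (used only to read segment metadata). */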
    private SegmentReader getSegmentReader(Codec codec) throws IOException {
        Directory dir = newDirectory();
        IndexWriterConfig iwc = newIndexWriterConfig(null);
        iwc.setCodec(codec);
        IndexWriter iw = new IndexWriter(dir, iwc);
        iw.addDocument(new Document());
        iw.commit();
        iw.close();
        DirectoryReader ir = DirectoryReader.open(dir);
        SegmentReader sr = (SegmentReader) ir.leaves().get(0).reader();
        ir.close();
        dir.close();
        return sr;
    }
}