/*
 * SPDX-License-Identifier: Apache-2.0
 *
 * The OpenSearch Contributors require contributions made to
 * this file be licensed under the Apache-2.0 license or a
 * compatible open source license.
 *
 * Modifications Copyright OpenSearch Contributors. See
 * GitHub history for details.
 */

/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
*/
package org.opensearch.hadoop.serialization;

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.opensearch.hadoop.OpenSearchHadoopIllegalArgumentException;
import org.opensearch.hadoop.cfg.Settings;
import org.opensearch.hadoop.hive.HiveFieldExtractor;
import org.opensearch.hadoop.serialization.HiveTypeToJsonTest.MyHiveType;
import org.opensearch.hadoop.serialization.field.ConstantFieldExtractor;
import org.opensearch.hadoop.serialization.field.FieldExtractor;
import org.opensearch.hadoop.util.TestSettings;
import org.junit.Test;

import static org.junit.Assert.*;

import static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.*;

/**
 * Tests for {@link HiveFieldExtractor} field extraction against Hive struct
 * values ({@link MyHiveType}) and plain maps.
 */
public class HiveFieldExtractorTests {

    /**
     * Configures a {@link HiveFieldExtractor} to extract the given field name
     * and applies it to {@code target}, returning the extracted value (or
     * {@link FieldExtractor#NOT_FOUND}).
     *
     * @param field  dotted field path to extract (stored under
     *               {@link ConstantFieldExtractor#PROPERTY})
     * @param target object to extract the field from
     * @return the extracted value, or {@code FieldExtractor.NOT_FOUND}
     */
    private Object extract(String field, Object target) {
        TestSettings cfg = new TestSettings();
        cfg.setProperty(ConstantFieldExtractor.PROPERTY, field);
        // The override signature must stay raw to match the superclass method
        // (its declared parameterization is not visible here).
        ConstantFieldExtractor extractor = new HiveFieldExtractor() {
            @Override
            public void processField(Settings settings, List fl) {
                fieldNames = fl;
            }
        };
        extractor.setSettings(cfg);
        return extractor.field(target);
    }

    /** Extracting a missing key from an empty map yields NOT_FOUND. */
    @Test
    public void testHiveFieldExtractorNestedNotFound() throws Exception {
        Map<String, Object> m = new LinkedHashMap<String, Object>();
        assertEquals(FieldExtractor.NOT_FOUND, extract("key", m));
    }

    /**
     * Extracting a nested ("foo.bar") path from a struct-of-structs is
     * expected to fail with {@link OpenSearchHadoopIllegalArgumentException}
     * (the assertEquals is never reached if the exception fires —
     * NOTE(review): presumably nested navigation is unsupported; confirm).
     */
    @Test(expected = OpenSearchHadoopIllegalArgumentException.class)
    public void testHiveFieldExtractorNested() throws Exception {
        List<String> nested = Arrays.asList("bar", "bor");
        List<TypeInfo> types = Arrays.<TypeInfo> asList(stringTypeInfo, intTypeInfo);
        MyHiveType struct = new MyHiveType(Arrays.<Object> asList(new Text("found"), new IntWritable(2)),
                getStructTypeInfo(nested, types));

        List<String> topNames = Arrays.asList("foo", "far");
        List<TypeInfo> topTypes = Arrays.<TypeInfo> asList(getStructTypeInfo(nested, types), intTypeInfo);
        MyHiveType topStruct = new MyHiveType(Arrays.<Object> asList(struct, new IntWritable(1)),
                getStructTypeInfo(topNames, topTypes));

        assertEquals(new Text("found"), extract("foo.bar", topStruct));
    }

    /** Extracting a top-level field from a struct returns its value. */
    @Test
    public void testHiveFieldExtractorTopLevel() throws Exception {
        List<String> names = Arrays.asList("one", "two");
        List<TypeInfo> types = Arrays.<TypeInfo> asList(stringTypeInfo, intTypeInfo);
        MyHiveType struct = new MyHiveType(Arrays.<Object> asList(new Text("first"), new IntWritable(2)),
                getStructTypeInfo(names, types));

        assertEquals("first", extract("one", struct));
    }
}