|
18 | 18 |
|
19 | 19 | import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope; |
20 | 20 |
|
| 21 | +import org.apache.kafka.common.TopicPartition; |
21 | 22 | import org.apache.kafka.connect.data.Schema; |
22 | 23 | import org.apache.kafka.connect.data.Struct; |
23 | 24 | import org.apache.kafka.connect.sink.SinkRecord; |
24 | 25 | import org.elasticsearch.test.ESIntegTestCase; |
25 | 26 | import org.elasticsearch.test.InternalTestCluster; |
26 | 27 | import org.junit.Test; |
27 | 28 |
|
28 | | -import java.util.ArrayList; |
29 | 29 | import java.util.Arrays; |
30 | | -import java.util.Collection; |
31 | 30 | import java.util.HashMap; |
32 | 31 | import java.util.HashSet; |
33 | 32 | import java.util.Map; |
| 33 | +import java.util.Collection; |
| 34 | +import java.util.ArrayList; |
| 35 | +import java.util.Collections; |
| 36 | + |
34 | 37 |
|
35 | 38 | @ThreadLeakScope(ThreadLeakScope.Scope.NONE) |
36 | 39 | public class ElasticsearchSinkTaskTest extends ElasticsearchSinkTestBase { |
37 | 40 |
|
  // Topic name deliberately containing uppercase characters, to exercise the
  // sink's lowercasing of topic names when deriving Elasticsearch index names.
  private static final String TOPIC_IN_CAPS = "AnotherTopicInCaps";
  // Arbitrary (high) partition number for the test TopicPartition.
  private static final int PARTITION_113 = 113;
  private static final TopicPartition TOPIC_IN_CAPS_PARTITION = new TopicPartition(TOPIC_IN_CAPS, PARTITION_113);
38 | 45 | private Map<String, String> createProps() { |
39 | 46 | Map<String, String> props = new HashMap<>(); |
40 | 47 | props.put(ElasticsearchSinkConnectorConfig.TYPE_NAME_CONFIG, TYPE); |
@@ -72,4 +79,36 @@ public void testPutAndFlush() throws Exception { |
72 | 79 | verifySearchResults(records, true, false); |
73 | 80 | } |
74 | 81 |
|
| 82 | + @Test |
| 83 | + public void testCreateAndWriteToIndexForTopicWithUppercaseCharacters() { |
| 84 | + // We should as well test that writing a record with a previously un seen record will create |
| 85 | + // an index following the required elasticsearch requirements of lowercasing. |
| 86 | + InternalTestCluster cluster = ESIntegTestCase.internalCluster(); |
| 87 | + cluster.ensureAtLeastNumDataNodes(3); |
| 88 | + Map<String, String> props = createProps(); |
| 89 | + |
| 90 | + ElasticsearchSinkTask task = new ElasticsearchSinkTask(); |
| 91 | + |
| 92 | + String key = "key"; |
| 93 | + Schema schema = createSchema(); |
| 94 | + Struct record = createRecord(schema); |
| 95 | + |
| 96 | + SinkRecord sinkRecord = new SinkRecord(TOPIC_IN_CAPS, |
| 97 | + PARTITION_113, |
| 98 | + Schema.STRING_SCHEMA, |
| 99 | + key, |
| 100 | + schema, |
| 101 | + record, |
| 102 | + 0 ); |
| 103 | + |
| 104 | + try { |
| 105 | + task.start(props, client); |
| 106 | + task.open(new HashSet<>(Collections.singletonList(TOPIC_IN_CAPS_PARTITION))); |
| 107 | + task.put(Collections.singleton(sinkRecord)); |
| 108 | + } catch (Exception ex) { |
| 109 | + fail("A topic name not in lowercase can not be used as index name in Elasticsearch"); |
| 110 | + } finally { |
| 111 | + task.stop(); |
| 112 | + } |
| 113 | + } |
75 | 114 | } |
0 commit comments