.../org/apache/hadoop/hbase/kafka/DumpToStringListener.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hbase.kafka;

+import java.time.Duration;
 import java.util.Arrays;
 import java.util.Iterator;
 import java.util.Properties;
@@ -36,13 +37,12 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

-import org.apache.hbase.thirdparty.org.apache.commons.cli.BasicParser;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
+import org.apache.hbase.thirdparty.org.apache.commons.cli.DefaultParser;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.HelpFormatter;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.Options;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.ParseException;
-

 /**
  * connects to kafka and reads from the passed in topics. Parses each message into an avro object
  * and dumps it to the console.
@@ -52,7 +52,6 @@ public final class DumpToStringListener {
   private static final Logger LOG = LoggerFactory.getLogger(DumpToStringListener.class);

   private DumpToStringListener(){
-
   }

   public static void main(String[] args) {
@@ -65,12 +64,14 @@ public static void main(String[] args) {
options.addRequiredOption("t", "kafkatopics", true,"Kafka Topics "
+ "to subscribe to (comma delimited)");
CommandLine commandLine = null;

try {
commandLine = new BasicParser().parse(options, args);
commandLine = new DefaultParser().parse(options, args);
} catch (ParseException e) {
LOG.error("Could not parse: ", e);
printUsageAndExit(options, -1);
}

SpecificDatumReader<HBaseKafkaEvent> dreader =
new SpecificDatumReader<>(HBaseKafkaEvent.SCHEMA$);

@@ -81,11 +82,11 @@ public static void main(String[] args) {
props.put("key.deserializer", ByteArrayDeserializer.class.getName());
props.put("value.deserializer", ByteArrayDeserializer.class.getName());

try (KafkaConsumer<byte[], byte[]> consumer = new KafkaConsumer<>(props);){
try (KafkaConsumer<byte[], byte[]> consumer = new KafkaConsumer<>(props)) {
consumer.subscribe(Arrays.stream(topic.split(",")).collect(Collectors.toList()));

while (true) {
ConsumerRecords<byte[], byte[]> records = consumer.poll(10000);
ConsumerRecords<byte[], byte[]> records = consumer.poll(Duration.ofMillis(10000));
Iterator<ConsumerRecord<byte[], byte[]>> it = records.iterator();
while (it.hasNext()) {
ConsumerRecord<byte[], byte[]> record = it.next();
@@ -108,5 +109,4 @@ private static void printUsageAndExit(Options options, int exitCode) {
"[-k <kafka brokers (comma delmited)>] \n", true);
System.exit(exitCode);
}

}
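For context on the poll change above: KafkaConsumer.poll(long) is deprecated in recent Kafka clients because its timeout only bounded the wait for records, so the call could still block indefinitely on metadata fetches; poll(Duration) bounds the entire call. A minimal, self-contained sketch of the migrated consumer loop, assuming a broker at localhost:9092 and a hypothetical topic name hbase-events (the real tool reads brokers and topics from its command-line options):

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;

public class PollMigrationSketch {
  public static void main(String[] args) {
    Properties props = new Properties();
    props.put("bootstrap.servers", "localhost:9092");  // assumed local broker
    props.put("group.id", "dump-to-string-sketch");    // hypothetical group id
    props.put("key.deserializer", ByteArrayDeserializer.class.getName());
    props.put("value.deserializer", ByteArrayDeserializer.class.getName());

    try (KafkaConsumer<byte[], byte[]> consumer = new KafkaConsumer<>(props)) {
      consumer.subscribe(Collections.singletonList("hbase-events")); // hypothetical topic
      while (true) {
        // poll(Duration) bounds the whole call, metadata fetches included;
        // the deprecated poll(long) only bounded the wait for records.
        ConsumerRecords<byte[], byte[]> records = consumer.poll(Duration.ofMillis(10000));
        for (ConsumerRecord<byte[], byte[]> record : records) {
          System.out.println("offset=" + record.offset()
              + ", " + record.serializedValueSize() + " value bytes");
        }
      }
    }
  }
}

The try-with-resources change in the same hunk is purely cosmetic: a trailing semicolon in the resource list is legal Java, just untidy. The second file below applies the same BasicParser-to-DefaultParser swap.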
@@ -49,8 +49,8 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

-import org.apache.hbase.thirdparty.org.apache.commons.cli.BasicParser;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
+import org.apache.hbase.thirdparty.org.apache.commons.cli.DefaultParser;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.HelpFormatter;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.Options;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.ParseException;
@@ -162,7 +162,7 @@ public static void main(String[] args) throws Exception {
     String[] restArgs = parser.getRemainingArgs();

     try {
-      commandLine = new BasicParser().parse(options, restArgs);
+      commandLine = new DefaultParser().parse(options, restArgs);
     } catch (ParseException e) {
       LOG.error("Could not parse: ", e);
       printUsageAndExit(options, -1);
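BasicParser was deprecated in commons-cli 1.3, where DefaultParser was introduced to replace both BasicParser and GnuParser; parse(Options, String[]) keeps the same signature, so the swap in both files is drop-in. A minimal sketch using the plain commons-cli coordinates (the PR itself uses HBase's relocated copy under org.apache.hbase.thirdparty); the "-k" long option name and the help output are assumptions, mirroring the usage string shown above:

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class DefaultParserSketch {
  public static void main(String[] args) {
    Options options = new Options();
    // Same style of required options as DumpToStringListener above;
    // the "kafkabrokers" long name is a hypothetical stand-in.
    options.addRequiredOption("k", "kafkabrokers", true, "Kafka brokers (comma delimited)");
    options.addRequiredOption("t", "kafkatopics", true, "Kafka topics to subscribe to (comma delimited)");
    try {
      // DefaultParser supersedes the deprecated BasicParser and GnuParser.
      CommandLine commandLine = new DefaultParser().parse(options, args);
      System.out.println("topics = " + commandLine.getOptionValue("t"));
    } catch (ParseException e) {
      new HelpFormatter().printHelp("DefaultParserSketch", options, true);
      System.exit(-1);
    }
  }
}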
6 changes: 6 additions & 0 deletions pom.xml
@@ -148,6 +148,7 @@
     <commons-io.version>2.5</commons-io.version>
     <avro.version>1.7.7</avro.version>
     <commons-lang3.version>3.6</commons-lang3.version>
+    <commons-text.version>1.8</commons-text.version>
     <!--This property is for hadoops netty. HBase netty
       comes in via hbase-thirdparty hbase-shaded-netty-->
     <netty.hadoop.version>3.6.2.Final</netty.hadoop.version>
@@ -173,6 +174,11 @@
       <artifactId>commons-io</artifactId>
       <version>${commons-io.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-text</artifactId>
+      <version>${commons-text.version}</version>
+    </dependency>
     <!-- Avro dependencies we mostly get transitively, manual version coallescing -->
     <dependency>
       <groupId>org.apache.avro</groupId>
5 changes: 5 additions & 0 deletions spark/hbase-spark-it/pom.xml
@@ -282,6 +282,11 @@
       <scope>test</scope>
     </dependency>

+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-text</artifactId>
+    </dependency>
+
Comment (Contributor):

This dependency is used only in the unit tests and only in one place. Is it really required just for generating a random string?

Reply (Author):

Good point. I just moved from the deprecated one to the new one. I have completely removed this dependency with the latest changes.

     <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
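A note on the missing <version> in the block added above: that only resolves if the parent pins the version centrally. The root pom.xml change earlier in this PR adds the commons-text.version property and a dependency entry next to the other manually managed versions, which suggests a layout along these lines (a sketch; whether the root pom's dependency section sits inside <dependencyManagement> is an assumption, as those tags fall outside the visible hunks):

<!-- Root pom: pin the version once, assumed to be inside <dependencyManagement> -->
<dependencyManagement>
  <dependencies>
    <dependency>
      <groupId>org.apache.commons</groupId>
      <artifactId>commons-text</artifactId>
      <version>${commons-text.version}</version>
    </dependency>
  </dependencies>
</dependencyManagement>

<!-- Child module (spark/hbase-spark-it): no <version>, inherited from the parent -->
<dependency>
  <groupId>org.apache.commons</groupId>
  <artifactId>commons-text</artifactId>
</dependency>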
@@ -28,7 +28,8 @@
 import java.util.Map;
 import java.util.Random;
 import java.util.Set;
-import org.apache.commons.lang3.RandomStringUtils;
+
+import org.apache.commons.text.RandomStringGenerator;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
@@ -225,7 +226,7 @@ public Iterator<List<byte[]>> call(Integer v1, Iterator v2) throws Exception {
         List<byte[]> tmp1 = Arrays.asList(rk, CHAIN_FAM, chainIdArray, Bytes.toBytes(nextRow));
         List<byte[]> tmp2 = Arrays.asList(rk, SORT_FAM, chainIdArray, Bytes.toBytes(i));
         List<byte[]> tmp3 = Arrays.asList(rk, DATA_FAM, chainIdArray, Bytes.toBytes(
-            RandomStringUtils.randomAlphabetic(50)));
+            new RandomStringGenerator.Builder().withinRange('a', 'z').build().generate(50)));
         res.add(tmp1);
         res.add(tmp2);
         res.add(tmp3);
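On this last change: RandomStringUtils lives in commons-lang3 and was deprecated in favor of commons-text's RandomStringGenerator, which is the move the author describes in the conversation above. Two details worth noting, sketched below: randomAlphabetic(50) drew from both a-z and A-Z while withinRange('a', 'z') yields lowercase only (harmless for filler test data), and the generator is reusable, so constructing a new Builder per record as the diff does is redundant work. A minimal comparison, assuming commons-text 1.8 as pinned in the pom:

import org.apache.commons.text.RandomStringGenerator;

public class RandomStringSketch {
  public static void main(String[] args) {
    // Build once and reuse; each Builder().build() sets up a fresh generator,
    // so the per-record construction in the diff repeats this work needlessly.
    RandomStringGenerator lowercase =
        new RandomStringGenerator.Builder().withinRange('a', 'z').build();
    System.out.println(lowercase.generate(50));

    // Closer to the old RandomStringUtils.randomAlphabetic(50), which was mixed case:
    RandomStringGenerator alphabetic = new RandomStringGenerator.Builder()
        .withinRange(new char[][] { { 'a', 'z' }, { 'A', 'Z' } }).build();
    System.out.println(alphabetic.generate(50));
  }
}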