Setting up Kafka on Alibaba Cloud: Configuration and Java Program Examples
1. Download and extract the Kafka package
tar -xvzf kafka_2.12-2.2.1.tgz -C /software/kafka/
2. Start ZooKeeper
bin/zookeeper-server-start.sh config/zookeeper.properties &
3. Edit the Kafka configuration file server.properties. Set host.name to the instance's Alibaba Cloud private (internal) IP and advertised.host.name to its public (external) IP so that clients outside Alibaba Cloud can connect:
broker.id=0
port=9092
host.name=<private-ip>
advertised.host.name=<public-ip>
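Note that port, host.name and advertised.host.name are deprecated in Kafka 2.x. If you prefer the current configuration style, a rough equivalent (a sketch using the same placeholders) is to bind the listener to the private address and advertise the public one:
listeners=PLAINTEXT://<private-ip>:9092
advertised.listeners=PLAINTEXT://<public-ip>:9092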
4. Start the Kafka broker
./kafka-server-start.sh ../config/server.properties &
5. Create a topic
./kafka-topics.sh --create --zookeeper <private-ip>:2181 --replication-factor 1 --partitions 1 --topic HelloWorld
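If you prefer to create the topic from Java instead of the shell script, Kafka's AdminClient API can do the same thing. The sketch below is a minimal example, assuming the same broker address placeholder and topic name as the commands above; the class name CreateTopicDemo is just illustrative.
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;

import java.util.Collections;
import java.util.Properties;

public class CreateTopicDemo {
    public static void main(String[] args) throws Exception {
        Properties properties = new Properties();
        // Assumption: the client runs outside Alibaba Cloud, so the public IP is used.
        properties.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "<public-ip>:9092");

        try (AdminClient adminClient = AdminClient.create(properties)) {
            // 1 partition and a replication factor of 1, matching the CLI command above.
            NewTopic topic = new NewTopic("HelloWorld", 1, (short) 1);
            adminClient.createTopics(Collections.singleton(topic)).all().get();
            System.out.println("Topic created: HelloWorld");
        }
    }
}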
6. List topics
./kafka-topics.sh --list --zookeeper <private-ip>:2181
7. Console producer
./kafka-console-producer.sh --broker-list <private-ip>:9092 --topic HelloWorld
8. Console consumer
./kafka-console-consumer.sh --bootstrap-server <private-ip>:9092 --topic HelloWorld --from-beginning
9. Java producer
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.Properties;

/**
 * @author Ryan Feng
 * @version 1.0
 * @date 2019-07-09 18:19
 */
public class ProducerDemo {
    public static void main(String[] args) {
        Properties properties = new Properties();
        // Connect through the public (external) IP when the client runs outside Alibaba Cloud.
        properties.put("bootstrap.servers", "<public-ip>:9092");
        properties.put("acks", "all");              // wait for the full ISR to acknowledge each record
        properties.put("retries", 0);
        properties.put("batch.size", 16384);
        properties.put("linger.ms", 1);
        properties.put("buffer.memory", 33554432);
        properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        Producer<String, String> producer = null;
        long start = System.currentTimeMillis();
        try {
            producer = new KafkaProducer<>(properties);
            for (int i = 0; i < 1000000; i++) {
                String msg = "New Message: " + i;
                producer.send(new ProducerRecord<>("HelloWorld", msg));
                System.out.println("Sent: " + msg);
            }
            long end = System.currentTimeMillis();
            System.out.println("Time for 1 million messages: " + (end - start) / 1000 + " s");
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (producer != null) {
                producer.close();   // flush buffered records and release network resources
            }
        }
    }
}
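Both Java examples assume the org.apache.kafka:kafka-clients dependency (version 2.2.1, matching the broker) is on the classpath. Note that the loop above fires and forgets each send(), so delivery failures (for example, an advertised address that is not reachable from the client) are never reported. Below is a minimal sketch of attaching a Callback to surface such errors, under the same broker-address and topic assumptions as above.
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

import java.util.Properties;

public class CallbackProducerDemo {
    public static void main(String[] args) {
        Properties properties = new Properties();
        properties.put("bootstrap.servers", "<public-ip>:9092");   // assumption: same broker address as above
        properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        // try-with-resources flushes pending records and closes the producer on exit.
        try (Producer<String, String> producer = new KafkaProducer<>(properties)) {
            producer.send(new ProducerRecord<>("HelloWorld", "callback test"),
                    (RecordMetadata metadata, Exception exception) -> {
                        if (exception != null) {
                            // Delivery failed, e.g. the advertised address cannot be reached from this client.
                            exception.printStackTrace();
                        } else {
                            System.out.printf("Delivered to %s-%d at offset %d%n",
                                    metadata.topic(), metadata.partition(), metadata.offset());
                        }
                    });
        }
    }
}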
10. Java consumer
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;
import java.util.UUID;

/**
 * @author Ryan Feng
 * @version 1.0
 * @date 2019-07-09 21:22
 */
public class ConsumerDemo {
    public static void main(String[] args) {
        Properties properties = new Properties();
        // Connect through the public (external) IP when the client runs outside Alibaba Cloud.
        properties.put("bootstrap.servers", "<public-ip>:9092");
        // A random group id makes every run start as a brand-new consumer group.
        properties.put("group.id", UUID.randomUUID().toString());
        properties.put("enable.auto.commit", "true");
        properties.put("auto.commit.interval.ms", "1000");
        properties.put("auto.offset.reset", "earliest"); // a new group reads the topic from the beginning
        properties.put("session.timeout.ms", "30000");
        properties.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        properties.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(properties);
        kafkaConsumer.subscribe(Arrays.asList("HelloWorld"));
        while (true) {
            // poll(long) is deprecated since Kafka 2.0; the Duration overload is used instead.
            ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofMillis(100));
            for (ConsumerRecord<String, String> record : records) {
                System.out.printf("offset = %d, value = %s%n", record.offset(), record.value());
            }
        }
    }
}
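The consumer above auto-commits offsets once a second, so records can be re-read or skipped if the process dies between processing a batch and the next commit. Below is a minimal sketch of the alternative, committing offsets manually after each processed batch; the group id manual-commit-demo is just an illustrative placeholder, and the broker address and topic are the same assumptions as above.
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;

public class ManualCommitConsumerDemo {
    public static void main(String[] args) {
        Properties properties = new Properties();
        properties.put("bootstrap.servers", "<public-ip>:9092"); // assumption: same broker address as above
        properties.put("group.id", "manual-commit-demo");        // assumption: any stable group id works
        properties.put("enable.auto.commit", "false");           // offsets are committed explicitly below
        properties.put("auto.offset.reset", "earliest");
        properties.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        properties.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties)) {
            consumer.subscribe(Arrays.asList("HelloWorld"));
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.printf("offset = %d, value = %s%n", record.offset(), record.value());
                }
                // Commit only after the whole batch has been processed.
                if (!records.isEmpty()) {
                    consumer.commitSync();
                }
            }
        }
    }
}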
