import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import java.io.IOException;
import java.util.Properties;
public class Demo5 {
public static void main(String[] args) throws IOException {
Properties properties = new Properties();
properties.load(Demo5.class.getClassLoader().getResourceAsStream("producer.properties"));
KafkaProducer producer = new KafkaProducer<>(properties);
//创建要发送的数据(叫做生产的记录producerecord)
ProducerRecord
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import java.io.IOException;
import java.util.Arrays;
import java.util.Properties;
/**
 * Minimal Kafka consumer demo: loads settings from consumer.properties on the
 * classpath, subscribes to the "hadoop" topic, and prints every record forever.
 */
public class Demo6 {
    public static void main(String[] args) throws IOException {
        Properties properties = new Properties();
        // consumer.properties must define bootstrap.servers, group.id and the deserializers.
        properties.load(Demo6.class.getClassLoader().getResourceAsStream("consumer.properties"));
        // Typed generics instead of raw types: keys and values are assumed to be
        // Strings (StringDeserializer in consumer.properties) — TODO confirm.
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties);
        // A consumer must subscribe to a topic before it can receive anything.
        consumer.subscribe(Arrays.asList("hadoop"));
        // Poll loop: fetch batches and print each record; runs until the process is killed.
        while (true) {
            // NOTE(review): poll(long) is deprecated since kafka-clients 2.0 —
            // switch to poll(Duration.ofMillis(1000)) if the client version allows.
            ConsumerRecords<String, String> records = consumer.poll(1000);
            // A poll returns a batch, so iterate over the individual records.
            for (ConsumerRecord<String, String> record : records) {
                System.out.printf("offset = %d, key = %s, value = %s%n",
                        record.offset(), record.key(), record.value());
            }
        }
    }
}