Kafka SASL consumer example

The example below is a Kafka consumer that authenticates with SASL/SCRAM-SHA-256, followed by the JAAS configuration file (consumer.conf) that it loads.
package cn.cuiot.dmp.rocketmq;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.config.SslConfigs;

import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import java.util.Arrays;
import java.util.Properties;


public class AclConsumerMain extends AbstractConsumer {
    // Kafka consumer with String keys and values
    private static org.apache.kafka.clients.consumer.KafkaConsumer<String, String> consumer;


        public static void main(String[] args) {
            // defaults, overridden when the four expected arguments are supplied
            String kafkaBootstrapServers = "kafka.cuiot.cn:9093,kafka.cuiot.cn:9193,kafka.cuiot.cn:9293";
            String kafkaSaslConfDir = "C:\\data\\consumer.conf";
            String groupIdConfig = "";
            String topic = "";
            if (args.length == 4) {
                kafkaBootstrapServers = args[0];
                kafkaSaslConfDir = args[1];
                groupIdConfig = args[2];
                topic = args[3];
            } else {
                System.out.println("args quantity error, e.g. " +
                        "java -cp <jar name> <main class> <kafka bootstrap servers> <sasl conf file path> <groupId> <topic>\n");
            }
            System.setProperty("java.security.auth.login.config", kafkaSaslConfDir);
            Properties properties = new Properties();
            properties.setProperty("bootstrap.servers", kafkaBootstrapServers);
            properties.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
            properties.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
            properties.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
            properties.setProperty("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

            properties.setProperty("security.protocol", "SASL_PLAINTEXT");
            properties.setProperty(SaslConfigs.SASL_MECHANISM, "SCRAM-SHA-256");

            // consumer group
            properties.put(ConsumerConfig.GROUP_ID_CONFIG, groupIdConfig);
            properties.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, 30000);
            // limit the amount of data fetched per request; this has a big impact when consuming over the public internet
            properties.put(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG, 32000);
            properties.put(ConsumerConfig.FETCH_MAX_BYTES_CONFIG, 32000);
            // maximum number of records returned by a single poll
            // keep this small: if a batch cannot be consumed before the next poll, a rebalance is triggered and consumption stalls
            properties.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 30);
            // disable hostname verification
            properties.put(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, "");
            // auto-commit of offsets
//        properties.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, ConfigUtils.getInstance().getString("enable.auto.commit"));
            // offset commit interval
//        properties.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, ConfigUtils.getInstance().getString("auto.commit.interval.ms"));
//        properties.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, ConfigUtils.getInstance().getString("max.poll.records"));
//        properties.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, ConfigUtils.getInstance().getString("session.timeout.ms"));
//        properties.put("transaction.timeout.ms", 1000 * 60 * 5 + "");
            consumer = new org.apache.kafka.clients.consumer.KafkaConsumer<>(properties);
            consumer.subscribe(Arrays.asList(topic));

            System.out.println("---------开始Sasl消费---------");

            exeConsumer(consumer);

    }





        /** Computes HmacSHA256(message, key) and returns the result as a lowercase hex string. */
        public static String sha256_mac(String message, String key) {
            String outPut = null;
            try {
                Mac sha256Hmac = Mac.getInstance("HmacSHA256");
                SecretKeySpec secretKey = new SecretKeySpec(key.getBytes(), "HmacSHA256");
                sha256Hmac.init(secretKey);
                byte[] bytes = sha256Hmac.doFinal(message.getBytes());
                outPut = byteArrayToHexString(bytes);
            } catch (Exception e) {
                System.out.println("Error HmacSHA256========" + e.getMessage());
            }
            return outPut;
        }

        private static String byteArrayToHexString(byte[] b) {
            StringBuilder sb = new StringBuilder();
            String stmp;
            for (int n = 0; b != null && n < b.length; n++) {
                stmp = Integer.toHexString(b[n] & 0xFF);
                if (stmp.length() == 1) {
                    sb.append('0');
                }
                sb.append(stmp);
            }
            return sb.toString().toLowerCase();
        }

//        public static void main(String[] strs) {
//            String consumerGroupId = "";
//            String secretKey = "";
//            System.out.println(sha256_mac(consumerGroupId, secretKey));
//        }


}
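AbstractConsumer and its exeConsumer method are not included in the post. A minimal sketch of what such a base class might look like is below; the class layout, commit strategy, and print format are assumptions, not the original implementation.

package cn.cuiot.dmp.rocketmq;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.time.Duration;

// Hypothetical base class: exeConsumer() polls in a loop, prints each record,
// and commits offsets synchronously after each non-empty batch.
public abstract class AbstractConsumer {

    protected static void exeConsumer(KafkaConsumer<String, String> consumer) {
        try {
            while (true) {
                // wait up to one second for new records
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(1));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.printf("topic=%s partition=%d offset=%d value=%s%n",
                            record.topic(), record.partition(), record.offset(), record.value());
                }
                if (!records.isEmpty()) {
                    // commit the offsets of the batch just processed
                    consumer.commitSync();
                }
            }
        } finally {
            consumer.close();
        }
    }
}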

consumer.conf

KafkaClient {
    org.apache.kafka.common.security.scram.ScramLoginModule required
    username="918593438985748480"
    password="6a4b5dbc795aa5eb3ad0be248f308c7bba4b230cdcce9aee4a5f62b810abadb0";
};
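
As an alternative to pointing java.security.auth.login.config at this file, the same JAAS entry can be passed to the client directly through the sasl.jaas.config property. A minimal sketch, reusing the credentials from the file above:

            // inline JAAS configuration, equivalent to loading consumer.conf
            properties.put(SaslConfigs.SASL_JAAS_CONFIG,
                    "org.apache.kafka.common.security.scram.ScramLoginModule required "
                            + "username=\"918593438985748480\" "
                            + "password=\"6a4b5dbc795aa5eb3ad0be248f308c7bba4b230cdcce9aee4a5f62b810abadb0\";");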

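A possible invocation, assuming the class and the Kafka client dependencies are packaged onto the classpath (the jar name, config path, group id, and topic below are placeholders):

java -cp kafka-sasl-demo.jar cn.cuiot.dmp.rocketmq.AclConsumerMain \
    kafka.cuiot.cn:9093,kafka.cuiot.cn:9193,kafka.cuiot.cn:9293 \
    /path/to/consumer.conf demo-group demo-topic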