Commit 2a449751 authored by Nguyen Ha's avatar Nguyen Ha

add Redis

parent 44b8e017
package com.viettel.campaign.config;

import com.viettel.campaign.service.Receiver;

import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.support.serializer.JsonDeserializer;

/**
 * Kafka consumer-side configuration: exposes the raw consumer properties, the
 * {@link ConsumerFactory}, the listener container factory used by
 * {@code @KafkaListener} methods, and the {@link Receiver} bean.
 *
 * @author hanv_itsol
 * @project campaign
 */
@Configuration
public class ReceiverConfig {

    // TODO(review): broker address is hard-coded; restore the commented-out
    // @Value("${spring.kafka.bootstrap-servers}") wiring so the address comes
    // from application properties instead of a fixed IP.
    private String bootstrapServers = "192.168.1.201:9092";

    /**
     * Raw consumer properties: bootstrap servers, deserializer classes and the
     * consumer group id.
     *
     * <p>NOTE(review): the deserializer class entries set here are overridden by
     * the explicit deserializer instances passed to
     * {@link DefaultKafkaConsumerFactory} in {@link #consumerFactory()}; they
     * only take effect for other consumers of this map.
     *
     * @return mutable map of Kafka consumer configuration entries
     */
    @Bean
    public Map<String, Object> consumerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonDeserializer.class);
        // TODO(review): externalize the group id; "hanv" looks like a developer placeholder.
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "hanv");
        return props;
    }

    /**
     * Consumer factory wired with a String key deserializer and a JSON value
     * deserializer (the explicit instances, not the class entries in
     * {@link #consumerConfigs()}, are what the factory actually uses).
     */
    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        return new DefaultKafkaConsumerFactory<>(consumerConfigs(), new StringDeserializer(),
                new JsonDeserializer<>());
    }

    /** Container factory backing {@code @KafkaListener}-annotated methods. */
    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());

        return factory;
    }

    /** Message receiver bean consuming via the listener container above. */
    @Bean
    public Receiver receiver() {
        return new Receiver();
    }
}
package com.viettel.campaign.config;

import java.util.HashMap;
import java.util.Map;

import com.viettel.campaign.service.Sender;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import org.springframework.kafka.support.serializer.JsonSerializer;

/**
 * Kafka producer-side configuration: exposes the raw producer properties, the
 * {@link ProducerFactory}, a {@link KafkaTemplate} for publishing messages,
 * and the {@link Sender} bean.
 *
 * @author hanv_itsol
 * @project campaign
 */
@Configuration
public class SenderConfig {

    // TODO(review): broker address is hard-coded; wire it from
    // ${spring.kafka.bootstrap-servers} (see ReceiverConfig, which has the
    // same placeholder) instead of a fixed IP.
    private String bootstrapServers = "192.168.1.201:9092";

    /**
     * Raw producer properties: bootstrap servers plus String key / JSON value
     * serializer classes.
     *
     * @return mutable map of Kafka producer configuration entries
     */
    @Bean
    public Map<String, Object> producerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);

        return props;
    }

    /** Producer factory built from {@link #producerConfigs()}. */
    @Bean
    public ProducerFactory<String, String> producerFactory() {
        return new DefaultKafkaProducerFactory<>(producerConfigs());
    }

    /** Template used by application code to publish messages. */
    @Bean
    public KafkaTemplate<String, String> simpleKafkaTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }

    /** Message sender bean publishing via the template above. */
    @Bean
    public Sender sender() {
        return new Sender();
    }
}
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment