Commit 2a449751 authored by Nguyen Ha

add Redis

parent 44b8e017
package com.viettel.campaign.config;

import com.viettel.campaign.service.Receiver;

import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.support.serializer.JsonDeserializer;

/**
 * Kafka consumer configuration: builds the consumer properties, the consumer factory
 * and the listener container factory backing the {@link Receiver} bean.
 *
 * @author hanv_itsol
 * @project campaign
 */
@Configuration
public class ReceiverConfig {

    // @Value("${spring.kafka.bootstrap-servers}")
    private String bootstrapServers = "192.168.1.201:9092";

    @Bean
    public Map<String, Object> consumerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonDeserializer.class);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "hanv");
        return props;
    }

    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        // The deserializer instances passed here take precedence over the
        // *_DESERIALIZER_CLASS_CONFIG entries set in consumerConfigs().
        return new DefaultKafkaConsumerFactory<>(consumerConfigs(), new StringDeserializer(),
                new JsonDeserializer<>());
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        return factory;
    }

    @Bean
    public Receiver receiver() {
        return new Receiver();
    }
}
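The Receiver service wired in above is not part of this diff. The class below is a minimal sketch of what such a listener might look like, assuming a plain String payload and a hypothetical topic name "campaign"; the actual service in com.viettel.campaign.service may differ.

package com.viettel.campaign.service;

import java.util.concurrent.CountDownLatch;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.kafka.annotation.KafkaListener;

/**
 * Sketch of a Kafka listener driven by the kafkaListenerContainerFactory
 * defined in ReceiverConfig. Topic name and payload type are assumptions
 * for illustration only.
 */
public class Receiver {

    private static final Logger LOGGER = LoggerFactory.getLogger(Receiver.class);

    // Lets callers (e.g. tests) block until at least one message has arrived.
    private final CountDownLatch latch = new CountDownLatch(1);

    @KafkaListener(topics = "campaign")
    public void receive(String payload) {
        LOGGER.info("received payload='{}'", payload);
        latch.countDown();
    }

    public CountDownLatch getLatch() {
        return latch;
    }
}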
package com.viettel.campaign.config;

import java.util.HashMap;
import java.util.Map;

import com.viettel.campaign.service.Sender;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import org.springframework.kafka.support.serializer.JsonSerializer;

/**
 * Kafka producer configuration: builds the producer properties, the producer factory
 * and the {@link KafkaTemplate} backing the {@link Sender} bean.
 *
 * @author hanv_itsol
 * @project campaign
 */
@Configuration
public class SenderConfig {

    private String bootstrapServers = "192.168.1.201:9092";

    @Bean
    public Map<String, Object> producerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
        return props;
    }

    @Bean
    public ProducerFactory<String, String> producerFactory() {
        return new DefaultKafkaProducerFactory<>(producerConfigs());
    }

    @Bean
    public KafkaTemplate<String, String> simpleKafkaTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }

    @Bean
    public Sender sender() {
        return new Sender();
    }
}
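The Sender service is likewise outside this diff. The sketch below assumes it simply wraps the KafkaTemplate declared above; the send(topic, payload) signature is illustrative, not the project's actual API.

package com.viettel.campaign.service;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;

/**
 * Sketch of a producer-side service backed by the simpleKafkaTemplate bean
 * from SenderConfig. Method signature is an assumption for illustration.
 */
public class Sender {

    private static final Logger LOGGER = LoggerFactory.getLogger(Sender.class);

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    public void send(String topic, String payload) {
        LOGGER.info("sending payload='{}' to topic='{}'", payload, topic);
        kafkaTemplate.send(topic, payload);
    }
}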