Commit 433be62c authored by Vu Duy Anh's avatar Vu Duy Anh

anhvd commit merge code

parents 40f1295f 1799eb5e
package com.viettel.campaign.config; //package com.viettel.campaign.config;
//
import com.viettel.campaign.service.Receiver; //import com.viettel.campaign.service.Receiver;
import org.springframework.context.annotation.Configuration; //import org.springframework.context.annotation.Configuration;
import java.util.HashMap; //import java.util.HashMap;
import java.util.Map; //import java.util.Map;
//
import org.apache.kafka.clients.consumer.ConsumerConfig; //import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer; //import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.context.annotation.Bean; //import org.springframework.context.annotation.Bean;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory; //import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory; //import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory; //import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.support.serializer.JsonDeserializer; //import org.springframework.kafka.support.serializer.JsonDeserializer;
//
/**
 * Kafka consumer-side configuration: base consumer properties, a
 * {@link ConsumerFactory}, the listener container factory used by
 * {@code @KafkaListener} methods, and the {@link Receiver} bean.
 *
 * @author hanv_itsol
 * @project campaign
 */
@Configuration
public class ReceiverConfig {

    // NOTE(review): broker address is hard-coded and the property injection
    // below was disabled — restore @Value and externalize before deploying.
    // @Value("${spring.kafka.bootstrap-servers}")
    private String bootstrapServers = "192.168.1.201:9092";

    /**
     * Base consumer properties: broker list, String key deserializer,
     * JSON value deserializer, and a fixed consumer group id ("hanv").
     *
     * @return mutable map of Kafka consumer configuration entries
     */
    @Bean
    public Map<String, Object> consumerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonDeserializer.class);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "hanv");
        return props;
    }

    /**
     * Consumer factory built from {@link #consumerConfigs()}; deserializer
     * instances passed here take precedence over the class-name properties.
     */
    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        return new DefaultKafkaConsumerFactory<>(consumerConfigs(), new StringDeserializer(),
                new JsonDeserializer<>());
    }

    /** Container factory wiring {@link #consumerFactory()} into listener containers. */
    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        return factory;
    }

    /** Message receiver bean (declared here because {@link Receiver} has no stereotype annotation). */
    @Bean
    public Receiver receiver() {
        return new Receiver();
    }
}
package com.viettel.campaign.config; //package com.viettel.campaign.config;
//
import java.util.HashMap; //import java.util.HashMap;
import java.util.Map; //import java.util.Map;
//
import com.viettel.campaign.service.Sender; //import com.viettel.campaign.service.Sender;
import org.apache.kafka.clients.producer.ProducerConfig; //import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer; //import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.context.annotation.Bean; //import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration; //import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.DefaultKafkaProducerFactory; //import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate; //import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory; //import org.springframework.kafka.core.ProducerFactory;
import org.springframework.kafka.support.serializer.JsonSerializer; //import org.springframework.kafka.support.serializer.JsonSerializer;
//
/**
 * Kafka producer-side configuration: base producer properties, a
 * {@link ProducerFactory}, a {@link KafkaTemplate}, and the {@link Sender} bean.
 *
 * @author hanv_itsol
 * @project campaign
 */
@Configuration
public class SenderConfig {

    // NOTE(review): broker address is hard-coded — externalize to
    // spring.kafka.bootstrap-servers configuration before deploying.
    private String bootstrapServers = "192.168.1.201:9092";

    /**
     * Base producer properties: broker list, String key serializer and
     * JSON value serializer.
     *
     * @return mutable map of Kafka producer configuration entries
     */
    @Bean
    public Map<String, Object> producerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
        return props;
    }

    /** Producer factory built from {@link #producerConfigs()}. */
    @Bean
    public ProducerFactory<String, String> producerFactory() {
        return new DefaultKafkaProducerFactory<>(producerConfigs());
    }

    /** Template used by {@link Sender} to publish messages. */
    @Bean
    public KafkaTemplate<String, String> simpleKafkaTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }

    /** Sender bean (declared here because {@link Sender} has no stereotype annotation). */
    @Bean
    public Sender sender() {
        return new Sender();
    }
}
...@@ -30,26 +30,28 @@ public class CorsFilter implements Filter { ...@@ -30,26 +30,28 @@ public class CorsFilter implements Filter {
HttpServletRequest request = (HttpServletRequest) req; HttpServletRequest request = (HttpServletRequest) req;
if ("OPTIONS".equalsIgnoreCase(request.getMethod())) { chain.doFilter(req, resp);
chain.doFilter(req, resp);
return; // if ("OPTIONS".equalsIgnoreCase(request.getMethod())) {
} // chain.doFilter(req, resp);
logger.info("uri: "+ request.getRequestURI()); // return;
if ("/".equals(request.getRequestURI())) { // }
chain.doFilter(req, resp); // logger.info("uri: "+ request.getRequestURI());
return; // if ("/".equals(request.getRequestURI())) {
} // chain.doFilter(req, resp);
String xAuthToken = request.getHeader("X-Auth-Token"); // return;
if (xAuthToken == null || "".equals(xAuthToken)) { // }
response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "The token is null."); // String xAuthToken = request.getHeader("X-Auth-Token");
return; // if (xAuthToken == null || "".equals(xAuthToken)) {
} // response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "The token is null.");
Object obj = RedisUtil.getInstance().get(xAuthToken); // return;
if (obj instanceof UserSession) { // }
chain.doFilter(req, resp); // Object obj = RedisUtil.getInstance().get(xAuthToken);
} else { // if (obj instanceof UserSession) {
response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "The token is invalid."); // chain.doFilter(req, resp);
} // } else {
// response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "The token is invalid.");
// }
} }
@Override @Override
......
package com.viettel.campaign.service; //package com.viettel.campaign.service;
//
import org.springframework.kafka.annotation.KafkaListener; //import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component; //import org.springframework.stereotype.Component;
import org.springframework.stereotype.Service; //import org.springframework.stereotype.Service;
//
/**
 * Kafka listener that prints every message received on topic "TestTopic"
 * as consumer group "1001".
 *
 * @author hanv_itsol
 * @project service-campaign
 */
@Service
public class Consumer {

    /**
     * Handles one message from "TestTopic".
     * NOTE(review): uses System.out — prefer an SLF4J logger for production.
     *
     * @param message the deserialized record value
     */
    @KafkaListener(topics = "TestTopic", groupId = "1001")
    public void consume(String message) {
        System.out.println("Consumed Message: " + message);
    }
}
package com.viettel.campaign.service; //package com.viettel.campaign.service;
//
import org.springframework.beans.factory.annotation.Autowired; //import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate; //import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service; //import org.springframework.stereotype.Service;
//
/**
 * Publishes test messages to the two demo topics via an injected
 * {@link KafkaTemplate}.
 *
 * @author hanv_itsol
 * @project service-campaign
 */
@Service
public class Producer {

    /** Primary demo topic. */
    private static final String TOPIC = "TestTopic";
    /** Secondary demo topic. */
    private static final String TOPIC2 = "TestTopic2";

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    /**
     * Sends {@code message} to {@link #TOPIC} with fixed record key "key1".
     *
     * @param message record value to publish
     */
    public void sendMessage(String message) {
        this.kafkaTemplate.send(TOPIC, "key1", message);
    }

    /**
     * Sends {@code message} to {@link #TOPIC2} with fixed record key "key2".
     *
     * @param message record value to publish
     */
    public void sendMessageTopic2(String message) {
        this.kafkaTemplate.send(TOPIC2, "key2", message);
    }
}
package com.viettel.campaign.service; //package com.viettel.campaign.service;
//
import org.springframework.kafka.annotation.KafkaListener; //import org.springframework.kafka.annotation.KafkaListener;
//
import java.util.concurrent.CountDownLatch; //import java.util.concurrent.CountDownLatch;
//
/**
 * Counts down a latch when a message arrives on topic "hanv"; useful for
 * awaiting message delivery in tests.
 *
 * @author hanv_itsol
 * @project campaign
 */
public class Receiver {

    // Released on first received message; note the payload itself is discarded.
    private CountDownLatch latch = new CountDownLatch(1);

    /**
     * Listener callback for topic "hanv"; releases the latch.
     *
     * @param payload the received record value (unused)
     */
    @KafkaListener(topics = "hanv")
    public void receive(String payload) {
        latch.countDown();
    }
}
package com.viettel.campaign.service; //package com.viettel.campaign.service;
//
import org.springframework.beans.factory.annotation.Autowired; //import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate; //import org.springframework.kafka.core.KafkaTemplate;
//
/**
 * Thin wrapper over {@link KafkaTemplate} that publishes a payload to an
 * arbitrary topic without a record key.
 *
 * @author hanv_itsol
 * @project campaign
 */
public class Sender {

    @Autowired
    private KafkaTemplate<String, String> simpleKafkaTemplate;

    /**
     * Sends {@code payload} to {@code topic} (fire-and-forget; the returned
     * future from KafkaTemplate is intentionally ignored).
     *
     * @param topic   destination topic name
     * @param payload record value to publish
     */
    public void send(String topic, String payload) {
        simpleKafkaTemplate.send(topic, payload);
    }
}
package com.viettel.campaign.utils; package com.viettel.campaign.utils;
import com.viettel.security.PassTranformer; //import com.viettel.security.PassTranformer;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
import java.io.File; import java.io.File;
...@@ -60,7 +60,7 @@ public class Config { ...@@ -60,7 +60,7 @@ public class Config {
} catch (IOException ex) { } catch (IOException ex) {
Logger.getLogger(Config.class.getName()).error(ex.getMessage(), ex); Logger.getLogger(Config.class.getName()).error(ex.getMessage(), ex);
} }
PassTranformer.setInputKey("Ipcc#987654321#@!"); // PassTranformer.setInputKey("Ipcc#987654321#@!");
// rabbitConnection = properties.getProperty("rabbit_connection_string"); // rabbitConnection = properties.getProperty("rabbit_connection_string");
// fbGatewayUser = PassTranformer.decrypt(properties.getProperty("rabbit_user", "").trim()); // fbGatewayUser = PassTranformer.decrypt(properties.getProperty("rabbit_user", "").trim());
......
package com.viettel.campaign.web.rest; //package com.viettel.campaign.web.rest;
//
import com.viettel.campaign.service.Sender; //import com.viettel.campaign.service.Sender;
import com.viettel.campaign.service.Producer; //import com.viettel.campaign.service.Producer;
import lombok.extern.slf4j.Slf4j; //import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired; //import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*; //import org.springframework.web.bind.annotation.*;
//
/**
 * REST endpoints for publishing test messages to Kafka under "/kafka".
 *
 * @author hanv_itsol
 * @project service-campaign
 */
@Slf4j
@RestController
@RequestMapping(value = "/kafka")
public class KafkaController {

    private final Sender sender;

    private final Producer producer;

    /** Constructor injection of the two publishing collaborators. */
    @Autowired
    KafkaController(Sender sender, Producer producer) {
        this.sender = sender;
        this.producer = producer;
    }

    /**
     * Publishes {@code message} to the primary demo topic via {@link Producer}.
     *
     * @param message record value, taken from the "message" request parameter
     */
    @PostMapping(value = "/publish")
    public void sendMessageToKafkaTopic(@RequestParam("message") String message) {
        log.info("message: " + message);
        this.producer.sendMessage(message);
    }

    /**
     * Publishes {@code message} to the secondary demo topic via {@link Producer}.
     *
     * @param message record value, taken from the "message" request parameter
     */
    @PostMapping(value = "/publish2")
    public void sendMessageToKafkaTopic2(@RequestParam("message") String message) {
        log.info("message: " + message);
        this.producer.sendMessageTopic2(message);
    }

    /** Smoke-test endpoint: sends a fixed payload to topic "hanv" via {@link Sender}. */
    @GetMapping(value = "/test")
    public void test() {
        sender.send("hanv", "haha");
    }
}
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment