Modify: rework the kafka subscription implementation

lxy 2020-09-07 09:51:01 +08:00
parent d45b39a309
commit f848c57e39
10 changed files with 201 additions and 127 deletions

View File: com/zdemo/AbstractConsumer.java (new)

@@ -0,0 +1,43 @@
package com.zdemo;

import org.redkale.convert.json.JsonConvert;

import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

/**
 * @author Liang
 * @date 2020-09-05 23:18
 */
public abstract class AbstractConsumer implements IConsumer {

    public final Map<String, EventType> eventMap = new HashMap<>();

    public void addEventType(EventType... eventType) {
        for (EventType type : eventType) {
            eventMap.put(type.topic, type);
        }
    }

    @Override
    public final Collection<String> getSubscribes() {
        return eventMap.keySet();
    }

    @Override
    public final void accept(String topic, String value) {
        EventType eventType = eventMap.get(topic);
        Object data;
        // String payloads are meant to pass through unchanged; anything else
        // is decoded from JSON against the handler's declared type.
        if ("java.lang.String".equals(eventType.typeToken.getType().toString())) {
            data = value;
        } else {
            data = JsonConvert.root().convertFrom(eventType.typeToken.getType(), value);
        }
        eventType.accept(data);
    }
}
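For reference, a minimal usage sketch of the new dispatch path (the subclass, topic, and payload are made up for the demo, and it assumes IConsumer declares nothing beyond what this commit shows): handlers are keyed by topic, and accept(topic, value) decodes the raw record against the handler's TypeToken before invoking it.

package com.zdemo.demo;

import com.zdemo.AbstractConsumer;
import com.zdemo.EventType;
import org.redkale.util.TypeToken;

import java.util.Map;

// Hypothetical subclass, not part of the commit.
public class DemoConsumer extends AbstractConsumer {

    public static void main(String[] args) {
        DemoConsumer c = new DemoConsumer();
        c.addEventType(EventType.of("user", new TypeToken<Map<String, String>>() {
        }, m -> System.out.println("name = " + m.get("name"))));

        // What a broker callback does: route the raw record by topic.
        c.accept("user", "{\"name\":\"liang\"}"); // prints: name = liang
    }
}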

View File: com/zdemo/Event.java

@@ -6,40 +6,18 @@ package com.zdemo;
  * @param <V>
  */
 public class Event<V> {

-    private String topic;
-    private String key;
-    private V value;
+    public final String topic;
+    //public final String key;
+    public final V value;

-    public Event() {
-    }
-
-    public Event(String topic, String key, V value) {
+    private Event(String topic, V value) {
         this.topic = topic;
-        this.key = key;
         this.value = value;
     }

-    public String getTopic() {
-        return topic;
-    }
-
-    public void setTopic(String topic) {
-        this.topic = topic;
-    }
-
-    public String getKey() {
-        return key;
-    }
-
-    public void setKey(String key) {
-        this.key = key;
-    }
-
-    public V getValue() {
-        return value;
-    }
-
-    public void setValue(V value) {
-        this.value = value;
-    }
+    public static <V> Event of(String topic, V value) {
+        return new Event<V>(topic, value);
+    }
 }
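Call sites shrink accordingly; a quick sketch (topic name illustrative):

import com.zdemo.Event;

public class EventDemo {
    public static void main(String[] args) {
        // Before: new Event<>() plus setTopic/setKey/setValue calls.
        // Now one factory call fixes both fields for the event's lifetime.
        Event<Float> e = Event.of("a1", 1f);
        System.out.println(e.topic + " = " + e.value); // prints: a1 = 1.0
    }
}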

View File: com/zdemo/EventType.java (new)

@@ -0,0 +1,25 @@
package com.zdemo;

import org.redkale.util.TypeToken;

import java.util.function.Consumer;

/**
 * One topic subscription: the topic name, the payload type used for JSON
 * decoding, and the callback invoked per message.
 */
public class EventType<T> {

    public final String topic;
    public final TypeToken<T> typeToken;
    private final Consumer<T> consumer;

    private EventType(String topic, TypeToken<T> typeToken, Consumer<T> consumer) {
        this.topic = topic;
        this.typeToken = typeToken;
        this.consumer = consumer;
    }

    public static <T> EventType of(String topic, TypeToken<T> typeToken, Consumer<T> consumer) {
        return new EventType<>(topic, typeToken, consumer);
    }

    public void accept(T t) {
        consumer.accept(t);
    }
}
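A small sketch of how the pieces fit (topic and payload illustrative): the anonymous TypeToken subclass keeps the generic payload type available at runtime, and accept simply forwards to the callback.

import com.zdemo.EventType;
import org.redkale.util.TypeToken;

import java.util.List;

public class EventTypeDemo {
    public static void main(String[] args) {
        EventType<List<Integer>> type = EventType.of("c1", new TypeToken<List<Integer>>() {
        }, list -> System.out.println("sum = " + list.stream().mapToInt(Integer::intValue).sum()));
        type.accept(List.of(1, 2, 3)); // prints: sum = 6
    }
}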

View File: com/zdemo/IConsumer.java

@@ -1,8 +1,5 @@
 package com.zdemo;

-import org.redkale.convert.json.JsonConvert;
-import org.redkale.util.TypeToken;
-
 import java.util.Collection;
 import java.util.logging.Logger;
@@ -11,24 +8,5 @@ public interface IConsumer<T extends Event> {

     Collection<String> getSubscribes();

-    TypeToken<T> getTypeToken();
-
-    void accept(T t);
-
-    default void accept(String value) {
-        System.out.println(value);
-        if ("com.zdemo.Event<java.lang.String>".equals(getTypeToken().getType().toString())) {
-            String _value = value.split("\"value\":")[1];
-            _value = _value.substring(0, _value.length() - 1);
-            Event t = JsonConvert.root().convertFrom(getTypeToken().getType(), value.replace(_value, ""));
-            if (_value.startsWith("\"") && _value.endsWith("\"")) {
-                _value = _value.substring(1, _value.length() - 1);
-            }
-            t.setValue(_value);
-            accept((T) t);
-        } else {
-            Event t = JsonConvert.root().convertFrom(getTypeToken().getType(), value);
-            accept((T) t);
-        }
-    }
+    <T> void accept(String topic, String record);
 }

View File: com/zdemo/kafak/KafakConsumer.java

@@ -1,10 +1,11 @@
 package com.zdemo.kafak;

-import com.zdemo.Event;
+import com.zdemo.AbstractConsumer;
+import com.zdemo.EventType;
 import com.zdemo.IConsumer;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
 import org.apache.kafka.clients.consumer.ConsumerRecords;
 import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.common.errors.WakeupException;
 import org.redkale.net.http.RestService;
 import org.redkale.service.Service;
 import org.redkale.util.AnyValue;
@@ -16,48 +17,86 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.time.Duration;
 import java.util.Properties;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.logging.Level;
+
+import static java.util.Arrays.asList;

 /**
  * Consumer
- *
- * @param <T>
  */
 @RestService
-public abstract class KafakConsumer<T extends Event> implements IConsumer<T>, Service {
+public abstract class KafakConsumer extends AbstractConsumer implements IConsumer, Service {

     @Resource(name = "APP_HOME")
     protected File APP_HOME;

+    protected Properties props;
+
+    // 0:none 1:restart -1:stop
+    //private int cmd = -1;
+
     public abstract String getGroupid();

+    private final LinkedBlockingQueue<EventType> queue = new LinkedBlockingQueue<>();
+
+    @Override
+    public void addEventType(EventType... eventTypes) {
+        super.addEventType(eventTypes);
+        try {
+            for (EventType eventType : eventTypes) {
+                queue.put(eventType);
+            }
+        } catch (InterruptedException e) {
+            e.printStackTrace();
+        }
+    }
+
     @Override
     public void init(AnyValue config) {
-        new Thread(() -> {
-            try (FileInputStream fis = new FileInputStream(new File(APP_HOME, "conf/kafak.properties"));) {
-                Properties props = new Properties();
-                props.load(fis);
-                props.put("group.id", getGroupid());
-                KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
-                consumer.subscribe(getSubscribes());
-                while (true) {
-                    ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
-                    for (ConsumerRecord<String, String> record : records) {
-                        String value = record.value();
-                        try {
-                            accept(value);
-                        } catch (Exception e) {
-                            logger.warning("event accept error :" + value);
-                            e.printStackTrace();
-                        }
-                    }
-                }
-            } catch (FileNotFoundException e) {
-                e.printStackTrace();
-            } catch (IOException e) {
-                e.printStackTrace();
-            }
-        }).start();
+        try (FileInputStream fis = new FileInputStream(new File(APP_HOME, "conf/kafak.properties"))) {
+            props = new Properties();
+            props.load(fis);
+            if (logger.isLoggable(Level.INFO)) logger.info(getGroupid() + " consumer started!");
+            new Thread(() -> {
+                try {
+                    props.put("group.id", getGroupid());
+                    KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
+                    consumer.subscribe(asList("_")); // placeholder topic; real topics arrive via addEventType
+                    while (true) {
+                        ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(10_000));
+                        records.forEach(record -> {
+                            String topic = record.topic();
+                            long offset = record.offset();
+                            String value = record.value();
+                            try {
+                                accept(topic, value);
+                            } catch (Exception e) {
+                                logger.warning(String.format("topic[%s] event accept error, offset=%s,value:%s", topic, offset, value));
+                                e.printStackTrace();
+                            }
+                        });
+                        // dynamically added subscriptions: drain the queue and re-subscribe
+                        while (!queue.isEmpty()) {
+                            queue.clear();
+                            consumer.unsubscribe();
+                            consumer.subscribe(getSubscribes());
+                        }
+                    }
+                } catch (WakeupException ex) {
+                    System.out.println("WakeupException !!!!");
+                }
+            }, "thread-consumer-[" + getGroupid() + "]").start();
+        } catch (FileNotFoundException e) {
+            e.printStackTrace();
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
     }
 }
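The heart of this change is the thread hand-off: KafkaConsumer is not thread-safe, so addEventType (callable from any thread) only enqueues, and the poll thread itself performs the unsubscribe/subscribe between polls. A condensed, self-contained sketch of that pattern (class and method names here are illustrative, not part of the commit):

import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.time.Duration;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.LinkedBlockingQueue;

public class ResubscribeLoop {

    private final LinkedBlockingQueue<String> pending = new LinkedBlockingQueue<>();
    private final Set<String> topics = ConcurrentHashMap.newKeySet();

    // May be called from any thread; never touches the KafkaConsumer directly.
    public void subscribeLater(String topic) {
        topics.add(topic);
        pending.add(topic);
    }

    // Runs only on the dedicated consumer thread.
    void loop(KafkaConsumer<String, String> consumer) {
        consumer.subscribe(List.of("_")); // placeholder, mirroring the commit's asList("_")
        while (true) {
            consumer.poll(Duration.ofSeconds(10))
                    .forEach(r -> { /* dispatch r.topic(), r.value() */ });
            if (!pending.isEmpty()) {       // a subscription arrived since the last poll
                pending.clear();
                consumer.unsubscribe();
                consumer.subscribe(topics); // re-subscribe with the full, updated set
            }
        }
    }
}

One trade-off worth noting: with a 10-second poll timeout, a topic added just after a poll begins can wait up to a full cycle before the re-subscribe takes effect; KafkaConsumer.wakeup() would shorten that, which is presumably why WakeupException is already caught.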

View File: com/zdemo/kafak/KafakProducer.java

@@ -50,7 +50,7 @@ public class KafakProducer<T extends Event> implements IProducer<T>, Service {

     @Override
     public void send(T... t) {
         for (T x : t) {
-            producer.send(new ProducerRecord(x.getTopic(), JsonConvert.root().convertTo(x)));
+            producer.send(new ProducerRecord(x.topic, JsonConvert.root().convertTo(x.value)));
         }
     }
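The practical effect: only the payload travels on the wire, so the consumer can decode the record directly against the registered TypeToken, with no Event wrapper to strip. A sketch of what gets serialized (hypothetical demo class, no broker needed):

import com.zdemo.Event;
import org.redkale.convert.json.JsonConvert;

import java.util.Map;

public class WireFormatDemo {
    public static void main(String[] args) {
        Event e = Event.of("b1", Map.of("k", "v"));
        // The record is keyed by topic and carries only the JSON of value:
        System.out.println(e.topic + " <- " + JsonConvert.root().convertTo(e.value));
        // prints: b1 <- {"k":"v"}
        // (previously the whole Event object, topic and key included, was serialized)
    }
}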

View File: com/zdemo/redis/RedisConsumer.java

@@ -1,6 +1,6 @@
 package com.zdemo.redis;

-import com.zdemo.Event;
+import com.zdemo.AbstractConsumer;
 import com.zdemo.IConsumer;
 import org.redkale.service.Service;
 import org.redkale.util.AnyValue;
@@ -12,7 +12,7 @@ import java.io.OutputStreamWriter;
 import java.net.InetSocketAddress;
 import java.net.Socket;

-public abstract class RedisConsumer<T extends Event> implements IConsumer<T>, Service {
+public abstract class RedisConsumer extends AbstractConsumer implements IConsumer, Service {

     @Resource(name = "property.redis.host")
     private String host = "127.0.0.1";
@@ -21,10 +21,6 @@ public abstract class RedisConsumer<T extends Event> implements IConsumer<T>, Service {
     @Resource(name = "property.redis.port")
     private int port = 6379;

-    public String getGroupid() {
-        return "";
-    }
-
     @Override
     public void init(AnyValue config) {
         new Thread(() -> {
@@ -61,9 +57,9 @@ public abstract class RedisConsumer<T extends Event> implements IConsumer<T>, Service {
                     br.readLine(); //$n len(value)
                     String value = br.readLine(); // value
                     try {
-                        accept(value);
+                        accept(topic, value);
                     } catch (Exception e) {
-                        logger.warning("event accept error :" + value);
+                        logger.warning("topic[" + topic + "] event accept error :" + value);
                         e.printStackTrace();
                     }
                 }
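The readLine() pairs above consume the tail of a Redis RESP push frame. For orientation, a self-contained sketch of the full frame this loop walks through (sample topic and payload are illustrative; the real class reads from the socket):

import java.io.BufferedReader;
import java.io.StringReader;

public class RespPushDemo {
    public static void main(String[] args) throws Exception {
        // A SUBSCRIBE push is a 3-element array: kind, topic, payload.
        String push = "*3\r\n$7\r\nmessage\r\n$2\r\nb1\r\n$9\r\n{\"k\":\"v\"}\r\n";
        BufferedReader br = new BufferedReader(new StringReader(push));
        br.readLine();                // *3       array header
        br.readLine();                // $7       length of "message"
        br.readLine();                // message  push kind
        br.readLine();                // $n       length of topic
        String topic = br.readLine(); // topic
        br.readLine();                // $n       length of value
        String value = br.readLine(); // value
        System.out.println(topic + " -> " + value); // prints: b1 -> {"k":"v"}
    }
}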

View File: com/zdemo/redis/RedisProducer.java

@@ -42,7 +42,7 @@ public class RedisProducer<T extends Event> implements IProducer<T>, Service {
     public void send(T... t) {
         for (T x : t) {
             try {
-                oswPub.write("PUBLISH " + x.getTopic() + " '" + JsonConvert.root().convertTo(x) + "' \r\n");
+                oswPub.write("PUBLISH " + x.topic + " '" + JsonConvert.root().convertTo(x.value) + "' \r\n");
                 oswPub.flush();
             } catch (IOException e) {
                 e.printStackTrace();

View File: com/zdemo/test/AppTest.java

@@ -1,9 +1,17 @@
 package com.zdemo.test;

 import com.zdemo.Event;
-import com.zdemo.redis.RedisProducer;
+import com.zdemo.EventType;
+import com.zdemo.kafak.KafakProducer;
 import org.junit.Test;
 import org.redkale.boot.Application;
+import org.redkale.convert.json.JsonConvert;
+import org.redkale.util.TypeToken;
+
+import java.util.List;
+import java.util.Map;
+
+import static java.util.Arrays.asList;

 /**
  * Publish/subscribe messaging test
@@ -14,13 +22,32 @@ public class AppTest {
     public void runConsumer() {
         try {
             // start the application and begin consuming
-            Application.singleton(MyConsumer.class);
-            try {
-                Thread.sleep(15_000);
-            } catch (InterruptedException e) {
-                e.printStackTrace();
-            }
+            MyConsumer consumer = Application.singleton(MyConsumer.class);
+
+            // add a subscription for topic a1 at runtime
+            consumer.addEventType(EventType.of("a1", new TypeToken<Float>() {
+            }, r -> {
+                System.out.println("received topic A event: " + JsonConvert.root().convertTo(r));
+            }));
+
+            Thread.sleep(5_000);
+
+            // add subscriptions for topics b1 and c1 at runtime
+            consumer.addEventType(
+                    // topic b1
+                    EventType.of("b1", new TypeToken<Map<String, String>>() {
+                    }, r -> {
+                        System.out.println("received topic B event: " + JsonConvert.root().convertTo(r));
+                    }),
+                    // topic c1
+                    EventType.of("c1", new TypeToken<List<Integer>>() {
+                    }, r -> {
+                        System.out.println("received topic C event: " + JsonConvert.root().convertTo(r));
+                    })
+            );
+
+            Thread.sleep(60_000);
         } catch (Exception e) {
             e.printStackTrace();
         }
@@ -29,14 +56,17 @@ public class AppTest {

     @Test
     public void runProducer() {
         try {
-            RedisProducer producer = Application.singleton(RedisProducer.class);
+            KafakProducer producer = Application.singleton(KafakProducer.class);

-            Event event = new Event<>();
-            event.setTopic("c");
-            event.setKey("abx");
-            event.setValue(1f);
+            // send a differently-typed event to each topic
+            float v0 = 1f;
+            Map v1 = Map.of("k", "v");
+            List v2 = asList(1, 2, 3);
+            producer.send(Event.of("a1", v0));
+            producer.send(Event.of("b1", v1));
+            producer.send(Event.of("c1", v2));

-            producer.send(event);
             try {
                 Thread.sleep(1_000);

View File: com/zdemo/test/MyConsumer.java

@@ -1,37 +1,22 @@
 package com.zdemo.test;

-import com.zdemo.Event;
+import com.zdemo.EventType;
 import com.zdemo.kafak.KafakConsumer;
 import org.redkale.convert.json.JsonConvert;
 import org.redkale.util.TypeToken;

-import java.util.Collection;
-import java.util.List;
-
-public class MyConsumer extends KafakConsumer<Event<String>> {
+public class MyConsumer extends KafakConsumer {

     public String getGroupid() {
-        return "group-test"; //questuserimlive
+        return "group-test"; // consumer group name
     }

-    @Override
-    public Collection<String> getSubscribes() {
-        return List.of("a", "b", "c", "vis-log");
-    }
-
-    @Override
-    public TypeToken<Event<String>> getTypeToken() {
-        return new TypeToken<Event<String>>() {
-        };
-    }
-
-    @Override
-    public void accept(Event<String> event) {
-        switch (event.getTopic()) {
-            case "a" -> System.out.println("received topic A event: " + JsonConvert.root().convertTo(event));
-            case "b" -> System.out.println("received topic B event: " + JsonConvert.root().convertTo(event));
-            case "c" -> System.out.println("received topic C event: " + JsonConvert.root().convertTo(event));
-        }
-    }
+    // initial subscription: topic a1, decoded as Float
+    {
+        addEventType(
+                EventType.of("a1", new TypeToken<Float>() {
+                }, r -> {
+                    System.out.println("received topic A event: " + JsonConvert.root().convertTo(r));
+                })
+        );
+    }
 }