Change: log output

lxy 2020-09-28 20:28:29 +08:00
parent 4b3849b66e
commit 459b31e750
5 changed files with 19 additions and 49 deletions

KafakConsumer.java

@@ -13,7 +13,6 @@ import org.redkale.util.AnyValue;
 import javax.annotation.Resource;
 import java.io.File;
 import java.io.FileInputStream;
-import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.time.Duration;
 import java.util.Properties;
@@ -48,7 +47,7 @@ public abstract class KafakConsumer extends AbstractConsumer implements IConsume
                 queue.put(eventType);
             }
         } catch (InterruptedException e) {
-            e.printStackTrace();
+            logger.log(Level.WARNING, "", e);
         }
     }
@@ -69,7 +68,7 @@ public abstract class KafakConsumer extends AbstractConsumer implements IConsume
             KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
             consumer.subscribe(asList("_"));
             while (true) {
-                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(10_000));
+                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1_000));
                 records.forEach(record -> {
                     String topic = record.topic();
                     long offset = record.offset();
@@ -77,8 +76,7 @@ public abstract class KafakConsumer extends AbstractConsumer implements IConsume
                     try {
                         accept(topic, value);
                     } catch (Exception e) {
-                        logger.warning(String.format("topic[%s] event accept error, offset=%s,value:%s", topic, offset, value));
-                        e.printStackTrace();
+                        logger.log(Level.WARNING, String.format("topic[%s] event accept error, offset=%s,value:%s", topic, offset, value), e);
                     }
                 });
@@ -94,10 +92,8 @@ public abstract class KafakConsumer extends AbstractConsumer implements IConsume
                 }
             }, "thread-consumer-[" + getGroupid() + "]").start();
-        } catch (FileNotFoundException e) {
-            e.printStackTrace();
         } catch (IOException e) {
-            e.printStackTrace();
+            logger.log(Level.WARNING, "", e);
         }
     }
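The recurring fix in this commit replaces e.printStackTrace(), which writes straight to stderr and ignores log configuration, with logger.log(Level.WARNING, msg, e), which routes the message and the stack trace through the java.util.logging handlers together. A minimal, self-contained sketch of the pattern (LoggingSketch and its message are illustrative, not from this repo):

import java.util.logging.Level;
import java.util.logging.Logger;

public class LoggingSketch {
    private static final Logger logger = Logger.getLogger(LoggingSketch.class.getName());

    public static void main(String[] args) {
        try {
            throw new IllegalStateException("boom");
        } catch (IllegalStateException e) {
            // Passing the Throwable as the third argument makes the configured
            // handler/formatter print the message and the full stack trace
            // together, with the logger name and a timestamp.
            logger.log(Level.WARNING, "event accept error", e);
        }
    }
}

Separately, the poll timeout drops from Duration.ofMillis(10_000) to Duration.ofMillis(1_000): consumer.poll() blocks for at most that long when no records are available, so the loop now wakes up every second instead of every ten.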

KafakProducer.java

@@ -12,9 +12,9 @@ import org.redkale.util.AnyValue;
 import javax.annotation.Resource;
 import java.io.File;
 import java.io.FileInputStream;
-import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.Properties;
+import java.util.logging.Level;

 /**
  * Producer
@@ -31,26 +31,23 @@ public class KafakProducer<T extends Event> implements IProducer<T>, Service {
     @Override
     public void init(AnyValue config) {
         File file = new File(APP_HOME, "conf/kafak.properties");
-        if (!file.exists()) {
-            logger.warning(String.format("------\n%s (The system cannot find the file specified.)\nkafak producer not initialized; publishing messages via kafak is unavailable\n------", file.getPath()));
-            return;
-        }
         try (FileInputStream fis = new FileInputStream(file)) {
             Properties props = new Properties();
             props.load(fis);
             producer = new KafkaProducer(props);
-        } catch (FileNotFoundException e) {
-            e.printStackTrace();
         } catch (IOException e) {
-            e.printStackTrace();
+            logger.log(Level.WARNING, "kafak producer not initialized; publishing messages via kafak is unavailable", e);
         }
     }

     @Override
     public void send(T... t) {
         for (T x : t) {
-            producer.send(new ProducerRecord(x.topic, JsonConvert.root().convertTo(x.value)));
+            String v = JsonConvert.root().convertTo(x.value);
+            if (v.startsWith("\"") && v.endsWith("\"")) {
+                v = v.substring(1, v.length() - 1);
+            }
+            producer.send(new ProducerRecord(x.topic, v));
        }
    }
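The other change here: send() now strips the surrounding double quotes that a JSON encoder adds when it serializes a bare String, so plain-string payloads go into the Kafka record unquoted while JSON objects and arrays pass through untouched. A short sketch of that behavior with no redkale dependency (QuoteStripSketch and stripJsonQuotes are illustrative names; a length guard is added for the degenerate single-character case, which the commit's version omits):

public class QuoteStripSketch {

    // Mirrors the commit's logic: drop one pair of surrounding JSON quotes.
    static String stripJsonQuotes(String v) {
        if (v.length() >= 2 && v.startsWith("\"") && v.endsWith("\"")) {
            return v.substring(1, v.length() - 1);
        }
        return v;
    }

    public static void main(String[] args) {
        System.out.println(stripJsonQuotes("\"hello\""));  // hello  (what an encoder emits for the String "hello")
        System.out.println(stripJsonQuotes("{\"a\":1}"));  // {"a":1}  (objects are left as-is)
    }
}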

RedisConsumer.java

@@ -11,6 +11,7 @@ import java.io.InputStreamReader;
 import java.io.OutputStreamWriter;
 import java.net.InetSocketAddress;
 import java.net.Socket;
+import java.util.logging.Level;

 public abstract class RedisConsumer extends AbstractConsumer implements IConsumer, Service {
@@ -59,13 +60,12 @@ public abstract class RedisConsumer extends AbstractConsumer implements IConsume
                     try {
                         accept(topic, value);
                     } catch (Exception e) {
-                        logger.warning("topic[" + topic + "] event accept error :" + value);
-                        e.printStackTrace();
+                        logger.log(Level.WARNING, "topic[" + topic + "] event accept error :" + value, e);
                     }
                 }
             }
         } catch (Exception e) {
-            e.printStackTrace();
+            logger.log(Level.WARNING, "Redis Consumer initialization failed!", e);
         }
     }).start();
 }

RedisProducer.java

@@ -11,6 +11,7 @@ import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.net.InetSocketAddress;
 import java.net.Socket;
+import java.util.logging.Level;

 public class RedisProducer<T extends Event> implements IProducer<T>, Service {
@@ -34,7 +35,7 @@ public class RedisProducer<T extends Event> implements IProducer<T>, Service {
             oswPub.write("AUTH " + password + "\r\n");
             oswPub.flush();
         } catch (IOException e) {
-            e.printStackTrace();
+            logger.log(Level.WARNING, "", e);
         }
     }
@@ -45,7 +46,7 @@ public class RedisProducer<T extends Event> implements IProducer<T>, Service {
                 oswPub.write("PUBLISH " + x.topic + " '" + JsonConvert.root().convertTo(x.value) + "' \r\n");
                 oswPub.flush();
             } catch (IOException e) {
-                e.printStackTrace();
+                logger.log(Level.WARNING, "", e);
             }
         }
     }

AppTest.java

@@ -22,32 +22,8 @@ public class AppTest {
     public void runConsumer() {
         try {
             // Start the consumer and begin listening for events
-            MyConsumer consumer = Application.singleton(MyConsumer.class);
-            // Subscribe to an additional topic a1
-            consumer.addEventType(EventType.of("a1", new TypeToken<Float>() {
-            }, r -> {
-                System.out.println("Received a message, topic A event: " + JsonConvert.root().convertTo(r));
-            }));
-            Thread.sleep(5_000);
-            // Subscribe to additional topics b1 and c1
-            consumer.addEventType(
-                    // Subscribe to topic b1
-                    EventType.of("b1", new TypeToken<Map<String, String>>() {
-                    }, r -> {
-                        System.out.println("Received a message, topic B event: " + JsonConvert.root().convertTo(r));
-                    }),
-                    // Subscribe to topic c1
-                    EventType.of("c1", new TypeToken<List<Integer>>() {
-                    }, r -> {
-                        System.out.println("Received a message, topic C event: " + JsonConvert.root().convertTo(r));
-                    })
-            );
-            Thread.sleep(60_000);
+            Application.singleton(MyConsumer.class);
+            Thread.sleep(60_000 * 60);
         } catch (Exception e) {
             e.printStackTrace();
         }