ResultSet handling

jackdaw@loafle.com 2017-04-25 19:48:49 +09:00
parent f0b9602f6d
commit 2198e40515
3 changed files with 62 additions and 34 deletions

View File

@@ -4,12 +4,10 @@ import com.loafle.overflow.crawler.Crawler;
import com.loafle.overflow.crawler.config.Config;
import com.loafle.overflow.crawler.config.Item;
import com.loafle.overflow.crawler.config.Query;
import com.loafle.overflow.crawler.result.OFResultSet;
import redis.clients.jedis.Jedis;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;
@@ -30,7 +28,7 @@ public class RedisCralwer extends Crawler {
return collectMetric(c);
}
protected Map<String, Object> collectMetric(Config c) {
protected Object collectMetric(Config c) {
String targetIP = c.getTarget().getConnection().getIp();
int targetPort = Integer.valueOf(c.getTarget().getConnection().getPort());
@@ -39,7 +37,6 @@ public class RedisCralwer extends Crawler {
Jedis jedis = null;
List<Item> items = c.getItems();
Map<String,Object> returnMap = new HashMap<>();
try {
jedis = new Jedis(targetIP, targetPort,false);
@@ -50,20 +47,23 @@
}
}
//Map<String, Object> rr = (Map<String, Object>) parseToMap(jedis.info());
List<OFResultSet> resultSetList = new ArrayList<>();
for (Item item : items) {
OFResultSet resultSet = OFResultSet.newInstance(item);
List<String> metrics = item.getMetrics();
Query query = item.getQuery();
String queryString = (String) query.getQueryInfo().get("query");
String info = jedis.info(queryString);
Map<String, String> resultMap = (Map<String, String>) parseToMap(info).get(queryString);
for (Query query : item.getQueries()) {
String info = jedis.info(query.getQuery());
Map<String, String> resultMap = (Map<String, String>) parseToMap(info).get(query.getQuery());
for (int index =0 ; index < query.getKeys().size() ; ++index) {
returnMap.put(metrics.get(index),resultMap.get(query.getKeys().get(index)));
}
}
resultSet.addRow(parse(resultMap,resultSet.getMeta()));
resultSetList.add(resultSet);
}
return resultSetList;
} catch (Exception e) {
throw e;
} finally {
@@ -72,7 +72,17 @@ public class RedisCralwer extends Crawler {
}
}
return returnMap;
}
private List<String> parse(Map<String, String> dataMap, Map<String, Integer> meta) {
List<String> row = new ArrayList<>(Arrays.asList(new String[meta.size()]));
for (Map.Entry<String, Integer> info : meta.entrySet()) {
String data = dataMap.get(info.getKey());
data = data.trim();
row.set(info.getValue().intValue(),data);
}
return row;
}
public Map<String, Object> parseToMap(String source) {
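
The OFResultSet class referenced above is not included in this commit. Based only on the calls visible in the diff (OFResultSet.newInstance(item), getMeta(), addRow(...), and getData() in the test below), a minimal sketch of the shape it presumably has could look like this; the field layout and internals are assumptions, not the actual com.loafle.overflow.crawler.result.OFResultSet source:

// Hypothetical sketch of OFResultSet, inferred from how the diff uses it.
// The real class in com.loafle.overflow.crawler.result may differ.
import com.loafle.overflow.crawler.config.Item;
import java.util.*;

public class OFResultSet {
    // assumed: maps a query key (Redis INFO field name) to a column index,
    // so RedisCralwer.parse() can look values up in the INFO map by key
    private final Map<String, Integer> meta = new HashMap<>();
    private final List<List<String>> rows = new ArrayList<>();

    public static OFResultSet newInstance(Item item) {
        OFResultSet rs = new OFResultSet();
        List<String> keys = item.getQuery().getKeys();
        for (int i = 0; i < keys.size(); i++) {
            rs.meta.put(keys.get(i), i);
        }
        return rs;
    }

    public Map<String, Integer> getMeta() { return meta; }

    public void addRow(List<String> row) { rows.add(row); }

    // assumed: flattens the rows back into key/value pairs, matching how
    // RedisCralwerTest.print() iterates over getData()
    public Map<String, String> getData() {
        Map<String, String> data = new HashMap<>();
        for (List<String> row : rows) {
            for (Map.Entry<String, Integer> e : meta.entrySet()) {
                data.put(e.getKey(), row.get(e.getValue()));
            }
        }
        return data;
    }
}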

View File

@@ -2,11 +2,14 @@ package com.loafle.overflow.crawler.redis;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.loafle.overflow.crawler.config.Config;
import com.loafle.overflow.crawler.result.OFResultSet;
import org.junit.Ignore;
import org.junit.Test;
import redis.clients.jedis.Jedis;
import java.io.File;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.*;
@@ -25,10 +28,22 @@ public class RedisCralwerTest {
Config c = mapper.readValue(new File(path),Config.class);
RedisCralwer rc = new RedisCralwer();
Map<String,Object> data = (Map<String, Object>) rc.getInternal(c);
assertEquals(data.size(),7);
List<OFResultSet> result = (List<OFResultSet>) rc.getInternal(c);
print("",result);
}
private void print(String s, List<OFResultSet> mm) {
System.out.println(s);
Map<String,String> m = new HashMap<>();
for(OFResultSet r : mm) {
m.putAll(r.getData());
}
for (Map.Entry<String, String> item : m.entrySet()) {
System.out.println("key=" + item.getKey() + " ||||||||||| value=" + item.getValue());
}
}
}

View File

@@ -26,9 +26,10 @@
"cpu.usage.system_children",
"cpu.usage.user_children"
],
"queries":[
{
"query" : "CPU" ,
"query": {
"queryInfo" : {
"query" : "CPU"
},
"keys" : [
"used_cpu_sys",
"used_cpu_user",
@@ -36,7 +37,6 @@
"used_cpu_user_children"
]
}
]
},
{
"metrics" : [
@@ -44,16 +44,19 @@
"memory.usage.rss",
"memory.usage.reak"
],
"queries":[
{
"query" : "Memory" ,
"keys" : [
"used_memory",
"used_memory_rss",
"used_memory_peak"
]
}
]
"query":
{
"queryInfo" : {
"query" : "Memory"
},
"keys" : [
"used_memory",
"used_memory_rss",
"used_memory_peak"
]
}
}
]
}
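
For reference, the reshaped "query" block above lines up with the accessors the crawler calls (item.getQuery(), query.getQueryInfo(), query.getKeys()). Below is a rough sketch of how the config classes could look for Jackson to bind this JSON; the class and field layout is an assumption inferred from those calls, not the actual com.loafle.overflow.crawler.config sources:

// Item.java (hypothetical sketch)
import java.util.List;

public class Item {
    private List<String> metrics; // the "metrics" array
    private Query query;          // the single "query" object (was a "queries" array before this commit)

    public List<String> getMetrics() { return metrics; }
    public void setMetrics(List<String> metrics) { this.metrics = metrics; }
    public Query getQuery() { return query; }
    public void setQuery(Query query) { this.query = query; }
}

// Query.java (hypothetical sketch)
import java.util.List;
import java.util.Map;

public class Query {
    private Map<String, Object> queryInfo; // "queryInfo": { "query": "CPU" } -> Redis INFO section name
    private List<String> keys;             // "keys": INFO field names to collect for this item

    public Map<String, Object> getQueryInfo() { return queryInfo; }
    public void setQueryInfo(Map<String, Object> queryInfo) { this.queryInfo = queryInfo; }
    public List<String> getKeys() { return keys; }
    public void setKeys(List<String> keys) { this.keys = keys; }
}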