fixed by crawler

snoop 2017-11-10 17:33:29 +09:00
parent 0d81aa25a5
commit acce66b50f
2 changed files with 106 additions and 36 deletions


@@ -2,12 +2,14 @@ package main

 import (
 	"fmt"
 	"git.loafle.net/overflow/ssh_crawler/crawler"
 	"encoding/json"
 	"io/ioutil"
 	config "git.loafle.net/overflow/overflow_commons_go/modules/config/model"
 	"log"
 	"git.loafle.net/overflow/ssh_crawler/stat"
+	"git.loafle.net/overflow/ssh_crawler"
 )

 func main() {
@@ -15,6 +17,7 @@ func main() {
 	data, err := ioutil.ReadFile("./config/test.json")
 	if err != nil {
 		log.Fatal(err)
 	}
@@ -24,50 +27,27 @@ func main() {
 		log.Fatal(err)
 	}
-	start(&cc)
-}

-func start(c *config.Config) {
-	ip := c.Target.Connection.Ip
-	port := c.Target.Connection.Port
-	user := c.Target.Auth["id"].(string)
-	pw := c.Target.Auth["pw"].(string)
-	keyFilePathObj := c.Target.Auth["keyFilePath"]
-	var keyFilePath string = ""
-	if keyFilePathObj != nil {
-		keyFilePath = keyFilePathObj.(string)
-	}
+	ssc := ssh_crawler.NewSSHCrawler()

-	cr, err := crawler.New(ip, port, user, pw, keyFilePath)
-	if err != nil {
-		fmt.Println(err)
-	}
+	bb, err := ssc.Internal(cc)
+	if err != nil {
+		log.Fatal(err)
+	}

-	var inter crawler.SSHCrawlerModuler
-	for _, item := range c.Items {
-		mode := item.QueryInfo.Extend["mode"].(string)
-		switch mode {
-		case "cpu" :
-			inter = stat.CPUStat{}
-			break
-		case "mem" :
-			inter = stat.MemStat{}
-			break
-		default :
-			continue
-		}
-		ch := make(chan interface{})
-		cr.Process(inter, ch, item)
-		print(<-ch)
-	}
+	var m map[string]string
+	err = json.Unmarshal(bb, &m)
+	if err != nil {
+		log.Fatal(err)
+	}
+	log.Println(m)
 }

 func main11() {
 	//const ip = "192.168.1.215"
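
For readability, here is roughly how main() reads after this change, pieced together from the hunks above. This is a sketch, not the exact committed file: blank lines, the untouched main11(), and the imports it still needs (fmt, crawler, stat) are omitted.

package main

import (
	"encoding/json"
	"io/ioutil"
	"log"

	config "git.loafle.net/overflow/overflow_commons_go/modules/config/model"
	"git.loafle.net/overflow/ssh_crawler"
)

func main() {
	// Read the crawl description from the local test config.
	data, err := ioutil.ReadFile("./config/test.json")
	if err != nil {
		log.Fatal(err)
	}

	var cc config.Config
	if err := json.Unmarshal(data, &cc); err != nil {
		log.Fatal(err)
	}

	// All crawling now lives in the ssh_crawler package; main only
	// decodes and prints the JSON result.
	ssc := ssh_crawler.NewSSHCrawler()
	bb, err := ssc.Internal(cc)
	if err != nil {
		log.Fatal(err)
	}

	var m map[string]string
	if err := json.Unmarshal(bb, &m); err != nil {
		log.Fatal(err)
	}
	log.Println(m)
}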

ssh_crawler.go (new file, 90 additions)

@@ -0,0 +1,90 @@
package ssh_crawler

import (
	"encoding/json"

	rpcCrawler "git.loafle.net/overflow/crawler_go"
	config "git.loafle.net/overflow/overflow_commons_go/modules/config/model"
	"git.loafle.net/overflow/ssh_crawler/crawler"
	"git.loafle.net/overflow/ssh_crawler/stat"
)

// SSHCrawler adapts the SSH stat crawler to the generic crawler RPC
// interface from crawler_go.
type SSHCrawler struct {
	rpcCrawler.CrawlerImpl
}

func NewSSHCrawler() *SSHCrawler {
	return &SSHCrawler{}
}

// Internal runs the crawl described by params and returns the collected
// stats as a JSON-encoded map[string]string.
func (r *SSHCrawler) Internal(params config.Config) ([]byte, error) {
	m, err := r.start(&params)
	if err != nil {
		return nil, err
	}
	return json.Marshal(m)
}

func (r *SSHCrawler) start(c *config.Config) (map[string]string, error) {
	ip := c.Target.Connection.Ip
	port := c.Target.Connection.Port
	user := c.Target.Auth["id"].(string)
	pw := c.Target.Auth["pw"].(string)

	// The key file is optional: fall back to password-only auth when the
	// config carries no "keyFilePath" entry.
	keyFilePath := ""
	if v := c.Target.Auth["keyFilePath"]; v != nil {
		keyFilePath = v.(string)
	}

	cr, err := crawler.New(ip, port, user, pw, keyFilePath)
	if err != nil {
		return nil, err
	}

	resultMap := make(map[string]string)
	for _, item := range c.Items {
		// Pick the stat module for this item; unknown modes are skipped.
		var inter crawler.SSHCrawlerModuler
		switch item.QueryInfo.Extend["mode"].(string) {
		case "cpu":
			inter = stat.CPUStat{}
		case "mem":
			inter = stat.MemStat{}
		default:
			continue
		}

		ch := make(chan interface{})
		cr.Process(inter, ch, item)

		// Process reports either a result map or an error on the channel.
		switch v := (<-ch).(type) {
		case map[string]string:
			// Merge this item's stats into the combined result.
			for k, val := range v {
				resultMap[k] = val
			}
		case error:
			return nil, v
		}
	}
	return resultMap, nil
}
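
One behavior of the new start() worth noting: every item's stats are folded into a single flat map, so the stat modules must emit non-colliding keys, and a later item silently overwrites any key it shares with an earlier one. A tiny self-contained illustration of that merge follows; the key names are made up, since the real keys come from stat.CPUStat and stat.MemStat, which this diff does not show.

package main

import "fmt"

func main() {
	// Hypothetical per-item results; actual keys depend on the stat modules.
	cpu := map[string]string{"cpu.usage": "12.5"}
	mem := map[string]string{"mem.free": "204800"}

	// The same merge loop start() runs for each item result.
	result := make(map[string]string)
	for _, part := range []map[string]string{cpu, mem} {
		for k, v := range part {
			result[k] = v
		}
	}
	fmt.Println(result) // map[cpu.usage:12.5 mem.free:204800]
}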