2017-04-13 03:43:39 +00:00
|
|
|
package collector_go
|
|
|
|
|
|
|
|
import (
|
2017-04-14 10:09:51 +00:00
|
|
|
sm "loafle.com/overflow/collector_go/scheduler"
|
2017-04-13 03:43:39 +00:00
|
|
|
"log"
|
|
|
|
)
|
|
|
|
|
2017-04-14 10:09:51 +00:00
|
|
|
// scheduleInfo describes a single sensor collection schedule.
type scheduleInfo struct {
	// sensorId identifies the sensor this schedule belongs to.
	sensorId string
	// interval is the collection period; kept as a string because
	// the scheduler's NewSchedule takes it in string form.
	interval string
}
|
|
|
|
// schedules is the list of schedule entries managed by the collector.
type schedules []*scheduleInfo
|
2017-04-13 03:43:39 +00:00
|
|
|
|
2017-04-14 10:09:51 +00:00
|
|
|
// Collector drives periodic sensor data collection through its scheduler.
type Collector struct {
	// scheduler runs the registered per-sensor schedules.
	scheduler sm.Scheduler
}
|
2017-04-13 03:43:39 +00:00
|
|
|
|
2017-04-14 10:09:51 +00:00
|
|
|
func (c *Collector) Start() {
|
|
|
|
result := c.genSchedules()
|
|
|
|
if len(result) <= 0 {
|
|
|
|
return
|
2017-04-13 03:43:39 +00:00
|
|
|
}
|
2017-04-14 10:09:51 +00:00
|
|
|
c.scheduler = sm.Scheduler{}
|
|
|
|
c.scheduler.Init()
|
|
|
|
for i := 0; i < len(result); i++ {
|
|
|
|
r := result[i]
|
|
|
|
c.scheduler.NewSchedule(r.sensorId, r.interval, c.collect)
|
|
|
|
}
|
|
|
|
}
|
2017-04-13 03:43:39 +00:00
|
|
|
|
2017-04-14 10:09:51 +00:00
|
|
|
// Stop removes every registered schedule, halting all periodic
// collection. The scheduler value itself is left in place.
func (c *Collector) Stop() {
	c.scheduler.RemoveAllSchedule()
}
|
2017-04-13 03:43:39 +00:00
|
|
|
|
2017-04-14 10:09:51 +00:00
|
|
|
func (c *Collector) AddSensor(container, crawler, id string) {
|
|
|
|
s := c.genSchedule(container, crawler, id)
|
|
|
|
c.scheduler.NewSchedule(s.sensorId, s.interval, c.collect)
|
|
|
|
}
|
2017-04-13 03:43:39 +00:00
|
|
|
|
2017-04-14 10:09:51 +00:00
|
|
|
// RemoveSensor is an empty placeholder for unregistering a sensor's
// schedule.
// TODO: accept a sensor id and call the scheduler's removal API.
func (c *Collector) RemoveSensor() {
}
|
2017-04-13 03:43:39 +00:00
|
|
|
|
2017-04-14 10:09:51 +00:00
|
|
|
// collect is the scheduler callback invoked on each tick for the
// schedule identified by id. Currently it only logs the invocation;
// presumably real data collection is to be added here — confirm intent.
func (c *Collector) collect(id string) {
	log.Println("collect ", id)
}
|
2017-04-13 03:43:39 +00:00
|
|
|
|
2017-04-14 10:09:51 +00:00
|
|
|
func (c *Collector) genSchedules() schedules {
|
|
|
|
ss := make([]*scheduleInfo, 0)
|
|
|
|
s1 := &scheduleInfo{
|
|
|
|
sensorId: "aa",
|
|
|
|
interval: "3",
|
|
|
|
}
|
|
|
|
s2 := &scheduleInfo{
|
|
|
|
sensorId: "bb",
|
|
|
|
interval: "5",
|
|
|
|
}
|
|
|
|
ss = append(ss, s1)
|
|
|
|
ss = append(ss, s2)
|
|
|
|
return ss
|
|
|
|
}
|
2017-04-13 03:43:39 +00:00
|
|
|
|
2017-04-14 10:09:51 +00:00
|
|
|
func (c *Collector) genSchedule(container, crawler, id string) *scheduleInfo {
|
|
|
|
s := &scheduleInfo{
|
|
|
|
sensorId: "cc",
|
|
|
|
interval: "5",
|
2017-04-13 03:43:39 +00:00
|
|
|
}
|
2017-04-14 10:09:51 +00:00
|
|
|
return s
|
|
|
|
}
|
2017-04-13 03:43:39 +00:00
|
|
|
|
2017-04-14 10:09:51 +00:00
|
|
|
// connection
|
|
|
|
//func CallGet() {
|
|
|
|
//
|
|
|
|
// conn, err := grpc.Dial(address, grpc.WithInsecure())
|
|
|
|
// if err != nil {
|
|
|
|
// log.Fatalf("did not connect: %v", err)
|
|
|
|
// }
|
|
|
|
// defer conn.Close()
|
|
|
|
//
|
|
|
|
//
|
|
|
|
// dc := g.NewDataClient(conn);
|
|
|
|
//
|
|
|
|
// in := &g.Input{}
|
|
|
|
//
|
|
|
|
// in.Id = ""
|
|
|
|
// in.Name = g.Crawlers_HEALTH_DNS
|
|
|
|
//
|
|
|
|
//
|
|
|
|
//
|
|
|
|
// out, err := dc.Get(context.Background(), in) ////
|
|
|
|
//
|
|
|
|
// if err != nil {
|
|
|
|
// log.Println(err)
|
|
|
|
// }
|
|
|
|
// log.Println(out)
|
|
|
|
//
|
|
|
|
//}
|