This commit is contained in:
snoop 2017-04-14 11:29:55 +09:00
parent fc9a2f79d8
commit cf37b9d0cb
4 changed files with 384 additions and 55 deletions

109
crawler_communicator.go Normal file
View File

@@ -0,0 +1,109 @@
package crawler_manager
import (
"path/filepath"
"google.golang.org/grpc"
"log"
g "loafle.com/overflow/crawler_go/grpc"
"context"
"loafle.com/overflow/crawler_go/config"
"encoding/json"
)
// CallAdd registers a test crawler input (hard-coded redis sensor id)
// with the config service over gRPC.
// NOTE(review): dials the package-level `address` with no port appended,
// unlike CallInit — confirm `address` alone is a complete host:port.
func CallAdd() {
	conn, err := grpc.Dial(address, grpc.WithInsecure())
	if err != nil {
		log.Fatalf("did not connect: %v", err)
	}
	defer conn.Close()

	cc := g.NewConfigClient(conn)
	in := &g.Input{}
	in.Id = "test_redis_sid"
	in.Name = g.Crawlers_HEALTH_REDIS

	out, err := cc.Add(context.Background(), in)
	if err != nil {
		// Return early: `out` is meaningless when the RPC failed, and the
		// original fell through and logged a nil response.
		log.Println(err)
		return
	}
	log.Println(out)
}
// CallInit sends an Init RPC for the crawler described by c, pointing the
// service at the directory that contains the given config file path.
// The target port comes from portMap, filled in when the crawler process
// was launched (see ExeCrawler).
func CallInit(c *config.Config, path string) {
	port := portMap[c.Crawler.Name]
	conn, err := grpc.Dial(address+port, grpc.WithInsecure())
	if err != nil {
		log.Fatalf("did not connect: %v", err)
	}
	defer conn.Close()

	cc := g.NewConfigClient(conn)
	in := &g.Init{}
	// Trailing slash kept deliberately — the server may expect a directory
	// path ending in "/" (TODO confirm against the crawler service).
	in.Path = filepath.Dir(path) + "/"
	in.Name = g.Crawlers(g.Crawlers_value[c.Crawler.Name])

	inArr := &g.InputArray{}
	inArr.In = append(inArr.In, in)

	outInit, errInit := cc.Init(context.Background(), inArr)
	if errInit != nil {
		// Return early instead of logging a nil response after the error.
		log.Println(errInit)
		return
	}
	log.Println(outInit)
}
// CallRemove asks the config service to remove a previously added crawler
// input, identified here by a hard-coded id/name pair.
func CallRemove() {
	conn, err := grpc.Dial(address, grpc.WithInsecure())
	if err != nil {
		log.Fatalf("did not connect: %v", err)
	}
	defer conn.Close()

	cc := g.NewConfigClient(conn)
	inR := &g.Input{}
	inR.Id = "123123"
	inR.Name = g.Crawlers_HEALTH_DNS

	outRem, errRem := cc.Remove(context.Background(), inR)
	if errRem != nil {
		// Return early: outRem is meaningless when the RPC failed.
		log.Println(errRem)
		return
	}
	log.Println(outRem)
}
// CallGet fetches the latest crawl result for a hard-coded sensor id from
// the data service and decodes the payload as a JSON boolean.
// NOTE(review): the target address is hard-coded to localhost:50000 rather
// than using the package-level address/portMap — confirm this is intended.
func CallGet() {
	conn, err := grpc.Dial("localhost:50000", grpc.WithInsecure())
	if err != nil {
		log.Fatalf("did not connect: %v", err)
	}
	defer conn.Close()

	c := g.NewDataClient(conn)
	in := &g.Input{
		Name: g.Crawlers_HEALTH_REDIS,
		Id:   "test_redis_sid",
	}

	out, err := c.Get(context.Background(), in)
	if err != nil {
		log.Fatalf("could not greet: %v", err)
	}

	var check bool
	// The original discarded this error; a malformed payload would log a
	// default false with no hint that decoding failed.
	if err := json.Unmarshal(out.Data, &check); err != nil {
		log.Println(err)
		return
	}
	log.Println(check)
}

View File

@@ -1,83 +1,83 @@
package crawler_manager
import (
"google.golang.org/grpc"
"log"
g "loafle.com/overflow/crawler_go/grpc"
"context"
"loafle.com/overflow/crawler_go/config"
"encoding/json"
"io/ioutil"
)
const (
address = "192.168.1.105:50052"
address = "localhost:"
defaultPort = 50001
)
func CallAdd() {
var currentPort = defaultPort
conn, err := grpc.Dial(address, grpc.WithInsecure())
if err != nil {
log.Fatalf("did not connect: %v", err)
}
defer conn.Close()
var pidMap map[string]int
cc := g.NewConfigClient(conn)
var exeMap map[string]string
in := &g.Input{}
var portMap map[string]string
in.Id = ""
in.Name = g.Crawlers_HEALTH_DNS
func init() {
out, err := cc.Add(context.Background(), in)
if err != nil {
log.Println(err)
}
log.Println(out)
pidMap = make(map[string]int)
exeMap = make(map[string]string)
portMap = make(map[string]string)
//pidMap["HEALTH_REDIS"] = 18281
exeMap[g.Crawlers_HEALTH_REDIS.String()] = "/home/snoop/develop/path/go/src/loafle.com/overflow/tnc";
}
func CallInit() {
func ReadConfig(path string ) *config.Config {
bytes, err := ioutil.ReadFile(path)
conn, err := grpc.Dial(address, grpc.WithInsecure())
if err != nil {
log.Fatalf("did not connect: %v", err)
return nil
}
defer conn.Close()
cc := g.NewConfigClient(conn)
c := config.Config{}
in := &g.Init{}
json.Unmarshal(bytes, &c)
in.Path = "";
in.Name = g.Crawlers_HEALTH_DNS
return &c
inArr := &g.InputArray{}
inArr.In = append(inArr.In, in)
outInit, errInit := cc.Init(context.Background(), inArr)
if errInit != nil {
log.Println(errInit)
}
log.Println(outInit)
}
func CallRemove() {
func ManageCrawler(path string) {
conn, err := grpc.Dial(address, grpc.WithInsecure())
if err != nil {
log.Fatalf("did not connect: %v", err)
c := ReadConfig(path)
exePath := exeMap[c.Crawler.Name]
cs := c.Crawler.Name
pid := pidMap[cs]
if pid > 0 {
b := IsAlive(pid)
if b == false {
ExeCrawler(c, exePath)
CallInit(c, path)
} else {
CallAdd()
}
} else {
ExeCrawler(c, exePath)
CallInit(c, path)
}
defer conn.Close()
cc := g.NewConfigClient(conn)
inR := &g.Input{}
inR.Id = "123123"
inR.Name = g.Crawlers_HEALTH_DNS
outRem, errRem := cc.Remove(context.Background(), inR)
if errRem != nil {
log.Println(errRem)
}
log.Println(outRem)
}
}

View File

@@ -3,8 +3,19 @@ package crawler_manager
import (
"testing"
//grpc1 "google.golang.org/grpc"
"os"
"os/exec"
"path/filepath"
"strings"
)
// TestManageCrawler drives the full manage flow against a sample config file.
func TestManageCrawler(t *testing.T) {
	const cfgPath = "/home/snoop/develop/path/go/src/loafle.com/overflow/crawler_manager_go/config/example.json"
	ManageCrawler(cfgPath)
}
func TestAdd(t *testing.T) {
@@ -13,9 +24,117 @@ func TestAdd(t *testing.T) {
}
func TestCallInit(t *testing.T) {
CallInit()
//CallInit("")
}
func TestCallRemove(t *testing.T) {
CallRemove()
}
}
// TestPid checks that os.FindProcess returns a handle for an arbitrary pid
// and logs the pid it reports back.
func TestPid(t *testing.T) {
	proc, err := os.FindProcess(12314)
	if err != nil {
		t.Log("err : ", err)
	}
	t.Log(proc.Pid)
}
// TestPr explores listing process ids. The original passed "awk" and its
// program as extra ARGUMENTS to ps (exec.Command("ps","-aux","awk",...)),
// which ps rejects — the comment shows a pipeline was intended. Build the
// actual pipe: ps -aux | awk '{print $2}'.
func TestPr(t *testing.T) {
	//ps -aux | awk '{print $2}' | grep 15538
	ps := exec.Command("ps", "-aux")
	awk := exec.Command("awk", "{print $2}")
	awk.Stdin, _ = ps.StdoutPipe()
	ps.Start()
	bytes, _ := awk.Output()
	t.Log(string(bytes))
}
// TestExe measures how much output survives a three-stage pipeline
// (ps -aux | awk '{print $2}' | grep 22373) for a hard-coded pid.
func TestExe(t *testing.T) {
	psCmd := exec.Command("ps", "-aux")
	awkCmd := exec.Command("awk", "{print $2}")
	grepCmd := exec.Command("grep", "22373")

	// Wire each stage's stdout into the next stage's stdin.
	awkCmd.Stdin, _ = psCmd.StdoutPipe()
	grepCmd.Stdin, _ = awkCmd.StdoutPipe()

	psCmd.Start()
	awkCmd.Start()

	// Output starts the final stage and waits for it.
	out, _ := grepCmd.Output()
	t.Log(len(out))
}
// TestExeState extracts the state column for a hard-coded pid via the
// pipeline: ps -aux | awk '{print $2, $7}' | grep 22373 | awk '{print $2}'.
// NOTE(review): $7 is used here while IsState uses $8 — confirm which ps
// column actually holds the state on the target platform.
func TestExeState(t *testing.T) {
ps := exec.Command("ps", "-aux")
awk := exec.Command("awk", "{print $2, $7}")
grep := exec.Command("grep", "22373")
awk2 := exec.Command("awk", "{print $2}")
// Wire stdout of each stage into stdin of the next before starting any.
awk.Stdin, _ = ps.StdoutPipe()
grep.Stdin,_ = awk.StdoutPipe()
awk2.Stdin,_ = grep.StdoutPipe()
ps.Start()
awk.Start()
grep.Start()
// Output starts awk2 and collects the final column.
byt, _ := awk2.Output()
t.Log(len(byt))
t.Log(string(byt))
t.Log(len(strings.TrimSpace(string(byt))))
}
// TestPipe is a minimal sanity check of wiring two commands together:
// ls | wc -l, logging the resulting line count.
func TestPipe(t *testing.T) {
	lsCmd := exec.Command("ls")
	wcCmd := exec.Command("wc", "-l")
	wcCmd.Stdin, _ = lsCmd.StdoutPipe()
	//wcCmd.Stdout = os.Stdout
	_ = lsCmd.Start()
	out, _ := wcCmd.Output()
	t.Log(string(out))
}
// TestIsAlive smoke-tests IsAlive with a hard-coded pid.
// NOTE(review): the boolean result is discarded — nothing is asserted or
// logged here; consider t.Log(IsAlive(...)).
func TestIsAlive(t *testing.T) {
IsAlive(15538)
}
// TestIsState logs whether the hard-coded pid is reported in an
// acceptable (non-zombie) state by IsState.
func TestIsState(t *testing.T) {
	alive := IsState(225931)
	t.Log(alive)
}
// TestCom confirms a plain string equality comparison against "Z"
// (the zombie state marker used by IsState).
func TestCom(t *testing.T) {
	state := "Z"
	switch state {
	case "Z":
		t.Log("aaa")
	}
}
// TestReadConfig loads the sample crawler config from disk and logs the
// decoded result.
func TestReadConfig(t *testing.T) {
	cfgPath := "/home/snoop/develop/path/go/src/loafle.com/overflow/crawler_go/config/example.json"
	t.Log(ReadConfig(cfgPath))
}
// TestDir logs the directory component of a sample config path, as used
// by CallInit when deriving the crawler config directory.
func TestDir(t *testing.T) {
	dir := filepath.Dir("/home/snoop/develop/path/go/src/loafle.com/overflow/crawler_go/config/example.json")
	t.Log(dir)
}
// TestMyPid logs the pid of the test process itself.
func TestMyPid(t *testing.T) {
	pid := os.Getpid()
	t.Log(pid)
}

101
crawler_runner.go Normal file
View File

@@ -0,0 +1,101 @@
package crawler_manager
import (
"log"
"loafle.com/overflow/crawler_go/config"
"os/exec"
"time"
"strconv"
"strings"
)
//FIXME:: process check!!!!!!
// ExeCrawler launches the crawler binary at exePath, retrying on
// successive ports until the process comes up healthy. On success the
// chosen port is recorded in portMap under the crawler's name.
// NOTE(review): the loop has no retry limit — if the binary never starts
// it spins forever (the original had the same behavior).
func ExeCrawler(c *config.Config, exePath string) {
	log.Println("Run Crawler")
	for {
		pArg := "-Port=" + strconv.Itoa(currentPort)
		cmd := exec.Command(exePath, pArg)
		if err := cmd.Start(); err != nil {
			// Start failed: cmd.Process is nil, so the liveness checks
			// below would panic (the original dereferenced it anyway).
			// Log and try the next port instead.
			log.Println(err)
			currentPort++
			continue
		}
		// Give the process a moment to bind its port before probing it.
		time.Sleep(2 * time.Second)
		log.Println("current Pid : ", cmd.Process.Pid)
		if !IsAlive(cmd.Process.Pid) || !IsState(cmd.Process.Pid) {
			log.Println("run fail port:", currentPort)
			currentPort++
			continue
		}
		portMap[c.Crawler.Name] = strconv.Itoa(currentPort)
		log.Println("current port:", currentPort)
		//FIXME::remove
		break
	}
}
// IsAlive reports whether a process with the given pid currently appears
// in the `ps -aux` listing. Negative pids are rejected outright.
func IsAlive(pid int) bool {
	if pid < 0 {
		return false
	}
	ps := exec.Command("ps", "-aux")
	awk := exec.Command("awk", "{print $2}")
	awk.Stdin, _ = ps.StdoutPipe()
	if err := ps.Start(); err != nil {
		return false
	}
	out, err := awk.Output()
	// Reap ps so it does not linger as a zombie (the original never waited).
	ps.Wait()
	if err != nil {
		return false
	}
	// Compare whole lines. The original piped through `grep <pid>`, which
	// also matches pids merely CONTAINING the digits (123 matches 1234);
	// its length check then failed, yielding a false negative for a
	// process that is actually alive.
	target := strconv.Itoa(pid)
	for _, line := range strings.Split(string(out), "\n") {
		if strings.TrimSpace(line) == target {
			return true
		}
	}
	return false
}
// IsState reports whether the process with the given pid is in an
// acceptable state: it extracts the ps state column and returns false
// when no matching line is found or the state contains "Z" (zombie).
// Pipeline: ps -aux | awk '{print $2, $8}' | grep <pid> | awk '{print $2}'.
// NOTE(review): grep matches substrings, so another pid containing these
// digits can leak extra state letters into the output — confirm acceptable.
func IsState(pid int) bool {
ps := exec.Command("ps", "-aux")
// $2 is the pid column, $8 the state column of `ps -aux` output.
awk := exec.Command("awk", "{print $2, $8}")
grep := exec.Command("grep", strconv.Itoa(pid))
awk2 := exec.Command("awk", "{print $2}")
// Wire stdout of each stage into stdin of the next before starting any.
awk.Stdin, _ = ps.StdoutPipe()
grep.Stdin,_ = awk.StdoutPipe()
awk2.Stdin,_ = grep.StdoutPipe()
ps.Start()
awk.Start()
grep.Start()
// Output starts the final stage and collects the state column.
byt, _ := awk2.Output()
if len(byt) <= 0 {
// No matching process line: treat as not running / bad state.
return false
}
str := string(byt)
str = strings.TrimSpace(str)
log.Println(str)
// "Z" anywhere in the state string marks a zombie process.
idx := strings.Index(str, "Z")
if idx >= 0 {
return false
}
return true
}