@@ -2,14 +2,15 @@ package web
 
 import (
 	"net/http"
+	"strconv"
 	"strings"
+	"time"
 
 	"github.com/gorilla/mux"
 	"gopkg.in/mgo.v2"
 	"gopkg.in/mgo.v2/bson"
 
 	"sunteng/cronsun/models"
-	"time"
 )
 
 type JobLog struct{}
@@ -37,41 +38,52 @@ func (jl *JobLog) GetDetail(w http.ResponseWriter, r *http.Request) {
 }
 
 func (jl *JobLog) GetList(w http.ResponseWriter, r *http.Request) {
-	// nodes := GetStringArrayFromQuery("nodes", ",", r)
-	// names := GetStringArrayFromQuery("names", ",", r)
-	// begin := r.FormValue("begin")
-	// end := r.FormValue("end")
-	list := []*models.JobLog{
-		&models.JobLog{
-			Id:        bson.NewObjectId(),
-			Name:      "test1",
-			JobId:     "test1",
-			Node:      "192.168.1.2",
-			ExitCode:  0,
-			BeginTime: time.Now(),
-			EndTime:   time.Now().Add(time.Duration(time.Minute)),
-		},
-		&models.JobLog{
-			Id:        bson.NewObjectId(),
-			Name:      "test2",
-			JobId:     "test2",
-			Node:      "192.168.1.2",
-			ExitCode:  1,
-			BeginTime: time.Now(),
-			EndTime:   time.Now().Add(time.Duration(350 * time.Millisecond)),
-		},
-		&models.JobLog{
-			Id:        bson.NewObjectId(),
-			Name:      "test3",
-			JobId:     "test3",
-			Node:      "192.168.1.3",
-			ExitCode:  0,
-			BeginTime: time.Now(),
-			EndTime:   time.Now().Add(time.Duration(time.Hour * 12)),
-		},
-	}
+	nodes := GetStringArrayFromQuery("nodes", ",", r)
+	names := GetStringArrayFromQuery("names", ",", r)
+	begin := getTime(r.FormValue("begin"))
+	end := getTime(r.FormValue("end"))
+	page := getPage(r.FormValue("page"))
+	pageSize := getPageSize(r.FormValue("pageSize"))
+	sort := "-beginTime"
+	if r.FormValue("sort") == "1" {
+		sort = "beginTime"
+	}
+
+	query := bson.M{}
+	if len(nodes) > 0 {
+		query["node"] = bson.M{"$in": nodes}
+	}
+	if len(names) > 0 {
+		var search []bson.M
+		for _, k := range names {
+			k = strings.TrimSpace(k)
+			if len(k) == 0 {
+				continue
+			}
+			search = append(search, bson.M{"name": bson.M{"$regex": bson.RegEx{Pattern: k, Options: "i"}}})
+		}
+		query["$or"] = search
+	}
+
+	if !begin.IsZero() {
+		query["beginTime"] = bson.M{"$gte": begin}
+	}
+	if !end.IsZero() {
+		query["endTime"] = bson.M{"$lte": end}
+	}
+
+	var pager struct {
+		Total int               `json:"total"`
+		List  []*models.JobLog  `json:"list"`
+	}
+	var err error
+	pager.List, pager.Total, err = models.GetJobLogList(query, page, pageSize, sort)
+	if err != nil {
+		outJSONError(w, http.StatusInternalServerError, err.Error())
+		return
+	}
 
-	outJSON(w, list)
+	outJSON(w, pager)
 }
 
 func GetStringArrayFromQuery(name, sep string, r *http.Request) (arr []string) {
@@ -82,3 +94,28 @@ func GetStringArrayFromQuery(name, sep string, r *http.Request) (arr []string) {
 	return strings.Split(val, sep)
 }
+
+func getPage(page string) int {
+	p, err := strconv.Atoi(page)
+	if err != nil || p < 1 {
+		p = 1
+	}
+
+	return p
+}
+
+func getPageSize(ps string) int {
+	p, err := strconv.Atoi(ps)
+	if err != nil || p < 1 {
+		p = 50
+	} else if p > 200 {
+		p = 200
+	}
+	return p
+}
+
+func getTime(t string) time.Time {
+	t = strings.TrimSpace(t)
+	time, _ := time.Parse("2006-01-02", t)
+	return time
+}
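
For reference, here is a small, self-contained sketch of the filter document the new GetList builds. The parameter values (nodes=192.168.1.2,192.168.1.3, names=backup, begin=2017-03-01) are invented for illustration; the $in/$or/$gte shape and the case-insensitive RegEx mirror the handler code in the hunk above.

package main

import (
	"fmt"
	"time"

	"gopkg.in/mgo.v2/bson"
)

func main() {
	// What getTime would return for begin=2017-03-01 (time.Parse yields UTC).
	begin, _ := time.Parse("2006-01-02", "2017-03-01")

	// The equivalent of query after GetList has applied the node, name and
	// begin filters for the sample parameters above.
	query := bson.M{
		"node": bson.M{"$in": []string{"192.168.1.2", "192.168.1.3"}},
		"$or": []bson.M{
			{"name": bson.M{"$regex": bson.RegEx{Pattern: "backup", Options: "i"}}},
		},
		"beginTime": bson.M{"$gte": begin},
	}
	fmt.Printf("%+v\n", query)
}

With no page, pageSize or sort parameters, the handler would pass this query to models.GetJobLogList with page 1, 50 results per page, sorted by beginTime descending.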
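
The diff relies on models.GetJobLogList for the actual paging but does not show it. Below is a minimal sketch of how it could be implemented with mgo; the package-level mgoDB handle and the "job_log" collection name are assumptions for illustration, while Find, Count, Sort, Skip, Limit and All are the standard mgo query-builder calls.

package models

import (
	"gopkg.in/mgo.v2"
	"gopkg.in/mgo.v2/bson"
)

// mgoDB is assumed to be initialised elsewhere from an mgo session.
var mgoDB *mgo.Database

// GetJobLogList returns one page of logs plus the total match count, which is
// what the handler needs to fill its {total, list} pager response.
func GetJobLogList(query bson.M, page, pageSize int, sort string) (list []*JobLog, total int, err error) {
	c := mgoDB.C("job_log") // collection name is a guess
	if total, err = c.Find(query).Count(); err != nil {
		return
	}
	// The "-beginTime"/"beginTime" value chosen by the handler maps directly to Sort.
	err = c.Find(query).Sort(sort).Skip((page - 1) * pageSize).Limit(pageSize).All(&list)
	return
}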