del: Remove the captrace/data2html/crontask programs

TsMask
2025-01-07 11:04:25 +08:00
parent d18404ffa7
commit 40ef92f244
37 changed files with 11 additions and 5107 deletions

.vscode/launch.json vendored

@@ -29,14 +29,6 @@
"program": "d:/omc.git/be.ems/sshsvc/sshsvc.go",
"console": "integratedTerminal"
},
{
"name": "debug crontask",
"type": "go",
"request": "launch",
"mode": "debug",
"program": "d:/omc.git/be.ems/crontask",
"console": "integratedTerminal"
},
{
"name": "debug encyaml",
"type": "go",


@@ -1,636 +0,0 @@
package main
import (
"encoding/binary"
"encoding/hex"
"fmt"
"net"
"os"
"os/exec"
"strings"
"sync"
"be.ems/captrace/config"
"be.ems/lib/dborm"
"be.ems/lib/global"
"be.ems/lib/log"
_ "github.com/go-sql-driver/mysql"
"golang.org/x/net/http/httpguts"
"golang.org/x/net/http2/hpack"
)
// goroutine
var limitChan = make(chan bool, 1024)
const (
GTPU_V1_VERSION = 1 << 5
GTPU_VER_MASK = 7 << 5
GTPU_PT_GTP = 1 << 4
GTPU_HEADER_LEN = 12
GTPU_E_S_PB_BIT = 7
GTPU_E_BI = 1 << 2
)
const (
GTPU_HEADER_VERSION_INDEX = 0
GTPU_HEADER_MSG_TYPE_INDEX = 1
GTPU_HEADER_LENGTH_INDEX = 2
GTPU_HEADER_TEID_INDEX = 4
)
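// ExtHeader holds the metadata carried in the GTP-U private extension header
// (task ID, IMSI, interface type, message type/direction, timestamp) together
// with the addressing and payload of the inner IP packet.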
type ExtHeader struct {
TaskId uint32
IMSI string
IfType byte
MsgType byte
MsgDirect byte // 0-recv,1-send
TimeStamp int64
SrcIP string
DstIP string
SrcPort uint16
DstPort uint16
Proto int
PPI int // only for SCTP
DataLen uint16
DataInfo []byte
}
type MsgInfo struct {
TaskId uint32
TimeStamp int64
IfType byte
MsgType byte
MsgDirect byte
SrcAddr string // IP:Port
DstAddr string // IP:Port
}
// func (fr *Framer) ReadFrame() (Frame, error)
// ReadMetaHeaders *hpack.Decoder
func validWireHeaderFieldName(v string) bool {
if len(v) == 0 {
return false
}
for _, r := range v {
if !httpguts.IsTokenRune(r) {
return false
}
if 'A' <= r && r <= 'Z' {
return false
}
}
return true
}
// tshark -r gtp.pcap -T json -d tcp.port==8080,http2 -Y "http2"
// -T pdml: Packet Details Markup Language
// -T psml: Packet Summary Markup Language
func execTshark(filename string, proto string) {
pcapPath := filename
tshark := exec.Command("tshark", "-r"+pcapPath,
"-Y"+proto,
"-T", "pdml")
out, err := tshark.CombinedOutput()
if err != nil {
log.Errorf("Failed to exec tshark:", err)
} else {
log.Debug("combined out:", string(out))
}
}
const magicMicroseconds = 0xa1b2c3d4
const versionMajor = 2
const versionMinor = 4
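// WriteEmptyPcap writes a single-packet pcap file: a 24-byte global header
// (link type 113, Linux cooked capture), a 16-byte packet record header, the
// 16-byte SLL pseudo-header in `cooked`, then the raw packet data.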
func WriteEmptyPcap(filename string, timestamp int64, length int, data []byte) error {
var cooked = [...]byte{0x00, 0x00, 0x03, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00}
var buf []byte
//24+16+16 = 56
buf = make([]byte, 56+length)
binary.LittleEndian.PutUint32(buf[0:4], magicMicroseconds)
binary.LittleEndian.PutUint16(buf[4:6], versionMajor)
binary.LittleEndian.PutUint16(buf[6:8], versionMinor)
// bytes 8:12 stay 0 (timezone = UTC)
// bytes 12:16 stay 0 (sigfigs is always set to zero, according to
// http://wiki.wireshark.org/Development/LibpcapFileFormat
binary.LittleEndian.PutUint32(buf[16:20], 0x00040000)
binary.LittleEndian.PutUint32(buf[20:24], 0x00000071)
// Packet Header
binary.LittleEndian.PutUint64(buf[24:32], uint64(timestamp))
binary.LittleEndian.PutUint32(buf[32:36], uint32(length+16))
binary.LittleEndian.PutUint32(buf[36:40], uint32(length+16))
copy(buf[40:], cooked[:])
copy(buf[56:], data[:])
err := os.WriteFile(filename, buf[:], 0644)
//log.Debugf("CAP: %v\n", buf)
return err
}
func ngapDataHandle(emsg []byte, timestamp int64, data []byte) int {
filePath := fmt.Sprintf("/tmp/ng%d.pcap", timestamp)
err := WriteEmptyPcap(filePath, timestamp, len(data), data)
if err != nil {
log.Error("Failed to tshark:", err)
return -1
} else {
execTshark(filePath, "ngap")
}
return 0
}
func pfcpDataHandle(emsg []byte, timestamp int64, data []byte) int {
filePath := fmt.Sprintf("/tmp/pf%d.pcap", timestamp)
err := WriteEmptyPcap(filePath, timestamp, len(data), data)
if err != nil {
log.Error("Failed to tshark:", err)
} else {
execTshark(filePath, "pfcp")
}
return 0
}
func httpDataHandle(emsg []byte, timestamp int64, data []byte) int {
filePath := fmt.Sprintf("/tmp/sb%d.pcap", timestamp)
err := WriteEmptyPcap(filePath, timestamp, len(data), data)
if err != nil {
log.Error("Failed to tshark:", err)
} else {
execTshark(filePath, "http2")
}
return 0
}
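// httpHeaderDataHandle decodes an HPACK-encoded HTTP/2 header block, validates
// each header field, and serializes the decoded headers (plus the optional
// body) as "name":"value" text appended to emsg.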
func httpHeaderDataHandle(emsg []byte, header []byte, data []byte) int {
var remainSize = uint32(16 << 20)
var sawRegular bool
var invalid bool // pseudo header field errors
var Fields []hpack.HeaderField
invalid = false
hdec := hpack.NewDecoder(4096, nil)
hdec.SetEmitEnabled(true)
hdec.SetMaxStringLength(int(16 << 20))
hdec.SetEmitFunc(func(hf hpack.HeaderField) {
if !httpguts.ValidHeaderFieldValue(hf.Value) {
// Don't include the value in the error, because it may be sensitive.
invalid = true
}
isPseudo := strings.HasPrefix(hf.Name, ":")
if isPseudo {
if sawRegular {
invalid = true
}
} else {
sawRegular = true
if !validWireHeaderFieldName(hf.Name) {
invalid = true
}
}
if invalid {
hdec.SetEmitEnabled(false)
return
}
size := hf.Size()
if size > remainSize {
hdec.SetEmitEnabled(false)
//mh.Truncated = true
return
}
remainSize -= size
Fields = append(Fields, hf)
})
// defer hdec.SetEmitFunc(func(hf hpack.HeaderField) {})
frag := header
if _, err := hdec.Write(frag); err != nil {
return -1
}
if err := hdec.Close(); err != nil {
return -1
}
hdec.SetEmitFunc(func(hf hpack.HeaderField) {})
var headers []byte
var line string
for i := range Fields {
line = fmt.Sprintf("\"%s\":\"%s\",", Fields[i].Name, Fields[i].Value)
headers = append(headers, []byte(line)...)
}
if data != nil && len(data) > 0 {
encode := fmt.Sprintf("%s \"content\":%s\n", string(headers), string(data))
emsg = append(emsg, []byte(encode)...)
log.Debug("encode:", string(encode))
} else {
log.Debug("headers:", string(headers))
emsg = append(emsg, []byte(headers)...)
}
return 0
}
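// gtpuHandler parses the GTP-U header (the TEID is used as the task ID) and
// the private extension header of type 0xFE, which carries IMSI, interface
// type, message type/direction and timestamp; it then extracts the inner IP
// packet, dispatches it by protocol (SCTP/NGAP, TCP/HTTP2, UDP/PFCP) and
// stores the resulting trace record in the database.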
func gtpuHandler(rvMsg []byte, rvLen int) {
var extHdr ExtHeader
var tr dborm.TraceData
var off, ret int
msg := rvMsg
verFlags := msg[GTPU_HEADER_VERSION_INDEX]
gtpuHdrLen := GTPU_HEADER_LEN
localTeid := binary.BigEndian.Uint32(msg[GTPU_HEADER_TEID_INDEX:])
extHdr.TaskId = localTeid
if (verFlags & GTPU_E_S_PB_BIT) != 0 {
if (verFlags & GTPU_E_BI) != 0 {
extTypeIndex := GTPU_HEADER_LEN - 1
extType := msg[extTypeIndex]
if extType == 0xFE {
extHdr.IMSI = string(msg[extTypeIndex+2 : extTypeIndex+17])
extHdr.IfType = msg[extTypeIndex+17]
extHdr.MsgType = msg[extTypeIndex+18]
extHdr.MsgDirect = msg[extTypeIndex+19]
extHdr.TimeStamp = int64(binary.BigEndian.Uint64(msg[extTypeIndex+19:]))
log.Debugf("ext info %v %s %d %d %d", msg[(extTypeIndex+2):(extTypeIndex+20)], extHdr.IMSI, extHdr.IfType, extHdr.MsgType, extHdr.MsgDirect)
// set offset of IP Packet
off = 40 + 4
//src ip: msg+40+12
extHdr.SrcIP = fmt.Sprintf("%d.%d.%d.%d", msg[off+12], msg[off+13], msg[off+14], msg[off+15])
//dst ip: msg+40+12+4
extHdr.DstIP = fmt.Sprintf("%d.%d.%d.%d", msg[off+16], msg[off+17], msg[off+18], msg[off+19])
extHdr.SrcPort = uint16(binary.BigEndian.Uint16(msg[off+20:]))
extHdr.DstPort = uint16(binary.BigEndian.Uint16(msg[off+22:]))
log.Debugf("info %s:%d %s:%d", extHdr.SrcIP, extHdr.SrcPort, extHdr.DstIP, extHdr.DstPort)
// ip header start msg+40
tr.TaskID = int(extHdr.TaskId)
tr.Timestamp = extHdr.TimeStamp
tr.Imsi = extHdr.IMSI
tr.IfType = int(extHdr.IfType)
tr.SrcAddr = fmt.Sprintf("%s:%d", extHdr.SrcIP, extHdr.SrcPort)
tr.DstAddr = fmt.Sprintf("%s:%d", extHdr.DstIP, extHdr.DstPort)
tr.MsgType = int(extHdr.MsgType)
tr.MsgDirect = int(extHdr.MsgDirect)
tr.Length = int(rvLen - off)
tr.RawMsg = make([]byte, int(rvLen-off))
copy(tr.RawMsg, []byte(msg[off:]))
extHdr.Proto = int(msg[off+9])
if extHdr.Proto == 132 { //SCTP
extHdr.PPI = int(msg[off+47])
extHdr.DataLen = uint16(binary.BigEndian.Uint16(msg[(off+34):]) - 16)
log.Debugf("dat len %d %d", extHdr.DataLen, extHdr.PPI)
if extHdr.PPI == 60 { // NGAP
extHdr.DataInfo = make([]byte, extHdr.DataLen)
copy(extHdr.DataInfo, msg[(off+48):])
//append(extHdr.DataInfo, msg[88:]...)
log.Debugf("dataInfo %v", extHdr.DataInfo)
ret = ngapDataHandle([]byte(tr.DecMsg), tr.Timestamp, tr.RawMsg)
}
} else if extHdr.Proto == 6 { // TCP
iplen := uint16(binary.BigEndian.Uint16(msg[off+2:]))
tcplen := uint16(iplen - 32 - 20)
hdrlen := uint16(binary.BigEndian.Uint16(msg[off+20+32+1:]))
offset := uint16(off + 52)
log.Debugf("HTTP %d %d %d\n", iplen, tcplen, hdrlen)
extHdr.DataLen = tcplen
extHdr.DataInfo = make([]byte, extHdr.DataLen)
copy(extHdr.DataInfo, msg[offset:])
//ret = httpDataHandle(tr.DecodedMsg, tr.Timestamp, tr.RawMsg)
if tcplen > (hdrlen + 9) { // has data
doffset := uint16(offset + hdrlen + 9)
datlen := uint16(binary.BigEndian.Uint16(msg[doffset+1:]))
log.Debugf("HTTP datlen %d", datlen)
ret = httpHeaderDataHandle([]byte(tr.DecMsg), msg[offset+9:offset+9+hdrlen], msg[doffset+9:doffset+datlen+9])
} else {
ret = httpHeaderDataHandle([]byte(tr.DecMsg), msg[offset+9:hdrlen], nil)
}
} else if extHdr.Proto == 17 { // UDP
ilen := uint16(binary.BigEndian.Uint16(msg[off+2:]))
udplen := uint16(ilen - 20)
extHdr.DataLen = udplen - 8
extHdr.DataInfo = make([]byte, extHdr.DataLen)
copy(extHdr.DataInfo, msg[off+27:])
ret = pfcpDataHandle([]byte(tr.DecMsg), tr.Timestamp, tr.RawMsg)
}
if ret < 0 {
log.Error("Decode message error")
} else {
PutTraceRecordToDB(&tr)
}
}
for extType != 0 && extTypeIndex < rvLen {
extLen := msg[extTypeIndex+1] << 2
if extLen == 0 {
log.Error("error, extLen is zero")
return
}
gtpuHdrLen += int(extLen)
extTypeIndex += int(extLen)
extType = msg[extTypeIndex]
}
}
} else {
gtpuHdrLen -= 4
}
}
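// udpProcess reads one datagram from the GTP-U socket, passes it to
// gtpuHandler and releases its slot in limitChan.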
func udpProcess(conn *net.UDPConn) {
data := make([]byte, 2048)
n, _, err := conn.ReadFromUDP(data)
if err != nil {
log.Error("failed read udp msg, error: " + err.Error())
}
gtpuHandler(data, n)
//str := string(data[:n])
//log.Error("receive from client, data:" + str)
<-limitChan
}
func udpServer(address string) {
udpAddr, err := net.ResolveUDPAddr("udp", address)
if err != nil {
log.Error("Failed to ResolveUDPAddr:", err)
os.Exit(1)
}
conn, err := net.ListenUDP("udp", udpAddr)
if err != nil {
log.Error("read from connect failed, err:", err)
os.Exit(1)
}
defer conn.Close()
for {
limitChan <- true
go udpProcess(conn)
}
}
//SCTP OFFSET = 48
//HTTP2 OFFSET = 52
//UDP OFFSET = 28
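// PutTraceRecordToDB decodes the stored raw IP packet according to the
// interface type (SCTP/NGAP, UDP/PFCP, otherwise HTTP2) using the offsets
// above, and inserts the trace record via xorm when decoding succeeds.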
func PutTraceRecordToDB(tr *dborm.TraceData) error {
var offset, ret int
if tr.IfType == 1 || tr.IfType == 2 { // SCTP
offset = 48
ret = ngapDataHandle([]byte(tr.DecMsg), tr.Timestamp, tr.RawMsg[offset:])
} else if tr.IfType == 4 { // UDP
offset = 28
ret = pfcpDataHandle([]byte(tr.DecMsg), tr.Timestamp, tr.RawMsg[offset:])
} else { // HTTP2
iplen := uint16(binary.BigEndian.Uint16(tr.RawMsg[2:]))
tcplen := uint16(iplen - 32 - 20)
hdrlen := uint16(binary.BigEndian.Uint16(tr.RawMsg[20+32+1:]))
offset = 52
if tcplen > (hdrlen + 9) { // has data
doffset := uint16(uint16(offset) + hdrlen + 9)
datlen := uint16(binary.BigEndian.Uint16(tr.RawMsg[doffset+1:]))
log.Debugf("HTTP datlen %d\n", datlen)
ret = httpHeaderDataHandle([]byte(tr.DecMsg), tr.RawMsg[offset+9:offset+9+int(hdrlen)], tr.RawMsg[doffset+9:doffset+datlen+9])
} else {
ret = httpHeaderDataHandle([]byte(tr.DecMsg), tr.RawMsg[offset+9:hdrlen], nil)
}
}
if ret == 0 {
_, err := dborm.XormInsertTraceData(tr)
if err != nil {
log.Error("Failed to dborm.XormInsertTraceData:", err)
return err
}
}
return nil
}
// ////////////////
// var rdb *redis.Client
// var redisOn bool
// var rurl string
//
// func RdbInitClient(Url string) (err error) {
// rurl = Url
// rdb = redis.NewClient(&redis.Options{
// Addr: rurl,
// Password: "", // no password set
// DB: 0, // use default
// })
//
// ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
// defer cancel()
//
// _, err = rdb.Ping(ctx).Result()
// if err != nil {
// log.Debugf("db connect failed\n")
// return err
// }
// return nil
// }
//
// func RdbKeys(filter string) (vals []string, err error) {
// ctx := context.Background()
//
// vals, err = rdb.Keys(ctx, filter).Result()
//
// if err != nil {
// log.Error("db: Keys ", err.Error())
// return nil, err
// }
//
// return vals, nil
// }
//
// func RdbHGetAll(key string) (kvs map[string]string, err error) {
// ctx := context.Background()
// kvs, err = rdb.HGetAll(ctx, key).Result()
//
// if err != nil {
// log.Error("db: HGetAll ", err.Error())
// return nil, err
// }
//
// return kvs, nil
// }
//
// func RdbHMSet(key string, kvs map[string]interface{}) (err error) {
// ctx := context.Background()
// err = rdb.HMSet(ctx, key, kvs).Err()
//
// if err != nil {
// log.Error("db: HMSet ", err.Error())
// return err
// }
//
// return nil
// }
//
// func RdbDel(key string) (err error) {
// ctx := context.Background()
// err = rdb.Del(ctx, key).Err()
//
// if err != nil {
// log.Error("db: Del ", err.Error())
// return err
// }
//
// return nil
// }
//
// func rdbClient(url string) {
// var err error
// var kvs map[string]string
// var vals []string
// var tr TraceRecord
//
// err = RdbInitClient(url)
//
// if err != nil {
// log.Debugf("db: RdbInitClient err\n")
// os.Exit(2)
// }
//
// for {
// vals, err = RdbKeys("tsk*") // (vals []string, err error)
// if err == nil {
// for i := range vals {
// kvs, err = RdbHGetAll(vals[i]) //(kvs map[string]string, err error)
// if err == nil {
// //tsk-1:1682764180993584177:460000100000001:8
// log.Debugf("%d: %s %s %s %v\n", i, vals[i], kvs["srcip"], kvs["dstip"], []byte(kvs["ipdat"]))
// arr := strings.Split(vals[i], ":")
// if arr != nil && len(arr) == 4 {
// tr.Taskid, _ = strconv.Atoi(arr[0][4:])
// tr.Timestamp, _ = strconv.ParseInt(arr[1], 10, 64)
//
// tr.Imsi = arr[2]
// tr.IfType, _ = strconv.Atoi(arr[3])
//
// }
// tr.SrcAddr = fmt.Sprintf("%s:%s", kvs["srcip"], kvs["srcport"])
// tr.DstAddr = fmt.Sprintf("%s:%s", kvs["dstip"], kvs["dstport"])
// tr.MsgType, _ = strconv.Atoi(kvs["msgtp"])
// tr.MsgDirect, _ = strconv.Atoi(kvs["direct"])
// rawlen, _ := strconv.Atoi(kvs["datlen"])
// tr.RawMsg = make([]byte, rawlen)
// copy(tr.RawMsg, []byte(kvs["ipdat"]))
// //tr.DecodedMsg
// PutTraceRecordToDB(&tr)
//
// //RdbDel(vals[i])
// }
//
// }
// }
// time.Sleep(time.Second * 5)
// }
// }
// Database Connection
//func QueryMultiRowDemo() {
// //InitMysql()
// sqlStr := "SELECT id,sname,age FROM student WHERE id = ?"
// rows, err := dbc.Query(sqlStr, 1)
// if err != nil {
// log.Debugf("query failed, err:%v\n", err)
// return
// }
// Very important: closing rows releases the database connection it holds
// defer rows.Close()
//
// Loop over the rows in the result set
// for rows.Next() {
// var u User
// err := rows.Scan(&u.id, &u.name, &u.age)
// if err != nil {
// log.Debugf("scan failed, err:%v\n", err)
// return
// }
// //log.Debugf("id:%d name:%s age:%d\n", u.id, u.name, u.age)
// }
//}
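// ToHtml hex-encodes the raw IP payload and hands it to the external
// data2html tool together with the output path, timestamp and port.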
func ToHtml(path string, timestamp string, port string, ipDaTA []byte) {
log.Trace("byte数据:", ipDaTA)
encodedStr := hex.EncodeToString(ipDaTA)
// [72 101 108 108 111]
log.Trace(encodedStr)
command := fmt.Sprintf("/usr/local/bin/data2html -f %s -t %s -i %s -d %s", path, timestamp, port, encodedStr)
log.Trace("commm:", command)
cmd := exec.Command("sh", "-c", command)
_, err := cmd.CombinedOutput()
if err != nil {
log.Error("Error:can not obtain stdout pipe for command:", err)
return
}
//执行命令
}
func main() {
conf := config.GetYamlConfig()
log.InitLogger(conf.Logger.File, conf.Logger.Duration, conf.Logger.Count, "omc:captrace", config.GetLogLevel())
log.Debugf("OMC captrace version: %s\n", global.Version)
log.Infof("========================= OMC captrace startup =========================")
log.Infof("OMC captrace version: %s %s %s", global.Version, global.BuildTime, global.GoVer)
err := dborm.InitDbClient(conf.Database.Type, conf.Database.User, conf.Database.Password,
conf.Database.Host, conf.Database.Port, conf.Database.Name, conf.Database.ConnParam)
if err != nil {
fmt.Println("dborm.initDbClient err:", err)
os.Exit(2)
}
var wg sync.WaitGroup
wg.Add(1)
udpServer(conf.Gtp.Addr)
wg.Wait()
}


@@ -1,128 +0,0 @@
package config
import (
"flag"
"fmt"
"os"
"strings"
"be.ems/lib/global"
"be.ems/lib/log"
"gopkg.in/yaml.v3"
)
type DbConfig struct {
Type string `yaml:"type"`
User string `yaml:"user"`
Password string `yaml:"password"`
Host string `yaml:"host"`
Port string `yaml:"port"`
Name string `yaml:"name"`
ConnParam string `yaml:"connParam,omitempty"`
Backup string `yaml:"backup"`
}
// Yaml struct of config
type YamlConfig struct {
Logger struct {
File string `yaml:"file"`
Level string `yaml:"level"`
Duration int `yaml:"duration"`
Count int `yaml:"count"`
} `yaml:"logger"`
Gtp struct {
Addr string `yaml:"addr"`
} `yaml:"gtp"`
Database DbConfig `yaml:"database"`
}
var YamlConf YamlConfig = NewYamlConfig()
// set default value for yaml config
func NewYamlConfig() YamlConfig {
return YamlConfig{
Database: DbConfig{
Type: "mysql",
ConnParam: "charset=utf8mb4&collation=utf8mb4_general_ci&parseTime=True&interpolateParams=True",
},
}
}
func ReadConfig(configFile string) {
yamlFile, err := os.ReadFile(configFile)
if err != nil {
fmt.Println("Read yaml config file error:", err)
os.Exit(2)
}
// fmt.Println("yamlfile:", string(yamlFile))
err = yaml.Unmarshal(yamlFile, &YamlConf)
if err != nil {
fmt.Println("Unmarshal error:", err)
os.Exit(3)
}
}
func WriteYamlConfig(newConfigData YamlConfig, configFile string) {
// Marshal the config back into YAML data
newYamlData, err := yaml.Marshal(&newConfigData)
if err != nil {
log.Errorf("Failed to marshal YAML: %v", err)
}
// Write the new YAML data to the file
err = os.WriteFile(configFile, newYamlData, 0644)
if err != nil {
log.Errorf("Failed to write YAML file: %v", err)
}
}
func GetYamlConfig() *YamlConfig {
return &YamlConf
}
func GetLogLevel() log.LogLevel {
var logLevel log.LogLevel
switch strings.ToLower(YamlConf.Logger.Level) {
case "trace":
logLevel = log.LOG_TRACE
case "info":
logLevel = log.LOG_INFO
case "debug":
logLevel = log.LOG_DEBUG
case "warn":
logLevel = log.LOG_WARN
case "error":
logLevel = log.LOG_ERROR
case "fatal":
logLevel = log.LOG_FATAL
case "off":
logLevel = log.LOG_OFF
default:
logLevel = log.LOG_DEBUG
}
return logLevel
}
const defaultConfigFile = "./etc/capconf.yaml"
func init() {
cfile := flag.String("c", defaultConfigFile, "config file")
pv := flag.Bool("v", false, "print version")
pversion := flag.Bool("version", false, "print version")
ph := flag.Bool("h", false, "print help")
phelp := flag.Bool("help", false, "print help")
flag.Parse()
if *pv || *pversion {
fmt.Printf("OMC captrace version: %s\n%s\n%s\n\n", global.Version, global.BuildTime, global.GoVer)
os.Exit(0)
}
if *ph || *phelp {
flag.Usage()
os.Exit(0)
}
ReadConfig(*cfile)
}


@@ -1,22 +0,0 @@
# file: log file name
# level: /trace/debug/info/warn/error/fatal, default: debug
# duration: saved days, default is 30 days
logger:
file: d:/omc.git/be.ems/captrace/log/captrace.log
level: trace
duration: 24
count: 10
gtp:
addr: :2153
database:
type: mysql
user: root
password: 1000omc@kp!
host: 172.25.97.150
port: 33066
name: omc_db
connParam: charset=utf8mb4&collation=utf8mb4_general_ci&parseTime=True&interpolateParams=True


@@ -1,26 +0,0 @@
# Makefile for rest agent project
PROJECT = OMC
VERSION = 2.2501.1
PLATFORM = amd64
ARMPLATFORM = aarch64
BUILDDIR = ../../build
DEBBUILDDIR = ../../debbuild
RPMBUILDDIR = $(HOME)/goprojects/rpmbuild
INSTALLDIR = /usr/local/omc
RELEASEDIR = ../../release
LIBDIR = be.ems/lib
BINNAME = captrace
.PHONY: build $(BINNAME)
build $(BINNAME):
go build -o $(BINNAME) -v -ldflags "-s -w -X '$(LIBDIR)/global.Version=$(VERSION)' \
-X '$(LIBDIR)/global.BuildTime=`date`' \
-X '$(LIBDIR)/global.GoVer=`go version`'"
run: $(BINNAME)
./$(BINNAME)
clean:
rm ./$(BINNAME)


@@ -1,22 +0,0 @@
# file: log file name
# level: /trace/debug/info/warn/error/fatal, default: debug
# duration: saved days, default is 30 days
logger:
file: /usr/local/omc/log/captrace.log
level: warn
duration: 24
count: 10
gtp:
addr: :32152
database:
type: mysql
user: root
password: 1000omc@kp!
host: 127.0.0.1
port: 33066
name: omc_db
connParam: charset=utf8mb4&collation=utf8mb4_general_ci&parseTime=True&interpolateParams=True


@@ -1,44 +0,0 @@
# file: log file name
# level: /trace/debug/info/warn/error/fatal, default: debug
# duration: saved days, default is 30 days
logger:
file: /usr/local/omc/log/crontask.log
level: warn
duration: 24
count: 90
omc:
name: OMC01
hosturi: http://127.0.0.1:33030
hostno: A001
province: ""
netabbr: HX
vendor: ""
tasks:
file: /usr/local/omc/etc/tasks.yaml
database:
type: mysql
user: root
password: 1000omc@kp!
host: 127.0.0.1
port: 33066
name: omc_db
connParam: charset=utf8mb4&collation=utf8mb4_general_ci&parseTime=True&interpolateParams=True
# northbound interface, cm/pm
# duration(day): saved days
# zipPeriods(day): periods of zip pm data file
nbi:
cm:
cfgfiledir: /usr/local/omc/etc/cm
xmlfiledir: /opt/omc/ftp
version: V1.0.1
duration: 30
pm:
cfgfiledir: /usr/local/omc/etc/pm
xmlfiledir: /opt/omc/ftp
version: V1.0.1
duration: 30
zipPeriods: 1


@@ -1,183 +0,0 @@
# example:
# tasks:
# - name: test # task comment
# uri: # restful uri
# params: # params of url
# interval: 30 # do something in the interval
# unit: Seconds #Seconds/Minutes/Hours/Days/Weeks, Monday/Tuesday/.../Sunday,
# at: 00:10:00 # do at time such as xx:xx:xx
# do: HelloWorldTask # (Do what: callback function)
#
# Attention: crontask must be restarted after modifying this file
#
tasks:
- name: test # task comment
status: Active #active/inactive
uri: # restful uri
params: # params of http url
body: # body of http request
interval: 60 # do something in the interval
unit: Seconds #Seconds/Minutes/Hours/Days/Weeks, Monday/Tuesday/.../Sunday,
at: 00:10:00 # do at time such as xx:xx:xx when the unit is Day/Days/Monday/...
do: TaskHelloWorld # (Do what: callback function)
- name: clear expired history alarm
status: Inactive
uri: /api/rest/databaseManagement/v1/omc_db/alarm
params: WHERE=now()+>+ADDDATE(event_time,+interval+(SELECT+`value`+FROM+config+WHERE+config_tag='historyDuration')+day)+and+alarm_status='0'
interval: 1
unit: Days
at: 00:10:00
do: TaskDeleteExpiredRecord
- name: clear expired history gold_kpi
status: Inactive
uri: /api/rest/databaseManagement/v1/omc_db/gold_kpi
params: WHERE=now()+>+ADDDATE(`date`,+interval+IFNULL((SELECT+`value`+FROM+config+WHERE+config_tag='goldKpiDuration'),7)+day)
interval: 1
unit: Days
at: 00:12:00
do: TaskDeleteExpiredRecord
- name: clear deleted custom pm kpi
uri: /api/rest/databaseManagement/v1/omc_db/pm_custom_title
params: WHERE=now()+>+ADDDATE(update_time,+interval+(SELECT+`value`+FROM+config+WHERE+config_tag='keepPMCKpi')+day)+and+status='Deleted'
interval: 1
unit: Days
at: 00:15:00
do: TaskDeleteExpiredRecord
- name: update expired user session
status: Inactive
uri: /api/rest/databaseManagement/v1/omc_db/session
params: WHERE=NOW()+>+ADDDATE(shake_time,+interval+expires+second)+and+status='online'
body: '{"session":{"status":"offline"}}'
interval: 30
unit: Seconds
at:
do: TaskUpdateTable
- name: clear expired log
status: Inactive
uri:
params:
interval: 1
unit: Days
at: 00:50:00
do: TaskDeleteExpiredRecord
- name: Backup measure data
uri: /api/rest/databaseManagement/v1/omc_db/measure_data
params: SQL=select+*+into+outfile+'%s'+fields+terminated+by+','+escaped+by+''+optionally+enclosed+by+''+lines+terminated+by+'\n'+from+(select+'id','date','task_id','ne_name','rm_uid','ne_type','granul_option','kpi_code','kpi_id','kpi_ext','start_time','end_time','value','timestamp'+union+select+id,date,task_id,ne_name,rm_uid,ne_type,granul_option,kpi_code,kpi_id,kpi_ext,start_time,end_time,value,timestamp+from+measure_data)+b
interval: 1
unit: Days
at: 00:20:00
do: TaskDBBackupCSVGetBySQL
- name: Backup operation log
uri: /api/rest/databaseManagement/v1/omc_db/operation_log
params: SQL=select+*+into+outfile+'%s'+fields+terminated+by+','+escaped+by+''+optionally+enclosed+by+''+lines+terminated+by+'\n'+from+(select+'op_id','account_name','op_ip','subsys_tag','op_type','op_content','op_result','begin_time','end_time','vnf_flag','log_time'+union+select+op_id,account_name,op_ip,subsys_tag,op_type,op_content,op_result,begin_time,end_time,vnf_flag,log_time+from+operation_log)+b
interval: 1
unit: Days
at: 00:26:00
do: TaskDBBackupCSVGetBySQL
- name: Backup security log
uri: /api/rest/databaseManagement/v1/omc_db/security_log
params: SQL=select+*+into+outfile+'%s'+fields+terminated+by+','+escaped+by+''+optionally+enclosed+by+''+lines+terminated+by+'\n'+from+(select+'id','account_name','account_type','op_ip','op_type','op_content','op_result','op_time'+union+select+id,account_name,account_type,op_ip,op_type,op_content,op_result,op_time+from+security_log)+b
interval: 1
unit: Days
at: 00:28:00
do: TaskDBBackupCSVGetBySQL
- name: Backup alarm log
uri: /api/rest/databaseManagement/v1/omc_db/alarm_log
params: SQL=select+*+into+outfile+'%s'+fields+terminated+by+','+escaped+by+''+optionally+enclosed+by+''+lines+terminated+by+'\n'+from+(select+'id','ne_type','ne_id','alarm_seq','alarm_id','alarm_code','alarm_status','event_time','log_time'+union+select+id,ne_type,ne_id,alarm_seq,alarm_id,alarm_code,alarm_status,event_time,log_time+from+alarm_log)+b
interval: 1
unit: Days
at: 00:30:00
do: TaskDBBackupCSVGetBySQL
- name: handshake to NF
status: Active
uri: /api/rest/systemManagement/v1/elementType/%s/objectType/systemState
params:
interval: 10
unit: Seconds
at:
do: TaskHandShakeToNF
- name: Export CM from NF
status: Inactive
uri: /api/rest/systemManagement/v1/elementType/%s/objectType/cm
params: ne_id=%s
interval: 1
unit: Days
at: 00:15
do: TaskExportCmFromNF
- name: Generate NRM xml file
uri:
params:
interval: 1
unit: Day
at: 00:00,06:00,12:00,18:00
do: GenCmXmlFile
- name: Task of Generate measure threshold alarm
status: Active
uri: /api/rest/faultManagement/v1/elementType/%s/objectType/alarms
params: 10200
interval: 10
unit: Seconds
at:
do: TaskGenMeasureThresholdAlarm
- name: Task of Generate license alarm
status: Inactive
uri: /api/rest/faultManagement/v1/elementType/%s/objectType/alarms
params: 10100 #alarm_code
interval: 1
unit: Days
at: 00:30
do: TaskGenLicenseAlarm
- name: Task of Generate NE system state alarm
status: Inactive
uri: /api/rest/faultManagement/v1/elementType/%s/objectType/alarms
params: 10000
interval: 5
unit: Seconds
at:
do: TaskGenNeStateAlarm
- name: Task of Generate Measure Report Timeout
status: Active
uri: /api/rest/faultManagement/v1/elementType/%s/objectType/alarms
params: 10201
interval: 10
unit: Seconds
at:
do: TaskGenMeasureReportTimeoutAlarm
- name: Monitor process list and write system log
uri: /api/rest/databaseManagement/v1/omc_db/system_log
params:
body:
interval: 10
unit: Seconds
at:
do: TaskWriteSystemLog
- name: Copy log to /opt/omc/ftp/log
uri:
params: cp -rf /usr/local/omc/database/*.csv /opt/omc/ftp/log
interval: 10
unit: Minutes
at:
do: TaskRunShellCommand
# - name: Import CM to NF
# uri: /api/rest/systemManagement/v1/elementType/udm/objectType/cm
# params: ne_id=SZ_01
# interval: 15
# unit: Seconds
# at:
# do: TaskImportCmToNF
crontab:
# - name: Run every 1 minute
# tab: 0 */1 * * * ? // crontab: rule like linux crontab
# do: CronHelloWorldTask // function name to call
# params:
- name: Generate PM xml file
status: Active
tab: 5,20,35,50 * * * *
do: GenPmXmlFile
uri: this is uri
params: Generating PM xml file
# - name: Import CM to NF
# tab: 0 * * * * *
# do: TaskImportCmToNF
# uri: /api/rest/systemManagement/v1/elementType/udm/objectType/cm
# params: ne_id=SZ_01


@@ -1,17 +0,0 @@
[Unit]
Description=OMC capture trace data from NF daemon
After=network-online.target mysql.service
[Service]
Type=simple
LimitNOFILE=65535
LimitNPROC=65535
Restart=always
ExecStart=/usr/local/omc/bin/captrace -c /usr/local/omc/etc/capconf.yaml
RestartSec=2
RestartPreventExitStatus=SIGTERM SIGKILL
ExecReload=/bin/kill -HUP $MAINPID
[Install]
WantedBy=multi-user.target


@@ -1,6 +1,6 @@
 [Unit]
-Description=OMC cron task daemon
-After=network-online.target mysql.service
+Description=OMC Service
+After=network-online.target mysql.service kvdb.service
 [Service]
 Type=simple
@@ -8,10 +8,10 @@ LimitNOFILE=65535
 LimitNPROC=65535
 Restart=always
-ExecStart=/usr/local/omc/bin/crontask -c /usr/local/omc/etc/crontask.yaml
+ExecStart=/usr/local/omc/bin/restagent -c /usr/local/omc/etc/restconf.yaml
 RestartSec=2
 RestartPreventExitStatus=SIGTERM SIGKILL
 ExecReload=/bin/kill -HUP $MAINPID
 [Install]
-WantedBy=multi-user.target
+WantedBy=multi-user.target


@@ -1,65 +0,0 @@
package cmschema
// FileHeader ...
type FileHeader struct {
TimeStamp string `xml:"TimeStamp"`
TimeZone string `xml:"TimeZone"`
VendorName string `xml:"VendorName"`
ElementType string `xml:"ElementType"`
CmVersion string `xml:"CmVersion"`
}
// N ...
type N struct {
IAttr int `xml:"i,attr"`
Value string `xml:",chardata"`
}
// FieldName ...
type FieldName struct {
N []N `xml:"N"`
}
// V ...
type V struct {
IAttr int `xml:"i,attr"`
Value string `xml:",chardata"`
}
// Object ...
type Object struct {
RmUIDAttr string `xml:"rmUID,attr"`
DnAttr string `xml:"Dn,attr,omitempty"`
UserLabelAttr string `xml:"UserLabel,attr,omitempty"`
PVFlagAttr string `xml:"PVFlag,attr"`
VMIDAttr string `xml:"VMID,attr,omitempty"`
VNFInstanceIDAttr string `xml:"VNFInstanceID,attr,omitempty"`
V []V `xml:"V"`
}
// FieldValue ...
type FieldValue struct {
Object []Object `xml:"Object"`
}
// Objects ...
type ObjectData struct {
ObjectType string `xml:"ObjectType"`
FieldName FieldName `xml:"FieldName"`
FieldValue FieldValue `xml:"FieldValue"`
}
// DataFile ...
type DataFile struct {
FileHeader FileHeader `xml:"FileHeader"`
//Objects []Objects `xml:"Objects"`
ObjectData ObjectData `xml:"objectData"`
XsiAttr string `xml:"xmlns:xsi,attr"`
XsiLoc string `xml:"xsi:noNamespaceSchemaLocation,attr"`
}
// NRM xml file
type NRMXmlFile struct {
Header string `xml:"Header"`
DataFile DataFile `xml:"xmlns:xsi"`
}


@@ -1,296 +0,0 @@
package cmyaml
type UdmCm struct {
ManagedElement ManagedElement `yaml:"ManagedElement"`
UdmFunction UdmFunction `yaml:"UdmFunction"`
UdrFunction UdrFunction `yaml:"UdrFunction"`
AusfFunction AusfFunction `yaml:"AusfFunction"`
IPResource IPResource `yaml:"IPResource"`
}
type UdmFunction struct {
Id string `yaml:"Id"`
UserLabel string `yaml:"UserLabel"`
AdministrativeState string `yaml:"AdministrativeState"`
OperationalState string `yaml:"OperationalState"`
VnfInstanceId string `yaml:"VnfInstanceId"`
Fqdn string `yaml:"Fqdn"`
SbiServiceList string `yaml:"SbiServiceList"`
}
type UdrFunction struct {
Id string `yaml:"Id"`
UserLabel string `yaml:"UserLabel"`
AddrType string `yaml:"AddrType"`
IpVersion string `yaml:"IpVersion"`
AddrSegList string `yaml:"AddrSegList"`
}
type AusfFunction struct {
Id string `yaml:"Id"`
UserLabel string `yaml:"UserLabel"`
LocIpAddrList string `yaml:"LocIpAddrList"`
FarIpSubnetworkList string `yaml:"FarIpSubnetworkList"`
}
type IPResource struct {
Id string `yaml:"Id"`
UserLabel string `yaml:"UserLabel"`
InterfaceType string `yaml:"InterfaceType"`
LocIpV4AddrList string `yaml:"LocIpV4AddrList"`
LocIpV6AddrList string `yaml:"LocIpV6AddrList"`
}
type ManagedElement struct {
Id string `yaml:"Id"`
UserLabel string `yaml:"UserLabel"`
VendorName string `yaml:"VendorName"`
ManagedBy string `yaml:"ManagedBy"`
ManagementIpAddress string `yaml:"ManagementIpAddress"`
SwVersion string `yaml:"SwVersion"`
PatchInfo string `yaml:"PatchInfo"`
AdministrativeState string `yaml:"AdministrativeState"`
OperationalState string `yaml:"OperationalState"`
}
type AmfCm struct {
ManagedElement ManagedElement `yaml:"ManagedElement"`
AmfFunction AmfFunction `yaml:"AmfFunction"`
EpRpDynN8Amf EpRpDynN8Amf `yaml:"EpRpDynN8Amf"`
EpRpDynN11Amf EpRpDynN11Amf `yaml:"EpRpDynN11Amf"`
EpRpDynN12Amf EpRpDynN12Amf `yaml:"EpRpDynN12Amf"`
IPResource IPResource `yaml:"IPResource"`
}
type EpRpDynN11Amf struct {
Id string `yaml:"Id"`
UserLabel string `yaml:"UserLabel"`
LocIpAddrList string `yaml:"LocIpAddrList"`
FarIpSubnetworkList string `yaml:"FarIpSubnetworkList"`
}
type EpRpDynN12Amf struct {
Id string `yaml:"Id"`
UserLabel string `yaml:"UserLabel"`
LocIpAddrList string `yaml:"LocIpAddrList"`
FarIpSubnetworkList string `yaml:"FarIpSubnetworkList"`
}
type AmfFunction struct {
Id string `yaml:"Id"`
UserLabel string `yaml:"UserLabel"`
AdministrativeState string `yaml:"AdministrativeState"`
Fqdn string `yaml:"Fqdn"`
SbiServiceList string `yaml:"SbiServiceList"`
AmfGuamiList string `yaml:"AmfGuamiList"`
OperationalState string `yaml:"OperationalState"`
VnfInstanceId string `yaml:"VnfInstanceId"`
SnssaiList string `yaml:"SnssaiList"`
MaxUser string `yaml:"MaxUser"`
RelativeCapacity string `yaml:"RelativeCapacity"`
MaxGnbNum string `yaml:"MaxGnbNum"`
}
type EpRpDynN8Amf struct {
Id string `yaml:"Id"`
UserLabel string `yaml:"UserLabel"`
LocIpAddrList string `yaml:"LocIpAddrList"`
FarIpSubnetworkList string `yaml:"FarIpSubnetworkList"`
}
type SmfCm struct {
ManagedElement ManagedElement `yaml:"ManagedElement"`
SmfFunction SmfFunction `yaml:"SmfFunction"`
AddrPool AddrPool `yaml:"AddrPool"`
EpRpDynN7Smf EpRpDynN7Smf `yaml:"EpRpDynN7Smf"`
EpRpDynN10Smf EpRpDynN10Smf `yaml:"EpRpDynN10Smf"`
IPResource IPResource `yaml:"IPResource"`
}
type SmfFunction struct {
Id string `yaml:"Id"`
UserLabel string `yaml:"UserLabel"`
VnfInstanceId string `yaml:"VnfInstanceId"`
MaxQfi string `yaml:"MaxQfi"`
MaxPduSessions string `yaml:"MaxPduSessions"`
UpfList string `yaml:"UpfList"`
AdministrativeState string `yaml:"AdministrativeState"`
OperationalState string `yaml:"OperationalState"`
Fqdn string `yaml:"Fqdn"`
SbiServiceList string `yaml:"SbiServiceList"`
}
type AddrPool struct {
Id string `yaml:"Id"`
UserLabel string `yaml:"UserLabel"`
AddrType string `yaml:"AddrType"`
IpVersion string `yaml:"IpVersion"`
AddrSegList string `yaml:"AddrSegList"`
}
type EpRpDynN7Smf struct {
Id string `yaml:"Id"`
UserLabel string `yaml:"UserLabel"`
LocIpAddrList string `yaml:"LocIpAddrList"`
FarIpSubnetworkList string `yaml:"FarIpSubnetworkList"`
}
type EpRpDynN10Smf struct {
Id string `yaml:"Id"`
UserLabel string `yaml:"UserLabel"`
LocIpAddrList string `yaml:"LocIpAddrList"`
FarIpSubnetworkList string `yaml:"FarIpSubnetworkList"`
}
type UpfCm struct {
IPResource IPResource `yaml:"IPResource"`
InventoryUnitHost InventoryUnitHost `yaml:"InventoryUnitHost"`
InventoryUnitAccessory InventoryUnitAccessory `yaml:"InventoryUnitAccessory"`
UdrFunction UpfUdrFunction `yaml:"UdrFunction"`
InventoryUnitRack InventoryUnitRack `yaml:"InventoryUnitRack"`
EpRpDynN9Upf EpRpDynN9Upf `yaml:"EpRpDynN9Upf"`
AusfFunction UpfAusfFunction `yaml:"AusfFunction"`
SmfFunction UpfSmfFunction `yaml:"SmfFunction"`
InventoryUnitPack InventoryUnitPack `yaml:"InventoryUnitPack"`
UpfFunction UpfFunction `yaml:"UpfFunction"`
AmfFunction UpfAmfFunction `yaml:"AmfFunction"`
ManagedElement ManagedElement `yaml:"ManagedElement"`
InventoryUnitShelf InventoryUnitShelf `yaml:"InventoryUnitShelf"`
EpRpDynN3Upf EpRpDynN3Upf `yaml:"EpRpDynN3Upf"`
}
type InventoryUnitRack struct {
SbiServiceList string `yaml:"SbiServiceList"`
Fqdn string `yaml:"Fqdn"`
UserLabel string `yaml:"UserLabel"`
AdministrativeState string `yaml:"AdministrativeState"`
OperationalState string `yaml:"OperationalState"`
VnfInstanceId string `yaml:"VnfInstanceId"`
MaxPduSessions string `yaml:"MaxPduSessions"`
MaxQfi string `yaml:"MaxQfi"`
UpfList string `yaml:"UpfList"`
Id string `yaml:"Id"`
}
type UpfAusfFunction struct {
UserLabel string `yaml:"UserLabel"`
AdministrativeState string `yaml:"AdministrativeState"`
OperationalState string `yaml:"OperationalState"`
VnfInstanceId string `yaml:"VnfInstanceId"`
Fqdn string `yaml:"Fqdn"`
Id string `yaml:"Id"`
}
type EpRpDynN3Upf struct {
LocIpAddrList string `yaml:"LocIpAddrList"`
FarIpSubnetworkList string `yaml:"FarIpSubnetworkList"`
Id string `yaml:"Id"`
UserLabel string `yaml:"UserLabel"`
}
type InventoryUnitHost struct {
UserLabel string `yaml:"UserLabel"`
VendorName string `yaml:"VendorName"`
DateOfLastService string `yaml:"DateOfLastService"`
ManufacturerData string `yaml:"ManufacturerData"`
VendorUnitTypeNumber string `yaml:"VendorUnitTypeNumber"`
HostPosition string `yaml:"HostPosition"`
MemSize string `yaml:"MemSize"`
HardDiskSize string `yaml:"HardDiskSize"`
NumberOfCpu string `yaml:"NumberOfCpu"`
Id string `yaml:"Id"`
VendorUnitFamilyType string `yaml:"VendorUnitFamilyType"`
SerialNumber string `yaml:"SerialNumber"`
VersionNumber string `yaml:"VersionNumber"`
DateOfManufacture string `yaml:"DateOfManufacture"`
}
type InventoryUnitAccessory struct {
UserLabel string `yaml:"UserLabel"`
VendorName string `yaml:"VendorName"`
SerialNumber string `yaml:"SerialNumber"`
ManufacturerData string `yaml:"ManufacturerData"`
DateOfLastService string `yaml:"DateOfLastService"`
AccessoryPosition string `yaml:"AccessoryPosition"`
AccessoryType string `yaml:"AccessoryType"`
Id string `yaml:"Id"`
VendorUnitFamilyType string `yaml:"VendorUnitFamilyType"`
VendorUnitTypeNumber string `yaml:"VendorUnitTypeNumber"`
VersionNumber string `yaml:"VersionNumber"`
DateOfManufacture string `yaml:"DateOfManufacture"`
AddtionalInformation string `yaml:"AddtionalInformation"`
}
type EpRpDynN9Upf struct {
UserLabel string `yaml:"UserLabel"`
LocIpAddrList string `yaml:"LocIpAddrList"`
FarIpSubnetworkList string `yaml:"FarIpSubnetworkList"`
Id string `yaml:"Id"`
}
type UpfSmfFunction struct {
Id string `yaml:"Id"`
UserLabel string `yaml:"UserLabel"`
AdministrativeState string `yaml:"AdministrativeState"`
OperationalState string `yaml:"OperationalState"`
VnfInstanceId string `yaml:"VnfInstanceId"`
Fqdn string `yaml:"Fqdn"`
}
type InventoryUnitPack struct {
VendorUnitFamilyType string `yaml:"VendorUnitFamilyType"`
VendorName string `yaml:"VendorName"`
VersionNumber string `yaml:"VersionNumber"`
DateOfManufacture string `yaml:"DateOfManufacture"`
DateOfLastService string `yaml:"DateOfLastService"`
ManufacturerData string `yaml:"ManufacturerData"`
Id string `yaml:"Id"`
UserLabel string `yaml:"UserLabel"`
PortsInformation string `yaml:"PortsInformation"`
PackPosition string `yaml:"PackPosition"`
SlotsOccupied string `yaml:"SlotsOccupied"`
VendorUnitTypeNumber string `yaml:"VendorUnitTypeNumber"`
SerialNumber string `yaml:"SerialNumber"`
}
type UpfAmfFunction struct {
VnfInstanceId string `yaml:"VnfInstanceId"`
Fqdn string `yaml:"Fqdn"`
Id string `yaml:"Id"`
UserLabel string `yaml:"UserLabel"`
AdministrativeState string `yaml:"AdministrativeState"`
OperationalState string `yaml:"OperationalState"`
}
type InventoryUnitShelf struct {
ManufacturerData string `yaml:"ManufacturerData"`
RackPosition string `yaml:"RackPosition"`
SerialNumber string `yaml:"SerialNumber"`
VersionNumber string `yaml:"VersionNumber"`
VendorUnitFamilyType string `yaml:"VendorUnitFamilyType"`
VendorUnitTypeNumber string `yaml:"VendorUnitTypeNumber"`
VendorName string `yaml:"VendorName"`
DateOfManufacture string `yaml:"DateOfManufacture"`
DateOfLastService string `yaml:"DateOfLastService"`
Id string `yaml:"Id"`
UserLabel string `yaml:"UserLabel"`
}
type UpfUdrFunction struct {
OperationalState string `yaml:"OperationalState"`
VnfInstanceId string `yaml:"VnfInstanceId"`
Fqdn string `yaml:"Fqdn"`
Id string `yaml:"Id"`
UserLabel string `yaml:"UserLabel"`
AdministrativeState string `yaml:"AdministrativeState"`
}
type UpfFunction struct {
MaxThroughput string `yaml:"MaxThroughput"`
Id string `yaml:"Id"`
UserLabel string `yaml:"UserLabel"`
AdministrativeState string `yaml:"AdministrativeState"`
OperationalState string `yaml:"OperationalState"`
VnfInstanceId string `yaml:"VnfInstanceId"`
MaxQosFlows string `yaml:"MaxQosFlows"`
}


@@ -1,201 +0,0 @@
package main
import (
"flag"
"fmt"
"os"
"strings"
"be.ems/lib/global"
"be.ems/lib/log"
"gopkg.in/yaml.v3"
)
type DbConfig struct {
Type string `yaml:"type"`
User string `yaml:"user"`
Password string `yaml:"password"`
Host string `yaml:"host"`
Port string `yaml:"port"`
Name string `yaml:"name"`
ConnParam string `yaml:"connParam,omitempty"`
Backup string `yaml:"backup"`
}
// Yaml struct of config
type YamlConfig struct {
Logger struct {
File string `yaml:"file"`
Level string `yaml:"level"`
Duration int `yaml:"duration"`
Count int `yaml:"count"`
} `yaml:"logger"`
OMC struct {
Name string `yaml:"name"`
HostUri string `yaml:"hosturi"`
HostNo string `yaml:"hostno"`
Province string `yaml:"province"`
NetAbbr string `yaml:"netabbr"`
Vendor string `yaml:"vendor"`
} `yaml:"omc"`
Database DbConfig `yaml:"database"`
Tasks struct {
File string `yaml:"file"`
} `yaml:"tasks"`
NBI struct {
CM struct {
CfgFileDir string `yaml:"cfgfiledir"`
XmlFileDir string `yaml:"xmlfiledir"`
Version string `yaml:"version"`
} `yaml:"cm"`
PM struct {
CfgFileDir string `yaml:"cfgfiledir"`
XmlFileDir string `yaml:"xmlfiledir"`
Version string `yaml:"version"`
} `yaml:"pm"`
} `yaml:"nbi"`
}
var yamlConfig YamlConfig = NewYamlConfig()
// set default value for yaml config
func NewYamlConfig() YamlConfig {
return YamlConfig{
Database: DbConfig{
Type: "mysql",
ConnParam: "charset=utf8mb4&collation=utf8mb4_general_ci&parseTime=True&interpolateParams=True",
},
}
}
func ReadConfig(configFile string) error {
yamlFile, err := os.ReadFile(configFile)
if err != nil {
fmt.Println("Read yaml config file error:", err)
return err
}
err = yaml.Unmarshal(yamlFile, &yamlConfig)
if err != nil {
fmt.Println("Unmarshal error:", err)
return err
}
return nil
}
func GetYamlConfig() *YamlConfig {
return &yamlConfig
}
func GetLogLevel() log.LogLevel {
var logLevel log.LogLevel
switch strings.ToLower(yamlConfig.Logger.Level) {
case "trace":
logLevel = log.LOG_TRACE
case "info":
logLevel = log.LOG_INFO
case "debug":
logLevel = log.LOG_DEBUG
case "warn":
logLevel = log.LOG_WARN
case "error":
logLevel = log.LOG_ERROR
case "fatal":
logLevel = log.LOG_FATAL
case "off":
logLevel = log.LOG_OFF
default:
logLevel = log.LOG_DEBUG
}
return logLevel
}
type Task struct {
Name string `yaml:"name"`
Status string `yaml:"status" default:"Active"`
Uri string `yaml:"uri"`
Params string `yaml:"params"`
Body string `yaml:"body"`
Interval uint64 `yaml:"interval"`
Unit string `yaml:"unit"`
At string `yaml:"at"`
From int `yaml:"from"`
Do string `yaml:"do"`
}
type Crontab struct {
Name string `yaml:"name"`
Status string `yaml:"status" default:"Active"`
Tab string `yaml:"tab"`
Do string `yaml:"do"`
Uri string `yaml:"uri"`
Params string `yaml:"params"`
Body string `yaml:"body"`
}
type Tasks struct {
Tasks []Task `yaml:"tasks"`
Crontabs []Crontab `yaml:"crontab"`
}
const (
TaskStatusActive = "active"
TaskStatusInactive = "inactive"
)
var taskSet Tasks
func ReadTasksYaml(pfile string) (ret error) {
log.Debug("pfile:", pfile)
file, err := os.ReadFile(pfile)
if err != nil {
log.Error(err)
return err
}
err = yaml.Unmarshal(file, &taskSet)
if err != nil {
log.Error(err)
return err
}
log.Trace("tasks:", taskSet)
return nil
}
func GetDefaultUserAgent() string {
return "OMC-crontask/" + global.Version
}
const defaultConfigFile = "./etc/crontask.yaml"
var ConfigFile *string
func init() {
ConfigFile = flag.String("c", defaultConfigFile, "config file")
pv := flag.Bool("v", false, "print version")
pversion := flag.Bool("version", false, "print version")
ph := flag.Bool("h", false, "print help")
phelp := flag.Bool("help", false, "print help")
flag.Parse()
if *pv || *pversion {
fmt.Printf("OMC crontask version: %s\n%s\n%s\n\n", global.Version, global.BuildTime, global.GoVer)
os.Exit(0)
}
if *ph || *phelp {
flag.Usage()
os.Exit(0)
}
err := ReadConfig(*ConfigFile)
if err != nil {
fmt.Println("Failed to ReadConfig:", err)
os.Exit(3)
}
}


@@ -1,32 +0,0 @@
//go:build linux
// +build linux
package main
import (
"os"
"os/signal"
"syscall"
"be.ems/lib/log"
)
// Start a goroutine that listens for the reload signal
func ReloadRoutine() {
sigCh := make(chan os.Signal, 1)
signal.Notify(sigCh, syscall.SIGUSR1)
for {
<-sigCh
log.Info("Received reload signal, reloading config...")
err := ReadConfig(*ConfigFile)
if err != nil {
log.Error("Failed to ReadConfig:", err)
continue
}
log.Info("Config reloaded successfully.")
}
}


@@ -1,34 +0,0 @@
//go:build windows
// +build windows
package main
import (
"os"
"os/signal"
"syscall"
"be.ems/lib/log"
)
// Start a goroutine that listens for the reload signal
func ReloadRoutine() {
sigCh := make(chan os.Signal, 1)
//signal.Notify(sigCh, syscall.SIGUSR1)
signal.Notify(sigCh, syscall.SIGTRAP)
for {
<-sigCh
log.Info("Received reload signal, reloading config...")
err := ReadConfig(*ConfigFile)
if err != nil {
log.Error("Failed to ReadConfig:", err)
continue
}
log.Info("Config reloaded successfully.")
}
}


@@ -1,41 +0,0 @@
package main
import (
"fmt"
"os"
"be.ems/lib/dborm"
"be.ems/lib/global"
"be.ems/lib/log"
)
func main() {
log.InitLogger(yamlConfig.Logger.File, yamlConfig.Logger.Duration, yamlConfig.Logger.Count, "omc:crontask", GetLogLevel())
fmt.Printf("OMC crontask version: %s\n", global.Version)
log.Infof("========================= OMC crontask startup =========================")
log.Infof("OMC crontask version: %s %s %s", global.Version, global.BuildTime, global.GoVer)
err := dborm.InitDbClient(yamlConfig.Database.Type, yamlConfig.Database.User, yamlConfig.Database.Password,
yamlConfig.Database.Host, yamlConfig.Database.Port, yamlConfig.Database.Name, yamlConfig.Database.ConnParam)
if err != nil {
fmt.Println("dborm.initDbClient err:", err)
os.Exit(1)
}
err = initDbClient()
if err != nil {
fmt.Println("initDBClient error:", err)
os.Exit(1)
}
ReadTasksYaml(yamlConfig.Tasks.File)
//go ReloadRoutine()
go initCronTasks()
go initCronTabs()
select {}
}


@@ -1,534 +0,0 @@
package main
import (
"database/sql"
"fmt"
"time"
"be.ems/lib/log"
_ "github.com/go-sql-driver/mysql"
"xorm.io/xorm"
)
type NullTime sql.NullTime
type DBClient struct {
dbType string
dbUrl string
dbConnMaxLifetime time.Duration
dbMaxIdleConns int
dbMaxOpenConns int
IsShowSQL bool
xEngine *xorm.Engine
}
var dbClient DBClient
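// initDbClient builds the MySQL DSN from the yaml database config and creates
// the package-level xorm engine used by the Xorm* helpers below.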
func initDbClient() error {
db := yamlConfig.Database
dbClient.dbUrl = fmt.Sprintf("%s:%s@tcp(%s:%s)/%s?charset=utf8mb4&collation=utf8mb4_general_ci&parseTime=true&loc=Local",
db.User, db.Password, db.Host, db.Port, db.Name)
dbClient.dbType = db.Type
dbClient.dbConnMaxLifetime = 0
dbClient.dbMaxIdleConns = 0
dbClient.dbMaxOpenConns = 0
if log.GetLevel() == log.LOG_TRACE {
dbClient.IsShowSQL = true
}
var err error
dbClient.xEngine, err = xorm.NewEngine(dbClient.dbType, dbClient.dbUrl)
if err != nil {
log.Error("Failed to connect database:", err)
return err
}
//dbClient.xEngine.Ping() // can be used to check whether the database is reachable
//if err != nil {
// log.Error("Failed to ping database:", err)
// return err
//}
// defer dbClient.xEngine.Close() // close on exit
if dbClient.IsShowSQL {
dbClient.xEngine.ShowSQL(true)
}
dbClient.xEngine.SetConnMaxLifetime(dbClient.dbConnMaxLifetime)
dbClient.xEngine.SetMaxIdleConns(dbClient.dbMaxIdleConns)
dbClient.xEngine.SetMaxOpenConns(dbClient.dbMaxOpenConns)
dbClient.xEngine.DatabaseTZ = time.Local // required
dbClient.xEngine.TZLocation = time.Local // required
return nil
}
var xEngine *xorm.Engine
func XormConnectDatabaseWithUri(sql string) (*xorm.Engine, error) {
sqlStr := fmt.Sprintf("%s?charset=utf8mb4&collation=utf8mb4_general_ci&parseTime=true&loc=Local", sql)
var err error
xEngine, err = xorm.NewEngine("mysql", sqlStr) //1、Create xorm engine
if err != nil {
fmt.Println("Failed to connect database:", err)
return nil, err
}
xEngine.ShowSQL(true)
return xEngine, nil
}
type NeInfo struct {
Id int `json:"-" xorm:"pk 'id' autoincr"`
NeType string `json:"neType" xorm:"ne_type"`
NeId string `json:"neId" xorm:"ne_id"` // neUID/rmUID, unique NE identifier
RmUID string `json:"rmUID" xorm:"rm_uid"` // neUID/rmUID, NE UID
NeName string `json:"neName" xorm:"ne_name"` // NeName/UserLabel, NE name / friendly device name
Ip string `json:"ip" xorm:"ip"`
Port string `json:"port" xorm:"port"`
PvFlag string `json:"pvFlag" xorm:"pv_flag"` // NE virtual/physical flag, VNF/PNF: virtual/physical
NeAddress string `json:"neAddress" xorm:"ne_address"` // PNF only
Province string `json:"province" xorm:"province"` // province where the NE is located
VendorName string `json:"vendorName" xorm:"vendor_name"` // vendor name
Dn string `json:"dn" xorm:"dn"` // network identifier
Status int `json:"status" xorm:"status"`
UpdateTime string `json:"-" xorm:"-"`
}
func XormGetNeInfoByType(neType string, nes *[]NeInfo) (*[]NeInfo, error) {
log.Debug("XormGetNeInfoByType processing... ")
ne := new(NeInfo)
rows, err := dbClient.xEngine.Table("ne_info").Where("ne_type =?", neType).Rows(ne)
if err != nil {
log.Error("Failed to get table ne_info from database:", err)
return nil, err
}
defer rows.Close()
for rows.Next() {
err := rows.Scan(ne)
if err != nil {
log.Error("Failed to get table ne_info from database:", err)
return nil, err
}
*nes = append(*nes, *ne)
}
log.Debug("nes:", nes)
return nes, nil
}
func XormGetAllNeInfo(nes *[]NeInfo) (*[]NeInfo, error) {
log.Debug("XormGetAllNeInfo processing... ")
ne := new(NeInfo)
rows, err := dbClient.xEngine.Table("ne_info").Rows(ne)
if err != nil {
log.Error("Failed to get table ne_info from database:", err)
return nil, err
}
defer rows.Close()
for rows.Next() {
err := rows.Scan(ne)
if err != nil {
log.Error("Failed to get table ne_info from database:", err)
return nil, err
}
*nes = append(*nes, *ne)
}
log.Debug("nes:", nes)
return nes, nil
}
type NeState struct {
Id int `json:"id" xorm:"pk 'id' autoincr"`
NeType string `json:"neType" xorm:"ne_type"`
NeId string `json:"neId" xorm:"ne_id"`
Version string `json:"version" xorm:"column 'version' VARCHAR(16)"`
Capability uint32 `json:"capability" xorm:"capability"`
SerialNum string `json:"serialNum" xorm:"serial_num"`
ExpiryDate string `json:"expiryDate" xorm:"expiry_date"`
CpuUsage string `json:"cpuUsage" xorm:"cpu_usage"`
MemUsage string `json:"memUsage" xorm:"mem_usage"`
DiskSpace string `json:"diskSpace" xorm:"disk_space"`
Timestamp string `json:"timestamp" xorm:"-" `
}
func XormInsertNeState(neState *NeState) (int64, error) {
log.Debug("XormInsertNeState processing... ")
var affected int64 = 0
session := dbClient.xEngine.NewSession()
defer session.Close()
affected, err := session.InsertOne(neState)
if err != nil {
return 0, err
}
err = session.Commit()
if err != nil {
return 0, err
}
return affected, err
}
type NorthboundPm struct {
Id int `json:"-" xorm:"pk 'id' autoincr"`
Date string `json:"Date" xorm:"date"`
Index int `json:"Index" xorm:"index"` // slice index of the measurement granularity (e.g. 15 minutes) within one day: 0~95
Timestamp string `json:"-" xorm:"-"`
NeName string `json:"NeName" xorm:"ne_name"` // UserLabel
RmUID string `json:"RmUID" xorm:"rm_uid"`
NeType string `json:"NeType" xorm:"ne_type"` // NE type
PmVersion string `json:"PmVersion" xorm:"pm_version"` // performance data version
Dn string `json:"Dn" xorm:"dn"` // (???) NE identifier, e.g. RJN-CMZJ-TZ,SubNetwork=5GC88,ManagedElement=SMF53456,SmfFunction=53456
Period string `json:"Period" xorm:"period"` // measurement granularity option: 5/15/30/60
TimeZone string `json:"TimeZone" xorm:"time_zone"`
StartTime string `json:"StartTime" xorm:"start_time"`
Datas []struct {
ObjectType string `json:"ObjectType" xorm:"object_type"` // network resource class name; in the PM KPI list this is the spatial granularity, e.g. SmfFunction
KPIs []struct {
KPIID string `json:"KPIID" xorm:"pm_name"` // KPI item, e.g. SMF.AttCreatePduSession._Dnn
KPIValues []struct {
Name string `json:"Name" xorm:"name"` // "Total" for a single value, or the corresponding Dnn name (e.g. "cmnet"/"ims") when the KPI has multiple measurement items such as Dnn
Value int `json:"Value" xorm:"value"`
} `json:"KPIValues" xorm:"sub_datas"`
} `json:"KPIs" xorm:"pm_datas"`
} `json:"Datas" xorm:"datas"`
}
func XormInsertNorthboundPm(pm *NorthboundPm) (int64, error) {
log.Debug("XormInsertNorthboundPm processing... ")
var affected int64 = 0
session := dbClient.xEngine.NewSession()
defer session.Close()
affected, err := session.InsertOne(pm)
session.Commit()
return affected, err
}
func XormGetNorthboundPm(date string, index int, neType string, pms *[]NorthboundPm) (*[]NorthboundPm, error) {
log.Debug("XormGetNorthboundPm processing... ")
pm := new(NorthboundPm)
rows, err := dbClient.xEngine.Table("nbi_pm").
Where("`ne_type` = ? AND `date` = ? AND `index` = ?", neType, date, index).
Rows(pm)
if err != nil {
log.Error("Failed to get table nbi_pm from database:", err)
return nil, err
}
defer rows.Close()
for rows.Next() {
err := rows.Scan(pm)
if err != nil {
log.Error("Failed to get table nbi_pm from database:", err)
return nil, err
}
*pms = append(*pms, *pm)
}
log.Debug("pms:", pms)
return pms, nil
}
func XormGetMeasureThreshold(tableName string, where string, datas *[]MeasureThreshold) (*[]MeasureThreshold, error) {
log.Debug("XormGetMeasureThreshold processing... ")
row := new(MeasureThreshold)
rows, err := dbClient.xEngine.Table(tableName).Where(where).Rows(row)
if err != nil {
log.Errorf("Failed to get table %s from database: %v", tableName, err)
return nil, err
}
defer rows.Close()
for rows.Next() {
err := rows.Scan(row)
if err != nil {
log.Error("Failed to get table measure_threshold from database:", err)
return nil, err
}
*datas = append(*datas, *row)
}
log.Debug("datas:", datas)
return datas, nil
}
type MeasureThreshold struct {
Id int `json:"id" xorm:"pk 'id' autoincr"`
NeType string `json:"neType" xorm:"ne_type"`
KpiSet string `json:"kpiSet" xorm:"kpi_set"`
Threshold int64 `json:"threshold" xorm:"threshold"`
Status string `json:"status" xorm:"Status"`
OrigSeverity string `json:"origSeverity" xorm:"orig_severity"`
AlarmCode string `json:"alarmCode" xorm:"alarm_code"`
AlarmFlag bool `json:"alarmFlag" xorm:"alarm_flag"`
}
type MeasureData struct {
// Id int `json:"id" xorm:"pk 'id' autoincr"`
Id int `json:"id" xorm:"-"`
Date string `json:"date" xorm:"date"`
TaskId int `json:"taskId"`
NeType string `json:"neType" xorm:"ne_type"`
NeName string `json:"neName" xorm:"ne_name"`
RmUid string `json:"rmUid" xorm:"rm_uid"`
GranulOption string `json:"granulOption" xorm:"granul_option"`
StartTime string `json:"startTime"`
EndTime string `json:"endTime"`
KpiCode string `json:"kpiCode" xorm:"kpi_code"`
KpiId string `json:"kpiId" xorm:"kpi_id"`
KpiExt string `json:"kpiExt" xorm:"kpi_ext"`
Value int64 `json:"value"`
Timestamp string `json:"timestamp"`
}
func XormGetMeasureData(where string, datas *[]MeasureData) (*[]MeasureData, error) {
log.Debug("XormGetMeasureData processing... ")
row := new(MeasureData)
rows, err := dbClient.xEngine.Where(where).Rows(row)
if err != nil {
log.Errorf("Failed to get table measure_data from database: %v", err)
return nil, err
}
defer rows.Close()
for rows.Next() {
err := rows.Scan(row)
if err != nil {
log.Error("Failed to get table measure_data from database:", err)
return nil, err
}
*datas = append(*datas, *row)
}
log.Debug("datas:", datas)
return datas, nil
}
func XormGetMeasureDataLastOne(neType, rmUID string, taskId int) (*MeasureData, error) {
log.Debug("XormGetMeasureDataOneByKpi processing... ")
measureData := new(MeasureData)
_, err := dbClient.xEngine.
SQL("select * from measure_data where ne_type=? and rm_uid=? and task_id=? order by start_time desc limit 1", neType, rmUID, taskId).
Get(measureData)
if err != nil {
log.Errorf("Failed to get measure_data: %v", err)
return nil, err
}
return measureData, nil
}
func XormGetMeasureDataOneByKpi(kpi string) (*MeasureData, error) {
log.Debug("XormGetMeasureDataOneByKpi processing... ")
measureData := new(MeasureData)
_, err := dbClient.xEngine.
SQL("select * from measure_data where kpi_id = ? order by timestamp desc limit 1", kpi).
Get(measureData)
if err != nil {
log.Errorf("Failed to get table measure_data from database: %v", err)
return nil, err
}
log.Debug("measureData:", measureData)
return measureData, nil
}
type AlarmDefine struct {
AlarmId string `json:"alarmId" xorm:"alarm_id"`
AlarmCode int `json:"alarmCode" xorm:"alarm_code"`
AlarmTitle string `json:"alarmTitle" xorm:"alarm_title"`
NeType string `json:"neType" xorm:"ne_type"`
AlarmType string `json:"alarmType" xorm:"alarm_type"`
OrigSeverity string `json:"origSeverity" xorm:"orig_severity"`
ObjectUid string `json:"objectUid" xorm:"object_uid"`
ObjectName string `json:"objectName" xorm:"object_name"`
ObjectType string `json:"objectType" xorm:"object_type"`
LocationInfo string `json:"locationInfo"`
SpecificProblem string `json:"specificProblem"`
SpecificProblemId string `json:"specificProblemId" xorm:"specific_problem_id"`
AddInfo string `json:"addInfo" xorm:"add_info"`
Threshold int64 `json:"threshold" xorm:"threshold"`
Status string `json:"status" xorm:"status"`
}
func XormGetAlarmDefine(alarmCode string) (*AlarmDefine, error) {
log.Debug("XormGetAlarmDefine processing... ")
alarmDefine := new(AlarmDefine)
_, err := dbClient.xEngine.
Where("alarm_code=? and status='Active'", alarmCode).
Get(alarmDefine)
if err != nil {
log.Error("Failed to get table alarm_define from database:", err)
return nil, err
}
return alarmDefine, nil
}
const (
AlarmStatusClear = 0
AlarmStatusActive = 1
AlarmStatusClearString = "0"
AlarmStatusActiveString = "1"
)
const (
ClearTypeUnclear = 0
ClearTypeAutoClear = 1
ClearTypeManualClear = 2
)
type Alarm struct {
Id int `json:"-" xorm:"pk 'id' autoincr"`
AlarmSeq int `json:"alarmSeq"`
AlarmId string `json:"alarmId" xorm:"alarm_id"`
NeId string `json:"neId"`
AlarmCode int `json:"alarmCode"`
AlarmTitle string `json:"alarmTitle"`
EventTime string `json:"eventTime"`
AlarmType string `json:"alarmType"`
OrigSeverity string `json:"origSeverity"`
PerceivedSeverity string `json:"perceivedSeverity"`
PVFlag string `json:"pvFlag" xorm:"pv_flag"`
NeName string `json:"neName"`
NeType string `json:"neType"`
ObjectUid string `json:"objectUid" xorm:"object_uid"`
ObjectName string `json:"objectName" xorm:"object_name"`
ObjectType string `json:"objectType" xorm:"object_type"`
LocationInfo string `json:"locationInfo"`
Province string `json:"province"`
AlarmStatus int `json:"alarmStatus" xorm:"alarm_status"`
SpecificProblem string `json:"specificProblem"`
SpecificProblemID string `json:"specificProblemID" xorm:"specific_problem_id"`
AddInfo string `json:"addInfo"`
// ClearType int `json:"-" xorm:"clear_type"` // 0: Unclear, 1: Auto clear, 2: Manual clear
// ClearTime sql.NullTime `json:"-" xorm:"clear_time"`
}
type AlarmLog struct {
NeType string `json:"neType" xorm:"ne_type"`
NeId string `json:"neId" xorm:"ne_id"`
AlarmSeq string `json:"alarmSeq" xorm:"alarm_seq"`
AlarmId string `json:"alarmId" xorm:"alarm_id"`
AlarmCode int `json:"alarmCode" xorm:"alarm_code"`
AlarmStatus int `json:"alarmStatus" xorm:"alarm_status"`
EventTime string `json:"eventTime" xorm:"event_time"`
// ClearTime sql.NullTime `json:"clearTime" xorm:"clear_time"`
LogTime string `json:"logTime" xorm:"-"`
}
func XormInsertAlarm(alarm *Alarm) (int64, error) {
log.Debug("XormInsertAlarm processing... ")
var affected int64 = 0
session := dbClient.xEngine.NewSession()
defer session.Close()
affected, err := session.InsertOne(alarm)
if err != nil {
log.Errorf("Failed to insert alarm: %v", err)
return 0, err
}
err = session.Commit()
return affected, err
}
func XormInsertTalbeOne(tbInfo interface{}) (int64, error) {
log.Debug("XormInsertTalbeOne processing... ")
var affected int64 = 0
session := dbClient.xEngine.NewSession()
defer session.Close()
affected, err := session.InsertOne(tbInfo)
if err != nil {
log.Errorf("Failed to insert record: %v", err)
return 0, err
}
err = session.Commit()
return affected, err
}
func XormGetDataBySQL(sql string) (*[]map[string]string, error) {
//log.Debug("XormGetDataBySQL processing... ")
rows, err := dbClient.xEngine.QueryString(sql)
if err != nil {
log.Errorf("Failed to QueryString: %v", err)
return nil, err
}
return &rows, nil
}
func XormGetTableOneByWhere(where string, tableName string) (*[]interface{}, error) {
log.Debug("XormGetTableOneByWhere processing... ")
row := new([]interface{})
tb, err := dbClient.xEngine.TableInfo(tableName)
if err != nil {
log.Error("Failed to get TableInfo:", err)
return nil, err
}
columns := tb.Columns()
log.Debug("columns:", columns)
has, err := dbClient.xEngine.Table(tableName).Where(where).Get(row)
if err != nil {
log.Errorf("Failed to get table %s from database:%v", tableName, err)
return nil, err
} else if !has {
log.Infof("Not found data from %s where=%s", tableName, where)
return nil, nil
}
log.Debugf("%s:%v", tableName, row)
return row, nil
}
func XormGetTableOneById(id int, tableName string) (*[]interface{}, error) {
log.Debug("XormGetTableOneById processing... ")
rows := new([]interface{})
has, err := dbClient.xEngine.Table(tableName).ID(id).Get(rows)
if err != nil {
log.Errorf("Failed to get table %s from database:id=%d, %v", tableName, id, err)
return nil, err
} else if !has {
log.Infof("Not found table %s from database:id=%d", tableName, id)
return nil, nil
}
log.Debugf("%s:%v", tableName, rows)
return rows, nil
}
func XormUpdateTableById(id int, tableName string, tbInfo interface{}, cols ...string) (int64, error) {
log.Debug("XormUpdateTableById processing... ")
session := dbClient.xEngine.NewSession()
defer session.Close()
affected, err := session.Table(tableName).ID(id).MustCols(cols...).Update(tbInfo)
if err != nil {
log.Errorf("Failed to update table %s from database:%v", tableName, err)
return 0, err
}
session.Commit()
return affected, nil
}
func XormUpdateTableByWhere(where string, tableName string, tbInfo interface{}) (int64, error) {
log.Debug("XormUpdateTableByWhere processing... ")
session := dbClient.xEngine.NewSession()
defer session.Close()
affected, err := session.Table(tableName).Where(where).Update(tbInfo)
if err != nil {
log.Errorf("Failed to update table %s from database:%v", tableName, err)
return 0, err
}
session.Commit()
return affected, nil
}
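A minimal usage sketch for the xorm helpers above, assuming it sits in the same package so that the types, constants and log wrapper are in scope; the KPI id and the threshold are illustrative values, not taken from the original code.
// reportLatestKpi is a hypothetical caller of the helpers above; the KPI id
// and threshold are placeholders for illustration only.
func reportLatestKpi() {
	md, err := XormGetMeasureDataOneByKpi("KPI.0001")
	if err != nil {
		log.Error("Failed to query latest measure_data:", err)
		return
	}
	log.Infof("latest value for %s: %d at %s", md.KpiId, md.Value, md.Timestamp)
	// Raise an illustrative threshold alarm; 10200 is the measure threshold
	// alarm code referenced in tasks.yaml below.
	if md.Value > 1000 {
		alarm := &Alarm{
			AlarmCode:   10200,
			AlarmTitle:  "measure threshold crossed",
			AlarmStatus: AlarmStatusActive,
			EventTime:   md.Timestamp,
		}
		if _, err := XormInsertAlarm(alarm); err != nil {
			log.Error("Failed to insert alarm:", err)
		}
	}
}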

View File

@@ -1,49 +0,0 @@
ManagedElement:
Id: ""
UserLabel: ""
VendorName: ""
ManagedBy: ""
ManagementIpAddress: ""
SwVersion: ""
PatchInfo: ""
AdministrativeState: ""
OperationalState: ""
AmfFunction:
Id: ""
UserLabel: ""
AdministrativeState: ""
OperationalState: ""
VnfInstanceId: ""
Fqdn: ""
SbiServiceList: ""
AmfGuamiList: ""
SnssaiList: ""
MaxUser: ""
RelativeCapacity: ""
MaxGnbNum: ""
EpRpDynN8Amf:
Id: ""
UserLabel: ""
LocIpAddrList: ""
FarIpSubnetworkList: ""
EpRpDynN11Amf:
Id: ""
UserLabel: ""
LocIpAddrList: ""
FarIpSubnetworkList: ""
EpRpDynN12Amf:
Id: ""
UserLabel: ""
LocIpAddrList: ""
FarIpSubnetworkList: ""
IPResource:
Id: ""
UserLabel: ""
InterfaceType: ""
LocIpV4AddrList: ""
LocIpV6AddrList: ""

View File

@@ -1,48 +0,0 @@
ManagedElement:
Id: ""
UserLabel: ""
VendorName: ""
ManagedBy: ""
ManagementIpAddress: ""
SwVersion: ""
PatchInfo: ""
AdministrativeState: ""
OperationalState: ""
SmfFunction:
Id: ""
UserLabel: ""
AdministrativeState: ""
OperationalState: ""
VnfInstanceId: ""
Fqdn: ""
SbiServiceList: ""
MaxPduSessions: ""
MaxQfi: ""
UpfList: ""
AddrPool:
Id: ""
UserLabel: ""
AddrType: "Static"
IpVersion: ""
AddrSegList: ""
EpRpDynN7Smf:
Id: ""
UserLabel: ""
LocIpAddrList: ""
FarIpSubnetworkList: ""
EpRpDynN10Smf:
Id: ""
UserLabel: ""
LocIpAddrList: ""
FarIpSubnetworkList: ""
IPResource:
Id: ""
UserLabel: ""
InterfaceType: ""
LocIpV4AddrList: ""
LocIpV6AddrList: ""

View File

@@ -1,39 +0,0 @@
ManagedElement:
Id: ""
UserLabel: ""
VendorName: ""
ManagedBy: ""
ManagementIpAddress: ""
SwVersion: ""
PatchInfo: ""
AdministrativeState: ""
OperationalState: ""
UdmFunction:
Id: ""
UserLabel: ""
AdministrativeState: ""
OperationalState: ""
VnfInstanceId: ""
Fqdn: ""
SbiServiceList: ""
UdrFunction:
Id: ""
UserLabel: ""
AddrType: "Static"
IpVersion: ""
AddrSegList: ""
AusfFunction:
Id: ""
UserLabel: ""
LocIpAddrList: ""
FarIpSubnetworkList: ""
IPResource:
Id: ""
UserLabel: ""
InterfaceType: ""
LocIpV4AddrList: ""
LocIpV6AddrList: ""

View File

@@ -1,141 +0,0 @@
ManagedElement:
Id: ""
UserLabel: ""
VendorName: ""
ManagedBy: ""
ManagementIpAddress: ""
SwVersion: ""
PatchInfo: ""
AdministrativeState: ""
OperationalState: ""
InventoryUnitRack:
Id: ""
UserLabel: ""
AdministrativeState: ""
OperationalState: ""
VnfInstanceId: ""
Fqdn: ""
SbiServiceList: ""
MaxPduSessions: ""
MaxQfi: ""
UpfList: ""
InventoryUnitShelf:
Id: ""
UserLabel: ""
VendorUnitFamilyType: ""
VendorUnitTypeNumber: ""
VendorName: ""
SerialNumber: ""
VersionNumber: ""
DateOfManufacture: ""
DateOfLastService: ""
ManufacturerData: ""
RackPosition: ""
InventoryUnitPack:
Id: ""
UserLabel: ""
VendorUnitFamilyType: ""
VendorUnitTypeNumber: ""
VendorName: ""
SerialNumber: ""
VersionNumber: ""
DateOfManufacture: ""
DateOfLastService: ""
ManufacturerData: ""
PortsInformation: ""
PackPosition: ""
SlotsOccupied: ""
InventoryUnitHost:
Id: ""
UserLabel: ""
VendorUnitFamilyType: ""
VendorUnitTypeNumber: ""
VendorName: ""
SerialNumber: ""
VersionNumber: ""
DateOfManufacture: ""
DateOfLastService: ""
ManufacturerData: ""
HostPosition: ""
NumberOfCpu: ""
MemSize: ""
HardDiskSize: ""
InventoryUnitAccessory:
Id: ""
UserLabel: ""
VendorUnitFamilyType: ""
VendorUnitTypeNumber: ""
VendorName: ""
SerialNumber: ""
VersionNumber: ""
DateOfManufacture: ""
DateOfLastService: ""
ManufacturerData: ""
AccessoryPosition: ""
AccessoryType: ""
AddtionalInformation: ""
UpfFunction:
Id: ""
UserLabel: ""
AdministrativeState: ""
OperationalState: ""
VnfInstanceId: ""
MaxQosFlows: ""
MaxThroughput: ""
EpRpDynN9Upf:
Id: ""
UserLabel: ""
LocIpAddrList: ""
FarIpSubnetworkList: ""
EpRpDynN3Upf:
Id: ""
UserLabel: ""
LocIpAddrList: ""
FarIpSubnetworkList: ""
AmfFunction:
Id: ""
UserLabel: ""
AdministrativeState: ""
OperationalState: ""
VnfInstanceId: ""
Fqdn: ""
SmfFunction:
Id: ""
UserLabel: ""
AdministrativeState: ""
OperationalState: ""
VnfInstanceId: ""
Fqdn: ""
UdrFunction:
Id: ""
UserLabel: ""
AdministrativeState: ""
OperationalState: ""
VnfInstanceId: ""
Fqdn: ""
AusfFunction:
Id: ""
UserLabel: ""
AdministrativeState: ""
OperationalState: ""
VnfInstanceId: ""
Fqdn: ""
IPResource:
Id: ""
UserLabel: ""
InterfaceType: ""
LocIpV4AddrList: ""
LocIpV6AddrList: ""

View File

@@ -1,43 +0,0 @@
# file: log file name
# level: trace/debug/info/warn/error/fatal, default: debug
# duration: saved days, default is 30 days
logger:
file: d:/omc.git/be.ems/crontask/log/crontask.log
level: trace
duration: 24
count: 10
omc:
name: OMC01
hosturi: http://127.0.0.1:3040
hostno: R001
province: GD
netabbr: HX
vendor:
tasks:
file: ./etc/tasks.yaml
database:
type: mysql
user: root
password: 1000omc@kp!
host: 127.0.0.1
port: 33066
name: omc_db
connParam: charset=utf8mb4&collation=utf8mb4_general_ci&parseTime=True&interpolateParams=True
backup: d:/omc.git/be.ems/restagent/database
# northbound interface, cm/pm
# duration(day): saved days
nbi:
cm:
cfgfiledir: ./etc/cm
xmlfiledir: ./ftp/cm
version: V2.5.0
duration: 30
pm:
cfgfiledir: ./etc/pm
xmlfiledir: ./ftp/pm
version: V2.5.0
duration: 30
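The generators later in this commit read this file through fields such as yamlConfig.OMC.Province and yamlConfig.NBI.CM.XmlFileDir; a partial Go mapping for it could look like the sketch below. The struct and function names are assumptions, not the original crontask config types, and only the sections used by the CM/PM code are covered.
package main

import (
	"os"

	"gopkg.in/yaml.v3"
)

// Hypothetical, partial shape of the crontask configuration above.
type OmcSection struct {
	Name     string `yaml:"name"`
	HostNo   string `yaml:"hostno"`
	Province string `yaml:"province"`
	Vendor   string `yaml:"vendor"`
}

type NbiSection struct {
	CfgFileDir string `yaml:"cfgfiledir"`
	XmlFileDir string `yaml:"xmlfiledir"`
	Version    string `yaml:"version"`
	Duration   int    `yaml:"duration"`
}

type CronConfig struct {
	OMC OmcSection `yaml:"omc"`
	NBI struct {
		CM NbiSection `yaml:"cm"`
		PM NbiSection `yaml:"pm"`
	} `yaml:"nbi"`
}

// loadCronConfig unmarshals the YAML file above into the sketched types.
func loadCronConfig(path string) (*CronConfig, error) {
	data, err := os.ReadFile(path)
	if err != nil {
		return nil, err
	}
	cfg := new(CronConfig)
	if err := yaml.Unmarshal(data, cfg); err != nil {
		return nil, err
	}
	return cfg, nil
}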

View File

@@ -1,194 +0,0 @@
# example:
# tasks:
# - name: test # task comment
# uri: # restful uri
# params: # params of url
# interval: 30 # do something every interval
# unit: Seconds #Seconds/Minutes/Hours/Days/Weeks, Monday/Tuesday/.../Sunday,
# at: 00:10:00 # run at a time of day such as xx:xx:xx
# do: HelloWorldTask # (Do what: callback function)
#
# Attention: must restart crontask after modifying this file
#
tasks:
- name: test # task comment
status: Active #Active/Inactive
uri: # restful uri
params: # params of http url
body: # body of http request
interval: 60 # do something every interval
unit: Seconds #Seconds/Minutes/Hours/Days/Weeks, Monday/Tuesday/.../Sunday,
at: 00:10:00 # run at xx:xx:xx when unit is Days/Monday/...
do: TaskHelloWorld # (Do what: callback function)
- name: Cron user login OMC as startup
status: Inactive
uri: /login
params:
body: '{"username":"cronuser","password":"tcu@1000OMC!","code":"","uuid":""}'
interval: 0
unit: Startup
at: 00:00:00
do: TaskCronUserLoginOMC
- name: clear expired history alarm
uri: /api/rest/databaseManagement/v1/omc_db/alarm
params: WHERE=now()+>+ADDDATE(event_time,+interval+(SELECT+`value`+FROM+config+WHERE+config_tag='historyDuration')+day)+and+alarm_status='0'
interval: 1
unit: Days
at: 00:10:00
do: TaskDeleteExpiredRecord
- name: clear expired history gold_kpi
uri: /api/rest/databaseManagement/v1/omc_db/gold_kpi
params: WHERE=now()+>+ADDDATE(`date`,+interval+IFNULL((SELECT+`value`+FROM+config+WHERE+config_tag='goldKpiDuration'),7)+day)
interval: 1
unit: Days
at: 00:11:00
do: TaskDeleteExpiredRecord
- name: clear deleted custom pm kpi
uri: /api/rest/databaseManagement/v1/omc_db/pm_custom_title
params: WHERE=now()+>+ADDDATE(update_time,+interval+(SELECT+`value`+FROM+config+WHERE+config_tag='keepPMCKpi')+day)+and+status='Deleted'
interval: 1
unit: Days
at: 00:15:00
do: TaskDeleteExpiredRecord
- name: clear expired ne etc backup
uri: /api/rest/databaseManagement/v1/omc_db/ne_backup
params: WHERE=now()+>+ADDDATE(`create_time`,+interval+IFNULL((SELECT+`value`+FROM+config+WHERE+config_tag='BackUpSaveTime'),30)+day)
interval: 1
unit: Days
at: 15:02:00
do: TaskRemoveExpiredFile
- name: update expired user session
uri: /api/rest/databaseManagement/v1/omc_db/session
params: WHERE=NOW()+>+ADDDATE(shake_time,+interval+expires+second)+and+status='online'
body: '{"session":{"status":"offline"}}'
interval: 30
unit: Seconds
at:
do: TaskUpdateTable
- name: clear expired log
uri:
params:
interval: 1
unit: Days
at: 00:50:00
do: TaskDeleteExpiredRecord
- name: Backup measure data
uri: /api/rest/databaseManagement/v1/omc_db/measure_data
params: SQL=select+*+into+outfile+'%s'+fields+terminated+by+','+escaped+by+''+optionally+enclosed+by+''+lines+terminated+by+'\n'+from+(select+'id','date','task_id','ne_name','rm_uid','ne_type','granul_option','kpi_code','kpi_id','kpi_ext','start_time','end_time','value','timestamp'+union+select+id,date,task_id,ne_name,rm_uid,ne_type,granul_option,kpi_code,kpi_id,kpi_ext,start_time,end_time,value,timestamp+from+measure_data)+b
interval: 1
unit: Days
at: 00:20:00
do: TaskDBBackupCSVGetBySQL
- name: Backup operation log
uri: /api/rest/databaseManagement/v1/omc_db/operation_log
params: SQL=select+*+into+outfile+'%s'+fields+terminated+by+','+escaped+by+''+optionally+enclosed+by+''+lines+terminated+by+'\n'+from+(select+'op_id','account_name','op_ip','subsys_tag','op_type','op_content','op_result','begin_time','end_time','vnf_flag','log_time'+union+select+op_id,account_name,op_ip,subsys_tag,op_type,op_content,op_result,begin_time,end_time,vnf_flag,log_time+from+operation_log)+b
interval: 1
unit: Days
at: 00:26:00
do: TaskDBBackupCSVGetBySQL
- name: Backup security log
uri: /api/rest/databaseManagement/v1/omc_db/security_log
params: SQL=select+*+into+outfile+'%s'+fields+terminated+by+','+escaped+by+''+optionally+enclosed+by+''+lines+terminated+by+'\n'+from+(select+'id','account_name','account_type','op_ip','op_type','op_content','op_result','op_time'+union+select+id,account_name,account_type,op_ip,op_type,op_content,op_result,op_time+from+security_log)+b
interval: 1
unit: Days
at: 00:28:00
do: TaskDBBackupCSVGetBySQL
- name: Backup alarm log
uri: /api/rest/databaseManagement/v1/omc_db/alarm_log
params: SQL=select+*+into+outfile+'%s'+fields+terminated+by+','+escaped+by+''+optionally+enclosed+by+''+lines+terminated+by+'\n'+from+(select+'id','ne_type','ne_id','alarm_seq','alarm_id','alarm_code','alarm_status','event_time','log_time'+union+select+id,ne_type,ne_id,alarm_seq,alarm_id,alarm_code,alarm_status,event_time,log_time+from+alarm_log)+b
interval: 1
unit: Days
at: 00:30:00
do: TaskDBBackupCSVGetBySQL
- name: handshake to NF
status: Inactive
uri: /api/rest/systemManagement/v1/elementType/%s/objectType/systemState
params:
interval: 10
unit: Seconds
at:
do: TaskHandShakeToNF
- name: Export CM from NF
uri: /api/rest/systemManagement/v1/elementType/%s/objectType/cm
params: ne_id=%s
interval: 1
unit: Days
at: 00:15
do: TaskExportCmFromNF
- name: Generate NRM xml file
uri:
params:
interval: 1
unit: Day
at: 00:00,06:00,12:00,18:00,23:19
do: GenCmXmlFile
- name: Task of Generate measure threshold alarm
status: Inactive
uri: /api/rest/faultManagement/v1/elementType/%s/objectType/alarms
params: 10200
interval: 10
unit: Seconds
at:
do: TaskGenMeasureThresholdAlarm
- name: Task of Generate license alarm
status: Inactive
uri: /api/rest/faultManagement/v1/elementType/%s/objectType/alarms
params: 10100 #alarm_code
interval: 1
unit: Days
at: 20:01
do: TaskGenLicenseAlarm
- name: Task of Generate NE system state alarm
status: Active
uri: /api/rest/faultManagement/v1/elementType/%s/objectType/alarms
params: 10000
interval: 5
unit: Seconds
at:
do: TaskGenNeStateAlarm
- name: Task of Generate Measure Report Timeout
status: Active
uri: /api/rest/faultManagement/v1/elementType/%s/objectType/alarms
params: 10201
interval: 10
unit: Seconds
at:
do: TaskGenMeasureReportTimeoutAlarm
- name: Monitor process list and write system log
uri: /api/rest/databaseManagement/v1/omc_db/system_log
params:
body:
interval: 5
unit: Seconds
at:
do: TaskWriteSystemLog
- name: Copy log to /opt/omc/ftp/log
uri:
params: cp -rf /usr/local/omc/database/*.csv /opt/omc/ftp/log
interval: 10
unit: Minutes
at:
do: TaskRunShellCommand
# - name: Import CM to NF
# uri: /api/rest/systemManagement/v1/elementType/udm/objectType/cm
# params: ne_id=SZ_01
# interval: 15
# unit: Seconds
# at:
# do: TaskImportCmToNF
crontab:
# - name: run every 1 minute
# tab: 0 */1 * * * ? // crontab: rule like linux crontab
# do: CronHelloWorldTask // function name to call
# params:
- name: Generate PM xml file
status: Active
tab: 5,20,35,50 * * * *
do: GenPmXmlFile
uri: this is uri
params: Generating PM xml file
# - name: Import CM to NF
# tab: 0 * * * * *
# do: TaskImportCmToNF
# uri: /api/rest/systemManagement/v1/elementType/udm/objectType/cm
# params: ne_id=SZ_01
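Each entry's do: field names a callback such as TaskHelloWorld or GenPmXmlFile, and the generator methods later in this commit share the signature func (t *TaskFunc) Name(uri, params, body string). The scheduler itself is not part of this diff, so the name-based dispatch below is only an assumption of how such entries could be wired up.
package main

import (
	"fmt"
	"reflect"
)

// TaskFunc mirrors the receiver type used by the generator methods in this
// commit; the method body here is a stand-in for illustration.
type TaskFunc struct{}

func (t *TaskFunc) TaskHelloWorld(uri, params, body string) {
	fmt.Println("hello world task:", uri, params, body)
}

// callTaskByName invokes a callback that follows the (uri, params, body)
// signature, looked up by the name taken from a "do:" field.
func callTaskByName(t *TaskFunc, name, uri, params, body string) error {
	m := reflect.ValueOf(t).MethodByName(name)
	if !m.IsValid() {
		return fmt.Errorf("unknown task callback %q", name)
	}
	m.Call([]reflect.Value{
		reflect.ValueOf(uri),
		reflect.ValueOf(params),
		reflect.ValueOf(body),
	})
	return nil
}

func main() {
	if err := callTaskByName(&TaskFunc{}, "TaskHelloWorld", "", "", ""); err != nil {
		fmt.Println(err)
	}
}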

View File

@@ -1,200 +0,0 @@
package main
import (
"encoding/json"
"encoding/xml"
"fmt"
"os"
"sort"
"strconv"
"strings"
"time"
cmschema "be.ems/crontask/cm/schema"
"be.ems/lib/dborm"
"be.ems/lib/global"
"be.ems/lib/log"
"gopkg.in/yaml.v3"
)
const (
// Header is a generic XML header suitable for use with the output of Marshal.
// This is not automatically added to any output of this package,
// it is provided as a convenience.
Header = `<?xml version="1.0" encoding="UTF-8"?>` + "\n"
)
const (
AdminStateLocked = "Locked"
AdminStateUnlocked = "Unlocked"
AdminStateShuttingdown = "ShuttingDown"
OperationStateDisabled = "Disabled"
OperationStateEnabled = "Enabled"
)
func (t *TaskFunc) GenCmXmlFile(uri, params, body string) {
log.Debug("GenCmXmlFile processing ...")
for _, neType := range neTypes {
t.GenNFXmlFile(neType)
}
}
func (t *TaskFunc) ReadCmYaml(cmfile string) (map[string]interface{}, error) {
log.Debug("cmfile:", cmfile)
file, err := os.ReadFile(cmfile)
if err != nil {
log.Error(err)
return nil, err
}
log.Debug("file:", file)
resultMap := make(map[string]interface{})
err = yaml.Unmarshal(file, resultMap)
if err != nil {
log.Error(err)
return nil, err
}
log.Debug("resultMap:", resultMap)
return resultMap, nil
}
func (t *TaskFunc) GenNFXmlFile(neType string) error {
log.Info("GenNFXmlFile processing...")
var nes []NeInfo
_, err := XormGetNeInfoByType(neType, &nes)
if err != nil {
log.Error("Failed to get all ne info:", err)
return err
}
cmfile := fmt.Sprintf("%s/cm-%s.yaml", yamlConfig.NBI.CM.CfgFileDir, strings.ToLower(neType))
resultMap, _ := t.ReadCmYaml(cmfile)
ti := time.Now()
timestamp := ti.Format("2006-01-02 15:04:05")
timefile := ti.Format("20060102150405")
date := ti.Format("20060102")
_, offset := ti.Zone()
var tzOffset string
if offset >= 0 {
tzOffset = "UTC+" + strconv.Itoa(offset/3600)
} else {
tzOffset = "UTC" + strconv.Itoa(offset/3600)
}
nrmFile := new(cmschema.DataFile)
// create the output directory
path := fmt.Sprintf("%s/HX/%s/%s/CM/%s", yamlConfig.OMC.Province, yamlConfig.OMC.Vendor, yamlConfig.OMC.Name, date)
folderPath := global.CreateDir(path, yamlConfig.NBI.CM.XmlFileDir)
nrmFile.FileHeader = cmschema.FileHeader{
TimeStamp: timestamp,
TimeZone: tzOffset,
VendorName: yamlConfig.OMC.Vendor,
ElementType: neType,
CmVersion: yamlConfig.NBI.CM.Version,
}
nrmFile.XsiAttr = "http://www.w3.org/2001/XMLSchema-instance"
nrmFile.XsiLoc = "file:///usr/loal/omc/etc/schema/cm-schema.xsd"
for objectType, e := range resultMap {
//objects := cmschema.Objects{ObjectType: objectType}
objectData := cmschema.ObjectData{ObjectType: objectType}
sortResult := make(map[string]string)
keys := make([]string, 0)
for key, value := range e.(map[string]interface{}) {
sortResult[key] = fmt.Sprintf("%v", value)
keys = append(keys, key)
}
sort.Strings(keys)
for s, ne := range nes {
cmResult, err := dborm.XormGetNorthboundCmLatestObject(neType, ne.NeId, objectType)
if err != nil {
log.Error("Failed to XormGetNorthboundCmLatestObject:", err)
continue
}
log.Trace("cmResult:", cmResult)
//dataMap := make(map[string]interface{})
valueMap := make(map[string]interface{})
if cmResult.ValueJSON != "" {
err = json.Unmarshal([]byte(cmResult.ValueJSON), &valueMap)
if err != nil {
log.Error("Failed to json.Unmarshal:", err)
continue
}
}
log.Trace("valueMap:", valueMap)
rmUID := ne.RmUID
var object cmschema.Object
if ne.PvFlag == "VNF" {
vmId := fmt.Sprintf("kylin10.0-00%d-%s", s+1, neType)
vnfInstanceID := fmt.Sprintf("2%xd55b4-%d018-41f4-af%d5-28b%d828788", s+10, s+6, s+4, s+3)
object = cmschema.Object{RmUIDAttr: rmUID,
DnAttr: "DC=www.xxx.com.cn,SubNetwork=10001,SubNetwork=114214,ManagedElement=325",
UserLabelAttr: ne.NeName, PVFlagAttr: ne.PvFlag, VMIDAttr: vmId, VNFInstanceIDAttr: vnfInstanceID}
} else {
object = cmschema.Object{RmUIDAttr: rmUID,
DnAttr: "DC=www.xxx.com.cn,SubNetwork=10001,SubNetwork=114214,ManagedElement=325",
UserLabelAttr: ne.NeName, PVFlagAttr: ne.PvFlag}
}
i := 1
for _, p := range keys {
if s == 0 {
//objects.FieldName.N = append(objects.FieldName.N, cmschema.N{IAttr: i, Value: p})
objectData.FieldName.N = append(objectData.FieldName.N, cmschema.N{IAttr: i, Value: p})
}
var v string
if valueMap[p] == nil || valueMap[p] == "" {
v = "-"
} else {
v = fmt.Sprintf("%v", valueMap[p])
}
object.V = append(object.V, cmschema.V{IAttr: i, Value: v})
i++
}
//objects.FieldValue.Object = append(objects.FieldValue.Object, object)
objectData.FieldValue.Object = append(objectData.FieldValue.Object, object)
}
//nrmFile.Objects = append(nrmFile.Objects, objects)
nrmFile.ObjectData = objectData
x, _ := xml.MarshalIndent(nrmFile, "", " ")
x = append([]byte(xml.Header), x...)
xmlfile := fmt.Sprintf("%s/%s-CM-%s-%s-%s-%s-%s-001.xml",
folderPath, yamlConfig.OMC.Province, neType, objectType, yamlConfig.OMC.HostNo, yamlConfig.NBI.CM.Version, timefile)
tmpXmlfile := xmlfile + ".tmp"
err := os.WriteFile(tmpXmlfile, x, 0664)
if err != nil {
log.Error("Failed to WriteFile xml file:", err)
continue
}
err = os.Rename(tmpXmlfile, xmlfile)
if err != nil {
log.Error("Failed to Rename xml file:", err)
continue
}
zipFile := xmlfile + ".zip"
err = global.ZipOneFile(xmlfile, zipFile, false)
if err != nil {
log.Error("Failed to ZipOneFile:", err)
continue
}
err = os.Remove(xmlfile)
if err != nil {
log.Error("Failed to delete file:", err)
continue
}
}
return nil
}
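GenNFXmlFile writes to a .tmp file, renames it into place, zips it, and removes the plain xml; the same sequence appears again in the PM generator below. A helper capturing that pattern could look like this sketch, which assumes the same package and imports (fmt, os, be.ems/lib/global).
// writeXmlAndZip is a hypothetical helper factoring out the write/rename/zip
// sequence used above; it is not part of the original sources.
func writeXmlAndZip(xmlfile string, payload []byte) error {
	tmp := xmlfile + ".tmp"
	if err := os.WriteFile(tmp, payload, 0664); err != nil {
		return fmt.Errorf("write %s: %w", tmp, err)
	}
	// Rename only after the full payload is on disk so NBI readers never
	// pick up a partially written xml file.
	if err := os.Rename(tmp, xmlfile); err != nil {
		return fmt.Errorf("rename %s: %w", tmp, err)
	}
	if err := global.ZipOneFile(xmlfile, xmlfile+".zip", false); err != nil {
		return fmt.Errorf("zip %s: %w", xmlfile, err)
	}
	// Keep only the .zip, matching the generator behaviour.
	return os.Remove(xmlfile)
}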

View File

@@ -1,196 +0,0 @@
package main
import (
"encoding/xml"
"fmt"
"os"
"regexp"
"strconv"
"strings"
"time"
pmschema "be.ems/crontask/pm/schema"
"be.ems/lib/global"
"be.ems/lib/log"
"gopkg.in/yaml.v3"
)
var (
neTypes = []string{"AMF", "SMF", "UDM", "UPF", "AUSF"}
)
func (t *TaskFunc) GenPmXmlFile(uri, params, body string) {
for _, neType := range neTypes {
log.Debugf("GenPmXmlFile process %s xml file", neType)
t.GenNFPMXmlFile(neType)
}
}
func (t *TaskFunc) ReadPmYaml(cmfile string) (map[string]interface{}, error) {
log.Debug("cmfile:", cmfile)
file, err := os.ReadFile(cmfile)
if err != nil {
log.Error(err)
return nil, err
}
log.Debug("file:", file)
resultMap := make(map[string]interface{})
err = yaml.Unmarshal(file, resultMap)
if err != nil {
log.Error(err)
return nil, err
}
log.Debug("resultMap:", resultMap)
return resultMap, nil
}
func (t *TaskFunc) IsPseudoSubPmName(pmName string) bool {
return strings.Contains(pmName, "._")
}
func (t *TaskFunc) GenNFPMXmlFile(neType string) error {
log.Info("GenNFPMXmlFile processing...")
var nes []NeInfo
_, err := XormGetNeInfoByType(neType, &nes)
if err != nil {
log.Error("Failed to get all ne info:", err)
return err
}
ti := time.Now()
var startTime string
timestamp := ti.Format("2006-01-02 15:04:05")
index := global.GetCurrentTimeSliceIndexByPeriod(ti, 15)
date := ti.Format("2006-01-02")
dateHour := ti.Format("2006010215")
log.Debugf("date: %s index:%d dateHour:%s", date, index, dateHour)
_, offset := ti.Zone()
var tzOffset string
if offset >= 0 {
tzOffset = "UTC+" + strconv.Itoa(offset/3600)
} else {
tzOffset = "UTC" + strconv.Itoa(offset/3600)
}
pmFile := new(pmschema.PmFile)
var pmResults []NorthboundPm
_, err = XormGetNorthboundPm(date, index, neType, &pmResults)
if err != nil {
log.Error("Failed to get nbi_pm:", err)
return err
}
if len(pmResults) == 0 {
log.Errorf("%s:%v", neType, global.ErrPMNotFoundData)
return global.ErrPMNotFoundData
}
pmFile.XsAttr = "http://www.w3.org/2001/XMLSchema"
pmFile.XsiAttr = "http://www.w3.org/2001/XMLSchema-instance"
// create the output directory
path := fmt.Sprintf("%s/HX/%s/%s/PM/%s", yamlConfig.OMC.Province, yamlConfig.OMC.Vendor, yamlConfig.OMC.Name, dateHour)
folderPath := global.CreateDir(path, yamlConfig.NBI.PM.XmlFileDir)
var objectType string
var measurement pmschema.Measurements
for _, pmResult := range pmResults {
for _, pmData := range pmResult.Datas {
objectType = pmData.ObjectType
measurement = pmschema.Measurements{ObjectType: objectType}
measurement.PmData.Object.RmUIDAttr = pmResult.RmUID
measurement.PmData.Object.DnAttr = pmResult.Dn
measurement.PmData.Object.UserLabelAttr = pmResult.NeName
startTime = pmResult.StartTime
i := 1
for _, pmKPI := range pmData.KPIs {
measurement.PmName.N = append(measurement.PmName.N, pmschema.N{IAttr: i, Value: pmKPI.KPIID})
cv := pmschema.CV{IAttr: i}
isPseudo := true
value := "0"
reg := regexp.MustCompile(`_\w+`)
//sns := strings.Split(pmData.KPIID, "_")
for _, v := range pmKPI.KPIValues {
if fmt.Sprintf("%v", v.Name) == "Total" {
isPseudo = false
value = fmt.Sprintf("%v", v.Value)
break
} else {
isPseudo = true
//if len(sns) > 1 {
// sns := strings.Split(sns[1], ".")
//}
sn := reg.ReplaceAllString(pmKPI.KPIID, v.Name)
//sn := sns[0] + v.Name
// cv.NV = append(cv.NV, pmschema.NV{SN: sn, SV: fmt.Sprintf("%v", v.Value)})
cv.SN = append(cv.SN, sn)
cv.SV = append(cv.SV, fmt.Sprintf("%v", v.Value))
}
}
if !isPseudo {
measurement.PmData.Object.V = append(measurement.PmData.Object.V, pmschema.V{IAttr: i, Value: value})
} else {
measurement.PmData.Object.CV = append(measurement.PmData.Object.CV, cv)
}
i++
//measurement.PmData.Object.V = append(measurement.PmData.Object.V, pmschema.V{IAttr: i, Value: sortValues[pmName].Value})
//measurement.PmData.Object.CV = sortValues[pmName].Value
}
pmFile.Measurements = append(pmFile.Measurements, measurement)
startTime = startTime[:len("2006-01-02 15:04:05")]
pmFile.FileHeader = pmschema.FileHeader{
TimeStamp: timestamp,
TimeZone: tzOffset,
Period: 15,
VendorName: yamlConfig.OMC.Vendor,
ElementType: neType,
PmVersion: yamlConfig.NBI.PM.Version,
StartTime: startTime,
}
x, _ := xml.MarshalIndent(pmFile, "", " ")
x = append([]byte(xml.Header), x...)
//folderName := global.GetFmtTimeString(global.DateTime, startTime, global.DateHour)
//folderPath := global.CreateDir(folderName, yamlConfig.NBI.PM.XmlFileDir)
timefile := global.GetFmtTimeString(time.DateTime, startTime, global.DateData)
xmlfile := fmt.Sprintf("%s/%s-PM-%s-%s-%s-%s-%s-15-001.xml",
folderPath, yamlConfig.OMC.Province, neType, objectType, yamlConfig.OMC.HostNo, yamlConfig.NBI.PM.Version, timefile)
tmpXmlfile := xmlfile + ".tmp"
err = os.WriteFile(tmpXmlfile, x, 0664)
if err != nil {
log.Error("Failed to WriteFile xml file:", err)
continue
}
err = os.Rename(tmpXmlfile, xmlfile)
if err != nil {
log.Error("Failed to Rename xml file:", err)
continue
}
zipFile := xmlfile + ".zip"
err = global.ZipOneFile(xmlfile, zipFile, false)
if err != nil {
log.Error("Failed to ZipOneFile:", err)
continue
}
err = os.Remove(xmlfile)
if err != nil {
log.Error("Failed to delete file:", err)
continue
}
}
}
return nil
}
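The pseudo sub-counter handling above hinges on reg.ReplaceAllString swapping the _xxx suffix of a pseudo KPI id for each sub-value's name; the standalone example below shows the effect with made-up identifiers.
package main

import (
	"fmt"
	"regexp"
)

func main() {
	reg := regexp.MustCompile(`_\w+`)
	kpiID := "AMF.RegisteredSubNbr._Snssai" // hypothetical pseudo sub KPI id
	for _, name := range []string{"Snssai1", "Snssai2"} {
		// Prints AMF.RegisteredSubNbr.Snssai1 and AMF.RegisteredSubNbr.Snssai2
		fmt.Println(reg.ReplaceAllString(kpiID, name))
	}
}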

View File

@@ -1,18 +0,0 @@
# Makefile for OMC crontask project
PROJECT = OMC
VERSION = 2.2501.1
LIBDIR = be.ems/lib
BINNAME = crontask
.PHONY: build $(BINNAME)
build $(BINNAME):
go build -o $(BINNAME) -v -ldflags "-s -w -X '$(LIBDIR)/global.Version=$(VERSION)' \
-X '$(LIBDIR)/global.BuildTime=`date`' \
-X '$(LIBDIR)/global.GoVer=`go version`'"
run: $(BINNAME)
./$(BINNAME)
clean:
rm ./$(BINNAME)

View File

@@ -1,77 +0,0 @@
// Code generated by xgen. DO NOT EDIT.
package pmschema
import "encoding/xml"
// FileHeader ...
type FileHeader struct {
TimeStamp string `xml:"TimeStamp"`
TimeZone string `xml:"TimeZone"`
Period int `xml:"Period"`
VendorName string `xml:"VendorName"`
ElementType string `xml:"ElementType"`
PmVersion string `xml:"PmVersion"`
StartTime string `xml:"StartTime"`
}
// N ...
type N struct {
IAttr int `xml:"i,attr"`
Value string `xml:",chardata"`
}
// PmName ...
type PmName struct {
N []N `xml:"N"`
}
// V ...
type V struct {
IAttr int `xml:"i,attr"`
Value string `xml:",chardata"`
}
// NV ...
type NV struct {
XMLName xml.Name `xml:"-"`
SN string `xml:"SN"`
SV string `xml:"SV"`
}
// CV ...
type CV struct {
IAttr int `xml:"i,attr"`
// NV []NV `xml:"NV"`
SN []string `xml:"SN"`
SV []string `xml:"SV"`
}
// Object ...
type Object struct {
RmUIDAttr string `xml:"rmUID,attr"`
DnAttr string `xml:"Dn,attr"`
UserLabelAttr string `xml:"UserLabel,attr"`
V []V `xml:"V"`
CV []CV `xml:"CV"`
}
// PmData ...
type PmData struct {
Object Object `xml:"Object"`
}
// Measurements ...
type Measurements struct {
ObjectType string `xml:"ObjectType"`
PmName PmName `xml:"PmName"`
PmData PmData `xml:"PmData"`
}
// PmFile ...
type PmFile struct {
FileHeader FileHeader `xml:"FileHeader"`
Measurements []Measurements `xml:"Measurements"`
XsAttr string `xml:"xmlns:xs,attr"`
XsiAttr string `xml:"xmlns:xsi,attr"`
}
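A self-contained example of feeding these generated types to encoding/xml, mirroring the MarshalIndent call in GenNFPMXmlFile above; it assumes this first pmschema variant is the package at be.ems/crontask/pm/schema, and all field values are placeholders.
package main

import (
	"encoding/xml"
	"fmt"

	pmschema "be.ems/crontask/pm/schema"
)

func main() {
	pm := new(pmschema.PmFile)
	pm.XsAttr = "http://www.w3.org/2001/XMLSchema"
	pm.XsiAttr = "http://www.w3.org/2001/XMLSchema-instance"
	pm.FileHeader = pmschema.FileHeader{
		TimeStamp:   "2025-01-07 11:00:00",
		TimeZone:    "UTC+8",
		Period:      15,
		VendorName:  "ExampleVendor",
		ElementType: "AMF",
		PmVersion:   "V2.5.0",
		StartTime:   "2025-01-07 10:45:00",
	}
	m := pmschema.Measurements{ObjectType: "AmfFunction"}
	m.PmName.N = append(m.PmName.N, pmschema.N{IAttr: 1, Value: "AMF.RegisteredSubNbr"})
	m.PmData.Object.RmUIDAttr = "NE=0001"
	m.PmData.Object.UserLabelAttr = "amf-example"
	m.PmData.Object.V = append(m.PmData.Object.V, pmschema.V{IAttr: 1, Value: "42"})
	pm.Measurements = append(pm.Measurements, m)

	out, err := xml.MarshalIndent(pm, "", "  ")
	if err != nil {
		fmt.Println("marshal failed:", err)
		return
	}
	fmt.Println(xml.Header + string(out))
}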

View File

@@ -1,65 +0,0 @@
// Code generated by xgen. DO NOT EDIT.
package pmschema
// FileHeader ...
type FileHeader struct {
TimeStamp string `xml:"TimeStamp"`
TimeZone string `xml:"TimeZone"`
Period int `xml:"Period"`
VendorName string `xml:"VendorName"`
ElementType string `xml:"ElementType"`
PmVersion string `xml:"PmVersion"`
StartTime string `xml:"StartTime"`
}
// N ...
type N struct {
IAttr int `xml:"i,attr"`
Value string `xml:",chardata"`
}
// PmName ...
type PmName struct {
N *N `xml:"N"`
}
// V ...
type V struct {
IAttr int `xml:"i,attr"`
Value string `xml:",chardata"`
}
// CV ...
type CV struct {
IAttr int `xml:"i,attr"`
SN []string `xml:"SN"`
SV []string `xml:"SV"`
}
// Object ...
type Object struct {
RmUIDAttr string `xml:"rmUID,attr"`
DnAttr string `xml:"Dn,attr"`
UserLabelAttr string `xml:"UserLabel,attr"`
V []*V `xml:"V"`
CV []*CV `xml:"CV"`
}
// PmData ...
type PmData struct {
Object *Object `xml:"Object"`
}
// Measurements ...
type Measurements struct {
ObjectType string `xml:"ObjectType"`
PmName *PmName `xml:"PmName"`
PmData *PmData `xml:"PmData"`
}
// PmFile ...
type PmFile struct {
FileHeader FileHeader `xml:"FileHeader"`
Measurements []*Measurements `xml:"Measurements"`
}

File diff suppressed because it is too large

View File

@@ -1,135 +0,0 @@
package main
import (
"flag"
"fmt"
"os"
//"os"
"encoding/binary"
"encoding/hex"
"os/exec"
)
const magicMicroseconds = 0xa1b2c3d4
const versionMajor = 2
const versionMinor = 4
func WriteEmptyPcap(filename string, timestamp int64, length int, data []byte) error {
var cooked = [...]byte{0x00, 0x00, 0x03, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00}
var buf []byte
//24+16+16 = 56
buf = make([]byte, 56+length)
binary.LittleEndian.PutUint32(buf[0:4], magicMicroseconds)
binary.LittleEndian.PutUint16(buf[4:6], versionMajor)
binary.LittleEndian.PutUint16(buf[6:8], versionMinor)
// bytes 8:12 stay 0 (timezone = UTC)
// bytes 12:16 stay 0 (sigfigs is always set to zero, according to
// http://wiki.wireshark.org/Development/LibpcapFileFormat
binary.LittleEndian.PutUint32(buf[16:20], 0x00040000)
binary.LittleEndian.PutUint32(buf[20:24], 0x00000071)
// Packet Header
binary.LittleEndian.PutUint64(buf[24:32], uint64(timestamp))
binary.LittleEndian.PutUint32(buf[32:36], uint32(length+16))
binary.LittleEndian.PutUint32(buf[36:40], uint32(length+16))
copy(buf[40:], cooked[:])
copy(buf[56:], data[:])
err := os.WriteFile(filename, buf[:], 0644)
//fmt.Printf("CAP: %v\n", buf)
return err
}
// tshark -r gtp.pcap -T json -d tcp.port==8080,http2 -Y "http2"
func execTshark(html string, filename string, proto string, port int) {
var tshark *exec.Cmd
var sharkCmd string
pcapPath := filename
dataPort := fmt.Sprintf("tcp.port==%d,http2", port)
if proto == "http2" {
//tshark = exec.Command("tshark", "-r"+pcapPath,
// "-Y"+proto,
// "-d"+dataPort,
// "-T", "pdml")
sharkCmd = fmt.Sprintf("tshark -r %s -T pdml -d tcp.port==%s,http2 -Y \"%s\" > %s.pdml", pcapPath, dataPort, proto, pcapPath)
tshark = exec.Command("sh", "-c", sharkCmd)
} else {
//tshark = exec.Command("tshark", "-r"+pcapPath,
// "-Y"+proto,
// "-T", "pdml")
sharkCmd = fmt.Sprintf("tshark -r %s -T pdml -Y \"%s\" > %s.pdml", pcapPath, proto, pcapPath)
tshark = exec.Command("sh", "-c", sharkCmd)
}
_, err := tshark.CombinedOutput()
if err != nil {
fmt.Printf("tshark failed with %s\n", err)
} else {
//fmt.Printf("combined out:\n%s\n", string(out))
pdmlFile := fmt.Sprintf("%s.pdml", filename)
//err1 := os.WriteFile(pdmlFile, []byte(out), 0666)
//if err1 != nil {
// fmt.Println("write html failed")
//}else {
//xsltproc pdml2html.xsl ngap.pdml > /home/omcuser/ngap.html
command := fmt.Sprintf("xsltproc /usr/local/omc/etc/schema/pdml2html.xsl %s > %s", pdmlFile, html)
dest := exec.Command("sh", "-c", command)
_, err2 := dest.Output()
if err2 != nil {
fmt.Println("Error:", err2, command)
}
//}
}
}
func ipDataHandle(html string, iftype string, port int, timestamp int64, data []byte) int {
var filePath, proto string
if iftype == "N2" || iftype == "N1" {
filePath = fmt.Sprintf("/tmp/ng%d.pcap", timestamp)
proto = "ngap"
} else if iftype == "N4" {
filePath = fmt.Sprintf("/tmp/pf%d.pcap", timestamp)
proto = "pfcp"
} else {
filePath = fmt.Sprintf("/tmp/hp%d.pcap", timestamp)
proto = "http2"
}
err := WriteEmptyPcap(filePath, timestamp, len(data), data)
if err != nil {
fmt.Printf("tshark failed with %s\n", err)
} else {
execTshark(html, filePath, proto, port)
}
return 0
}
func main() {
var html, iftype, ipdata string
var timestamp int64
var port int
flag.Int64Var(&timestamp, "t", 0, "timestamp")
flag.StringVar(&iftype, "i", "", "interface type")
flag.IntVar(&port, "p", 0, "data port")
flag.StringVar(&ipdata, "d", "", "ip packet data")
flag.StringVar(&html, "f", "", "html file path")
flag.Parse()
ds, err := hex.DecodeString(ipdata)
if err != nil {
fmt.Println("invalid ip packet hex data:", err)
return
}
ipDataHandle(html, iftype, port, timestamp, ds)
}
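A minimal sketch exercising WriteEmptyPcap from the same file; the payload bytes, timestamp and output path are placeholders, and the commented command line is only an illustrative invocation of the built binary.
// exampleWritePcap shows how the pcap writer above can be driven directly.
// Illustrative CLI equivalent: ./data2html -t 1736218800 -i N2 -p 38412 -d 45000014 -f /tmp/ng.html
func exampleWritePcap() {
	payload := []byte{0x45, 0x00, 0x00, 0x14} // placeholder packet bytes
	ts := int64(1736218800)                   // placeholder epoch seconds
	if err := WriteEmptyPcap(fmt.Sprintf("/tmp/example%d.pcap", ts), ts, len(payload), payload); err != nil {
		fmt.Printf("write pcap failed with %s\n", err)
	}
}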

View File

@@ -1,29 +0,0 @@
# Makefile for rest agent project
PROJECT = OMC
VERSION = 2.2501.1
RelDate = `date +%Y%m%d`
Release = $(RelDate)
RelVer = $(VERSION)-$(RelDate)
PLATFORM = amd64
ARMPLATFORM = aarch64
BUILDDIR = ../../build
DEBBUILDDIR = ../../debbuild
RPMBUILDDIR = $(HOME)/goprojects/rpmbuild
INSTALLDIR = /usr/local/omc
RELEASEDIR = ../../release
LIBDIR = be.ems/lib
BINNAME = data2html
.PHONY: build $(BINNAME)
build $(BINNAME):
go build -o $(BINNAME) -v -ldflags "-s -w -X '$(LIBDIR)/global.Version=$(VERSION)' \
-X '$(LIBDIR)/global.BuildTime=`date`' \
-X '$(LIBDIR)/global.GoVer=`go version`'"
run: $(BINNAME)
./$(BINNAME)
clean:
rm ./$(BINNAME)

View File

@@ -35,25 +35,14 @@ EmsFEDir = $(GitLocalRoot)/fe.ems
EmsFEVue3Dir = $(GitLocalRoot)/fe.ems.vue3/dist
LibDir = $(EmsBEDir)/lib
RESTAGENT = restagent
CRONTASK = crontask
SshSvcBin = sshsvc
CapTraceBin = captrace
Data2htmlBin = data2html
NBI_ALARM = nbi_alarm
NBI_AGENT = nbi_agent
4A_AGENT = 4a_agent
RestagentDir = $(EmsBEDir)/$(RESTAGENT)
CrontaskDir = $(EmsBEDir)/$(CRONTASK)
SshSvcDir = $(EmsBEDir)/$(SshSvcBin)
CapTraceDir = $(EmsBEDir)/$(CapTraceBin)
Data2htmlDir = $(EmsBEDir)/$(Data2htmlBin)
DBSQLSrcDir = $(EmsBEDir)/database
MiscDir = $(EmsBEDir)/misc
FrontBuildDir = $(BuildOMCDir)/htdocs
FrontSrcDir = $(EmsBEDir)/front
ReleaseDebs = $(ReleaseDir)/debs/$(PLATFORM)
CrontaskSize = 27788951
RestagentSize = 29525312
BinWriterDir = $(HOME)/bin
ifeq ($(shell grep VERSION_ID /etc/os-release), VERSION_ID="22.04")
DebBuildDir := $(DebBuild2204Dir)
@@ -67,44 +56,30 @@ DebPkgFile := $(PROJECT)-r$(VERSION)-$(Release)-ub18.deb
DebPkgFileBA := $(PROJECT)-r$(VERSION)-$(Release)-ub18-ba.deb
endif
.PHONY: all $(RESTAGENT) $(CRONTASK) $(SshSvcBin) $(CapTraceBin) $(Data2htmlBin)
all: $(RESTAGENT) $(CRONTASK) $(SshSvcBin) $(CapTraceBin) $(Data2htmlBin)
.PHONY: all $(RESTAGENT) $(SshSvcBin)
all: $(RESTAGENT) $(SshSvcBin)
cd $(RestagentDir)
go build -o $(RESTAGENT) -v -ldflags "-s -w -X 'be.ems/lib/conifg.Version=$(RelVer)' \
-X '$(LibDir)/conifg.BuildTime=`date`' \
-X '$(LibDir)/conifg.GoVer=`go version`'"
cd $(CrontaskDir)
go build -o $(CRONTASK) -v -ldflags "-s -w -X '$(LibDir)/conifg.Version=$(RelVer)' \
-X '$(LibDir)/conifg.BuildTime=`date`' \
-X '$(LibDir)/conifg.GoVer=`go version`'"
cd $(SshSvcDir)
go build -o $(SshSvcBin) -v -ldflags "-s -w -X '$(LibDir)/conifg.Version=$(RelVer)' \
-X '$(LibDir)/conifg.BuildTime=`date`' \
-X '$(LibDir)/conifg.GoVer=`go version`'"
cd $(CapTraceDir)
go build -o $(CapTraceBin) -v -ldflags "-s -w -X '$(LibDir)/conifg.Version=$(RelVer)' \
-X '$(LibDir)/conifg.BuildTime=`date`' \
-X '$(LibDir)/conifg.GoVer=`go version`'"
cd $(Data2htmlDir)
go build -o $(Data2htmlBin) -v -ldflags "-s -w -X '$(LibDir)/conifg.Version=$(RelVer)' \
-X '$(LibDir)/conifg.BuildTime=`date`' \
-X '$(LibDir)/conifg.GoVer=`go version`'"
clean:
rm ./restagent/$(RESTAGENT) ./crontask/$(CRONTASK) $(SshSvcDir)/$(SshSvcBin) $(CapTraceDir)/$(CapTraceBin) $(Data2htmlDir)/$(Data2htmlBin)
rm ./restagent/$(RESTAGENT) $(SshSvcDir)/$(SshSvcBin)
dist:
tar -zvcPf $(ReleaseDir)/$(PROJECT)-src-$(VERSION).tar.gz \
../lib \
../restagent \
../crontask \
../initems \
../database \
../docs \
../misc \
../config \
--exclude=../restagent/restagent \
--exclude=../crontask/crontask \
--exclude=../initems/initems
pre_build: $(BINNAME)
@@ -114,10 +89,7 @@ pre_build: $(BINNAME)
rm -rf $(DebBuildDir)/usr/local/omc/etc/db/*
cp -rf $(RestagentDir)/$(RESTAGENT) $(BuildOMCBinDir)
cp -rf $(CrontaskDir)/$(CRONTASK) $(BuildOMCBinDir)
cp -rf $(SshSvcDir)/$(SshSvcBin) $(BuildOMCBinDir)
cp -rf $(CapTraceDir)/$(CapTraceBin) $(BuildOMCBinDir)
cp -rf $(Data2htmlDir)/$(Data2htmlBin) $(BuildOMCBinDir)
cp -rf $(SshSvcDir)/$(SshSvcBin) $(BuildOMCBinDir)
cp -rf $(MiscDir)/* $(BuildOMCBinDir)
cp -rf $(ConfigEtcDir)/* $(BuildOMCEtcDir)
rm -rf $(BuildOMCEtcDir)/db/*
@@ -162,16 +134,7 @@ rpm: $(BINNAME)
rm -rf $(RpmBuildDir)/BUILD/usr/local/omc/etc/db/*
cp -rf $(RestagentDir)/$(RESTAGENT) $(BuildOMCBinDir)
cp -rf $(CrontaskDir)/$(CRONTASK) $(BuildOMCBinDir)
cp -rf $(SshSvcDir)/$(SshSvcBin) $(BuildOMCBinDir)
cp -rf $(CapTraceDir)/$(CapTraceBin) $(BuildOMCBinDir)
cp -rf $(Data2htmlDir)/$(Data2htmlBin) $(BuildOMCBinDir)
# $(BinWriterDir)/binWriter $(BuildOMCBinDir)/$(RESTAGENT) $(RestagentSize)
# $(BinWriterDir)/binWriter $(BuildOMCBinDir)/$(CRONTASK) $(CrontaskSize)
# cp -rf $(MiscDir)/ne-hosts $(BuildOMCBinDir)
# cp -rf ./nbi/$(NBI_ALARM)/bin/$(NBI_ALARM) $(BinDir2)
# cp -rf ./nbi/$(NBI_AGENT)/bin/$(NBI_AGENT) $(BinDir2)
# cp -rf ./$(4A_AGENT)/bin/$(4A_AGENT) $(BinDir2)
cp -rf $(MiscDir)/* $(BuildOMCBinDir)
# rm -rf $(BinDir2)/ne-hosts
cp -rf $(ConfigEtcDir)/* $(BuildOMCEtcDir)

View File

@@ -1,7 +1,7 @@
#!/bin/bash
ProcList="restagent crontask sshsvc captrace"
ProcListDesc="crontask sshsvc captrace restagent"
ProcList="restagent sshsvc"
ProcListDesc="restagent sshsvc"
BinDir=/usr/local/omc/bin

View File

@@ -1,6 +1,6 @@
#!/bin/bash
ProcList="restagent crontask sshsvc captrace data2html"
ProcList="restagent sshsvc"
ProjectL=omc
VERSION=2.2501.1
RelDate=`date +%Y%m%d`

Binary file not shown.

Binary file not shown.

Binary file not shown.