be.ems/src/modules/network_data/service/all_perf_kpi.go
package service

import (
    "encoding/json"
    "fmt"
    "sort"
    "time"

    "be.ems/src/framework/constants/cachekey"
    "be.ems/src/framework/database/redis"
    "be.ems/src/framework/utils/parse"
    "be.ems/src/modules/network_data/model"
    "be.ems/src/modules/network_data/repository"
    neModel "be.ems/src/modules/network_element/model"
)
// NewPerfKPI is the shared service-layer PerfKPI instance.
var NewPerfKPI = &PerfKPI{
    perfKPIRepository: repository.NewPerfKPI,
}

// PerfKPI provides the service-layer handling for performance statistics.
type PerfKPI struct {
    perfKPIRepository *repository.PerfKPI // performance-statistics data access
}
// SelectGoldKPI returns the gold-KPI data for the network elements matched by the query.
func (r *PerfKPI) SelectGoldKPI(query model.GoldKPIQuery) []map[string]any {
    // Collect the KPI IDs configured for this NE type.
    var kpiIds []string
    kpiTitles := r.perfKPIRepository.SelectGoldKPITitle(query.NeType)
    for _, kpiId := range kpiTitles {
        kpiIds = append(kpiIds, kpiId.KPIID)
    }
    data := r.perfKPIRepository.SelectGoldKPI(query, kpiIds)
    if data == nil {
        return []map[string]any{}
    }
    return data
}

// SelectGoldKPITitle returns the KPI titles defined for the given NE type.
func (r *PerfKPI) SelectGoldKPITitle(neType string) []model.GoldKPITitle {
    return r.perfKPIRepository.SelectGoldKPITitle(neType)
}
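// A minimal usage sketch for the gold-KPI lookups above (hypothetical caller;
// the "AMF" NE type is illustrative and GoldKPIQuery may carry additional
// filter fields, such as a time range, that are not visible in this file):
//
//  titles := NewPerfKPI.SelectGoldKPITitle("AMF")                      // KPI definitions for the NE type
//  rows := NewPerfKPI.SelectGoldKPI(model.GoldKPIQuery{NeType: "AMF"}) // one map per matched row
//  _, _ = titles, rows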
// FindData returns KPI data for the queried network elements, aggregated into time buckets.
func (s PerfKPI) FindData(query model.GoldKPIQuery) []map[string]any {
    // Raw rows from the repository.
    rows := s.perfKPIRepository.SelectKPI(query)
    if len(rows) <= 0 {
        return []map[string]any{}
    }
    kpiIdsHas := false
    kpiIds := []string{}
    // Flatten each row's KPI values into a single map per row.
    arr := []map[string]any{}
    for _, row := range rows {
        // Parse the JSON string into a slice of maps.
        var kpiValues []map[string]any
        err := json.Unmarshal([]byte(row.KpiValues), &kpiValues)
        if err != nil {
            continue
        }
        item := map[string]any{
            "neType":     row.NeType,
            "neName":     row.NeName,
            "rmUID":      row.RmUid,
            "startIndex": row.Index,
            "timeGroup":  row.CreatedAt,
        }
        // Copy each KPI value onto the item, keyed by its KPI ID.
        for _, v := range kpiValues {
            kpiId := "-"
            if k, ok := v["kpi_id"]; ok {
                kpiId = fmt.Sprint(k)
            }
            item[kpiId] = v["value"]
        }
        arr = append(arr, item)
        // Record the KPI ID list once, from the first successfully parsed row.
        if !kpiIdsHas {
            for _, v := range kpiValues {
                kpiId := "-"
                if k, ok := v["kpi_id"]; ok {
                    kpiId = fmt.Sprint(k)
                }
                kpiIds = append(kpiIds, kpiId)
            }
            kpiIdsHas = true
        }
    }
    // Bucket size in seconds, e.g. pass 300 for 5-minute buckets.
    timeInterval := query.Interval
    // Group the flattened rows by time bucket.
    timeGroup := make(map[int64][]map[string]any)
    for _, v := range arr {
        itemTime := parse.Number(v["timeGroup"])
        // Convert the millisecond timestamp to seconds and floor it to the bucket
        // boundary (integer division truncates, so rows within the same interval
        // collapse onto the same bucket key).
        timeMinute := itemTime / 1000 / timeInterval * timeInterval
        timeGroup[timeMinute] = append(timeGroup[timeMinute], v)
    }
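    // Worked example of the flooring above (illustrative numbers): a CreatedAt of
    // 1717401930123 ms becomes 1717401930 s; with Interval = 300 the integer
    // division yields 1717401930 / 300 * 300 = 1717401900, so every row in that
    // 5-minute window shares the same bucket key.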
    // Merge each time bucket into a single output record.
    data := []map[string]any{}
    for _, records := range timeGroup {
        if len(records) <= 0 {
            continue
        }
        // The bucket key can be formatted for display if needed, e.g. when iterating
        // with the key: timeStr := time.Unix(k, 0).Format("2006-01-02 15:04:05").
        startItem := records[len(records)-1] // the last record is the earliest one, i.e. the startIndex row
        if len(records) >= 2 {               // the last record is excluded from the accumulation
            for _, record := range records[:len(records)-1] {
                // Accumulate every KPI of the remaining records onto startItem.
                for _, kpiId := range kpiIds {
                    if v, ok := record[kpiId]; ok {
                        // Special KPIs take a non-zero value only once and are never accumulated.
                        if kpiId == "AMF.01" || kpiId == "UDM.01" || kpiId == "UDM.02" || kpiId == "UDM.03" || kpiId == "SMF.01" {
                            // startItem[kpiId] = parse.Number(v)
                            continue // keep the startIndex row's value; do not sum, do not take the last
                        }
                        value := parse.Number(startItem[kpiId])
                        startItem[kpiId] = value + parse.Number(v)
                    }
                }
            }
        }
        data = append(data, startItem)
    }
    // Sort the merged records by time.
    sort.SliceStable(data, func(i, j int) bool {
        vi := parse.Number(data[i]["timeGroup"])
        vj := parse.Number(data[j]["timeGroup"])
        if query.SortOrder == "asc" {
            return vi < vj // ascending
        }
        return vi > vj // descending
    })
    return data
}
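// A minimal usage sketch for FindData (hypothetical caller; the field values are
// illustrative and GoldKPIQuery may require additional filters not shown here):
//
//  query := model.GoldKPIQuery{
//      NeType:    "AMF",
//      Interval:  300,   // 5-minute buckets, in seconds
//      SortOrder: "asc", // oldest bucket first
//  }
//  buckets := NewPerfKPI.FindData(query)
//  _ = buckets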
// UPFTodayFlowFind returns the total UPF traffic (N3 uplink, N6 downlink).
// day is the number of days to include, counting back from today.
func (r PerfKPI) UPFTodayFlowFind(rmUID string, day int) (int64, int64) {
    // Current date as the starting point.
    now := time.Now()
    var upTotal, downTotal int64
    // Sum the cached per-day totals for the last `day` days.
    for i := 0; i <= day; i++ {
        dateKey := now.AddDate(0, 0, -i).Format("2006-01-02")
        key := fmt.Sprintf("%sUPF_FLOW:%s:%s", cachekey.NE_DATA_KEY, rmUID, dateKey)
        // Read the cached values; missing or failed reads count as zero.
        up, err := redis.GetHash("", key, "up")
        if err != nil || up == "" {
            up = "0"
        }
        down, err := redis.GetHash("", key, "down")
        if err != nil || down == "" {
            down = "0"
        }
        upTotal += parse.Number(up)
        downTotal += parse.Number(down)
    }
    return upTotal, downTotal
}
// UPFTodayFlowUpdate accumulates today's UPF up/down traffic for the given element.
func (r PerfKPI) UPFTodayFlowUpdate(rmUID string, upValue, downValue int64) error {
    // Statistics are stored per calendar day.
    dateKey := time.Now().Format("2006-01-02")
    key := fmt.Sprintf("%sUPF_FLOW:%s:%s", cachekey.NE_DATA_KEY, rmUID, dateKey)
    // Accumulate the counters in real time with HIncrBy.
    if err := redis.IncrBy("", key, "up", upValue); err != nil {
        return err
    }
    if err := redis.IncrBy("", key, "down", downValue); err != nil {
        return err
    }
    return nil
}
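// A minimal usage sketch for the UPF flow counters (hypothetical values; the
// rmUID is illustrative): accumulate today's deltas, then read back a 7-day total.
//
//  _ = NewPerfKPI.UPFTodayFlowUpdate("upf-0001", 1024, 4096)
//  up, down := NewPerfKPI.UPFTodayFlowFind("upf-0001", 7)
//  _, _ = up, down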
// UPFTodayFlowLoad loads UPF up/down flow totals into redis.
// day is the number of days to backfill, counting back from today.
func (r PerfKPI) UPFTodayFlowLoad(day int) {
    cacheKeys, _ := redis.GetKeys("", cachekey.NE_KEY+"UPF:*")
    if len(cacheKeys) == 0 {
        return
    }
    now := time.Now()
    for _, key := range cacheKeys {
        var v neModel.NeInfo
        jsonStr, _ := redis.Get("", key)
        // Only parse payloads that look like non-trivial JSON.
        if len(jsonStr) > 7 {
            json.Unmarshal([]byte(jsonStr), &v)
        }
        if v.NeType == "UPF" && v.RmUID != "" {
            // Backfill the last `day` days for this UPF.
            for i := 0; i <= day; i++ {
                dateKey := now.AddDate(0, 0, -i).Format("2006-01-02")
                key := fmt.Sprintf("%sUPF_FLOW:%s:%s", cachekey.NE_DATA_KEY, v.RmUID, dateKey)
                // Compute the day's time range from the requested offset.
                beginTime := now.AddDate(0, 0, -i).Truncate(24 * time.Hour).UnixMilli()
                endTime := beginTime + 24*60*60*1000 - 1
                // Query the historical totals.
                // down * 8 / 1000 / 1000 gives the value in Mbit.
                info := r.perfKPIRepository.SelectUPFTotalFlow("UPF", v.RmUID, fmt.Sprint(beginTime), fmt.Sprint(endTime))
                // Treat explicit nil values as zero (note: v here shadows the NeInfo above).
                if v, ok := info["up"]; ok && v == nil {
                    info["up"] = 0
                }
                if v, ok := info["down"]; ok && v == nil {
                    info["down"] = 0
                }
                upTotal := parse.Number(info["up"])
                downTotal := parse.Number(info["down"])
                err := redis.SetHash("", key, map[string]any{
                    "up":   upTotal,
                    "down": downTotal,
                })
                if err != nil {
                    continue
                }
                // Expire the key so each day's data is kept for 30 days in total.
                daySub := (30 - i) * 24
                err = redis.Expire("", key, time.Duration(daySub)*time.Hour)
                if err != nil {
                    continue
                }
            }
        }
    }
}
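// A minimal usage sketch for UPFTodayFlowLoad (hypothetical scheduling; the
// actual call site is not shown in this file): backfill the last 7 days of UPF
// totals into redis, e.g. at service start-up or from a periodic job.
//
//  NewPerfKPI.UPFTodayFlowLoad(7)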