Architecture adjustment: remove Elasticsearch in favor of ClickHouse (CK)
parent 611fab28e7
commit d26f484b58

@@ -93,28 +93,6 @@ func InitClickHouse() (fn func(), err error) {
 	return
 }
 
-// 初始化mysql连接
-func InitEsClient() (fn func(), err error) {
-	config := model.GlobConfig.Comm.ElasticSearch
-
-	db.EsClient, err = db.NewEsClient(
-		config.Addresses,
-		config.Username,
-		config.Password,
-	)
-
-	if err != nil {
-		return
-	}
-	log.Println(fmt.Sprintf("ES组件初始化成功!连接:%v,用户名:%v,密码:%v",
-		config.Addresses,
-		config.Username,
-		config.Password,
-	))
-	fn = func() {}
-	return
-}
-
 // 初始化redis
 func InitRedisPool() (fn func(), err error) {
 	config := model.GlobConfig.Comm.Redis

@@ -61,7 +61,7 @@ func Init() {
 	error_handling,
 	report_type,
 	status)
-	TTL part_date + toIntervalMonth(1)
+	TTL part_date + toIntervalMonth(3)
 	SETTINGS index_granularity = 8192;
 	`)
 	if err != nil {

@@ -69,5 +69,38 @@ func Init() {
 		panic(err)
 	}
 
+	_, err = db.ClickHouseSqlx.Exec(`DROP TABLE IF EXISTS xwl_real_time_warehousing` + sinker.GetClusterSql() + `;`)
+
+	if err != nil {
+		log.Println(fmt.Sprintf("clickhouse 删除表 xwl_real_time_warehousing 失败:%s", err.Error()))
+		panic(err)
+	}
+
+	_, err = db.ClickHouseSqlx.Exec(`
+
+	CREATE TABLE xwl_real_time_warehousing ` + sinker.GetClusterSql() + `
+	(
+		table_id Int64,
+		create_time DateTime DEFAULT now(),
+		event_name String,
+		report_data String
+	)
+	ENGINE = ` + sinker.GetMergeTree("xwl_real_time_warehousing") + `
+	PARTITION BY (toYYYYMMDD(create_time))
+	ORDER BY (toYYYYMMDD(create_time),
+		table_id,
+		event_name)
+	TTL create_time + toIntervalMonth(3)
+	SETTINGS index_granularity = 8192;
+	`)
+	if err != nil {
+		log.Println(fmt.Sprintf("clickhouse 建表 xwl_real_time_warehousing 失败:%s", err.Error()))
+		panic(err)
+	}
+
 	log.Println("初始化CK数据完成!")
 }
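
A note on the new table's layout: rows are partitioned by day of create_time and ordered by (day, table_id, event_name) with a three-month TTL, so queries that constrain table_id and a time window prune whole partitions. A minimal query sketch under that assumption; db.ClickHouseSqlx is the handle this commit already uses as an sqlx-style DB, while Get, the helper name, and the table_id value are illustrative:

package example

import (
	"log"

	"github.com/1340691923/xwl_bi/engine/db"
)

// CountYesterday is illustrative: it counts yesterday's events for one
// table_id, filtering on the partition key (toYYYYMMDD(create_time))
// first so ClickHouse can prune partitions before scanning.
func CountYesterday(tableID int64) (int64, error) {
	var cnt int64
	err := db.ClickHouseSqlx.Get(&cnt, `
		SELECT count()
		FROM xwl_real_time_warehousing
		WHERE toYYYYMMDD(create_time) = toYYYYMMDD(yesterday())
		  AND table_id = ?`, tableID)
	if err != nil {
		log.Println("count query failed:", err)
	}
	return cnt, err
}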

@@ -37,7 +37,6 @@ func main() {
 		application.RegisterInitFnObserver(application.InitRbac),
 		application.RegisterInitFnObserver(application.InitOpenWinBrowser),
 		application.RegisterInitFnObserver(application.InitClickHouse),
-		application.RegisterInitFnObserver(application.InitEsClient),
 		application.RegisterInitFnObserver(application.InitRedisPool),
 		application.RegisterInitFnObserver(application.InitDebugSarama),
 	)

@@ -4,8 +4,6 @@ import (
 	"bytes"
 	"errors"
 	"fmt"
-	"go.uber.org/zap"
-
 	"github.com/1340691923/xwl_bi/engine/db"
 	"github.com/1340691923/xwl_bi/engine/logs"
 	"github.com/1340691923/xwl_bi/model"

@@ -14,6 +12,7 @@ import (
 	parser "github.com/1340691923/xwl_bi/platform-basic-libs/sinker/parse"
 	"github.com/1340691923/xwl_bi/platform-basic-libs/util"
 	"github.com/valyala/fastjson"
+	"go.uber.org/zap"
 	"strconv"
 	"strings"
 	"sync"

@@ -56,19 +55,9 @@ func MysqlConsumer() {
 	}
 }
 
-func AddRealTimeData(kafkaData model.KafkaData, data string, realTimeWarehousing *consumer_data.RealTimeWarehousing) (err error) {
+func AddRealTimeData(realTimeWarehousingData *consumer_data.RealTimeWarehousingData, realTimeWarehousing *consumer_data.RealTimeWarehousing) (err error) {
 
-	clientReportData := consumer_data.ClientReportData{
-		Data:    data,
-		TableId: kafkaData.TableId,
-		Date:    util.Str2Time(kafkaData.ReportTime, util.TimeFormat).Format(util.TimeFormatDay4),
-	}
-	err = clientReportData.CreateIndex()
-	if err != nil {
-		logs.Logger.Error(" clientReportData.CreateIndex", zap.Error(err))
-	}
-	bulkIndexRequest := clientReportData.GetReportData()
-	err = realTimeWarehousing.Add(bulkIndexRequest)
+	err = realTimeWarehousing.Add(realTimeWarehousingData)
 	return err
 }
 
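
With the ES bulk-index path gone, AddRealTimeData reduces to buffering a RealTimeWarehousingData for the batched ClickHouse writer. A usage sketch; every field value here is made up, and the struct itself is defined later in this commit:

package example

import (
	"github.com/1340691923/xwl_bi/platform-basic-libs/service/consumer_data"
)

// enqueue is illustrative: it builds one event row and hands it to the
// buffered warehouser. CreateTime uses the "yyyy-MM-dd HH:mm:ss" layout
// that KafkaData.ReportTime carries elsewhere in this commit.
func enqueue(w *consumer_data.RealTimeWarehousing) error {
	data := &consumer_data.RealTimeWarehousingData{
		Appid:      1001,
		EventName:  "page_view",
		CreateTime: "2021-11-01 12:00:00",
		Data:       []byte(`{"xwl_distinct_id":"u-1"}`),
	}
	return w.Add(data) // equivalent to action.AddRealTimeData(data, w) after this commit
}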

@@ -1,7 +1,6 @@
 package main
 
 import (
-	"bytes"
 	"flag"
 	"fmt"
 	"github.com/1340691923/xwl_bi/application"

@@ -69,7 +68,6 @@ func main() {
 		application.RegisterInitFnObserver(application.InitLogs),
 		application.RegisterInitFnObserver(application.InitMysql),
 		application.RegisterInitFnObserver(application.InitClickHouse),
-		application.RegisterInitFnObserver(application.InitEsClient),
 		application.RegisterInitFnObserver(application.InitRedisPool),
 	)
 

@@ -109,11 +107,9 @@ func main() {
 	realTimeDataSarama := sinker.NewKafkaSarama()
 	reportData2CKSarama := realTimeDataSarama.Clone()
 	go action.MysqlConsumer()
+	var json = jsoniter.ConfigCompatibleWithStandardLibrary
 	err = realTimeDataSarama.Init(model.GlobConfig.Comm.Kafka, model.GlobConfig.Comm.Kafka.ReportTopicName, model.GlobConfig.Comm.Kafka.RealTimeDataGroup, func(msg model.InputMessage, markFn func()) {
 
 		//ETL
-		var json = jsoniter.ConfigCompatibleWithStandardLibrary
 		var kafkaData model.KafkaData
 		err = json.Unmarshal(msg.Value, &kafkaData)
 		if err != nil {
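
Both consumer callbacks previously rebuilt the jsoniter config per message; this commit hoists it out of the callback. A minimal sketch of the pattern, assuming only jsoniter's documented API (the type and handler names here are illustrative):

package example

import (
	jsoniter "github.com/json-iterator/go"
)

// Create the jsoniter API once; it is documented as safe for concurrent
// use, so per-message callbacks can share a single instance instead of
// constructing one on every message.
var json = jsoniter.ConfigCompatibleWithStandardLibrary

type kafkaData struct {
	TableId   string `json:"table_id"`
	EventName string `json:"event_name"`
}

func handle(value []byte) error {
	var kd kafkaData
	return json.Unmarshal(value, &kd) // reuses the shared API
}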

@@ -121,34 +117,20 @@ func main() {
 			markFn()
 			return
 		}
-		reportDataTmp := kafkaData.ReqData
-		reqData, err := json.Marshal(util.Bytes2str(reportDataTmp))
+		appid, err := strconv.Atoi(kafkaData.TableId)
 		if err != nil {
-			logs.Logger.Error("json.Marshal Err", zap.Error(err))
+			logs.Logger.Error("strconv.Atoi(kafkaData.TableId) Err", zap.Error(err))
 			markFn()
 			return
 		}
-		xwlDistinctId := gjson.GetBytes(kafkaData.ReqData, "xwl_distinct_id")
-
-		if xwlDistinctId.String() == "" {
-			logs.Logger.Sugar().Errorf("xwl_distinct_id 为空", util.Bytes2str(kafkaData.ReqData))
-			markFn()
-			return
-		}
-		kafkaData.Offset = msg.Offset
-
-		buff := bytes.Buffer{}
-		buff.WriteString(`{"event_name":"`)
-		buff.WriteString(kafkaData.EventName)
-		buff.WriteString(`","create_time":"`)
-		buff.WriteString(kafkaData.ReportTime)
-		buff.WriteString(`","data":`)
-		buff.WriteString(util.Bytes2str(reqData))
-		buff.WriteString(`}`)
-		addRealTimeData := buff.String()
-
 		//添加实时数据
-		err = action.AddRealTimeData(kafkaData, addRealTimeData, realTimeWarehousing)
+		err = realTimeWarehousing.Add(&consumer_data.RealTimeWarehousingData{
+			Appid:      int64(appid),
+			EventName:  kafkaData.EventName,
+			CreateTime: kafkaData.ReportTime,
+			Data:       kafkaData.ReqData,
+		})
+
 		if err != nil {
 			logs.Logger.Error("AddRealTimeData err", zap.Error(err))
 		}
@ -162,7 +144,6 @@ func main() {
|
|||||||
|
|
||||||
err = reportData2CKSarama.Init(model.GlobConfig.Comm.Kafka, model.GlobConfig.Comm.Kafka.ReportTopicName, model.GlobConfig.Comm.Kafka.ReportData2CKGroup, func(msg model.InputMessage, markFn func()) {
|
err = reportData2CKSarama.Init(model.GlobConfig.Comm.Kafka, model.GlobConfig.Comm.Kafka.ReportTopicName, model.GlobConfig.Comm.Kafka.ReportData2CKGroup, func(msg model.InputMessage, markFn func()) {
|
||||||
|
|
||||||
var json = jsoniter.ConfigCompatibleWithStandardLibrary
|
|
||||||
var kafkaData model.KafkaData
|
var kafkaData model.KafkaData
|
||||||
err = json.Unmarshal(msg.Value, &kafkaData)
|
err = json.Unmarshal(msg.Value, &kafkaData)
|
||||||
if err != nil {
|
if err != nil {
|
||||||

@@ -198,7 +179,7 @@ func main() {
 		case model.EventReportType:
 			eventType = "事件属性类型不合法"
 		}
-		reportAcceptStatus.Add(consumer_data.ReportAcceptStatusData{
+		reportAcceptStatus.Add(&consumer_data.ReportAcceptStatusData{
 			PartDate:   kafkaData.ReportTime,
 			TableId:    tableId,
 			ReportType: eventType,

@@ -230,7 +211,7 @@ func main() {
 		serverT := util.Str2Time(kafkaData.ReportTime, util.TimeFormat)
 
 		if math.Abs(serverT.Sub(clinetT).Minutes()) > 10 {
-			reportAcceptStatus.Add(consumer_data.ReportAcceptStatusData{
+			reportAcceptStatus.Add(&consumer_data.ReportAcceptStatusData{
 				PartDate:   kafkaData.ReportTime,
 				TableId:    tableId,
 				ReportType: kafkaData.GetReportTypeErr(),

@@ -266,7 +247,7 @@ func main() {
 		//新增表结构
 		if err := action.AddTableColumn(
 			kafkaData,
-			func(data consumer_data.ReportAcceptStatusData) { reportAcceptStatus.Add(data) },
+			func(data consumer_data.ReportAcceptStatusData) { reportAcceptStatus.Add(&data) },
 			tableName,
 			metric,
 		); err != nil {

@@ -281,7 +262,7 @@ func main() {
 		}
 
 		//入库成功
-		if err := reportAcceptStatus.Add(consumer_data.ReportAcceptStatusData{
+		if err := reportAcceptStatus.Add(&consumer_data.ReportAcceptStatusData{
 			PartDate: kafkaData.ReportTime,
 			TableId:  tableId,
 			DataName: kafkaData.EventName,

@@ -65,11 +65,6 @@
 		"reportData2CKGroup": "reportData2CKGroup2",
 		"realTimeDataGroup": "realTimeDataGroup2"
 	},
-	"elasticSearch": {
-		"Addresses":["http://192.168.1.236:9200"],
-		"Username":"",
-		"Password":""
-	},
 	"redis": {
 		"addr":"192.168.1.236:6379",
 		"passwd":"",

@@ -2,14 +2,18 @@ package controller
 
 import (
 	"errors"
+	"github.com/1340691923/xwl_bi/engine/db"
+	"github.com/1340691923/xwl_bi/engine/logs"
 	"github.com/1340691923/xwl_bi/platform-basic-libs/jwt"
 	"github.com/1340691923/xwl_bi/platform-basic-libs/request"
 	"github.com/1340691923/xwl_bi/platform-basic-libs/response"
-	"github.com/1340691923/xwl_bi/platform-basic-libs/service/consumer_data"
 	"github.com/1340691923/xwl_bi/platform-basic-libs/service/debug_data"
 	"github.com/1340691923/xwl_bi/platform-basic-libs/service/realdata"
+	"github.com/1340691923/xwl_bi/platform-basic-libs/util"
 	"github.com/gofiber/fiber/v2"
 	"strconv"
+	"strings"
+	"time"
 )
 
 type RealDataController struct {

@@ -21,7 +25,7 @@ func (this RealDataController) List(ctx *fiber.Ctx) error {
 
 	type ReqData struct {
 		Appid    int    `json:"appid"`
-		SearchKw string `json:"search_kw"`
+		SearchKw string `json:"searchKw"`
 		Date     string `json:"date"`
 	}
 

@@ -32,18 +36,42 @@ func (this RealDataController) List(ctx *fiber.Ctx) error {
 	}
 
 	appid := strconv.Itoa(reqData.Appid)
-	searchKw := reqData.SearchKw
-	date := reqData.Date
-
-	clientReportData := consumer_data.ClientReportData{
-		TableId: appid,
+	type Res struct {
+		CreateTime   string    `json:"create_time" db:"-"`
+		CreateTimeDb time.Time `json:"-" db:"create_time"`
+		EventName    string    `json:"event_name" db:"event_name"`
+		ReportData   string    `json:"report_data" db:"report_data"`
 	}
-	res, err := clientReportData.GetList(ctx.Context(), searchKw, date)
-	if err != nil {
+	filterSql := ""
+
+	date := strings.Split(reqData.Date, ",")
+
+	args := []interface{}{appid}
+
+	if len(date) == 2 {
+		filterSql = filterSql + ` and create_time >= toDateTime(?) and create_time <= toDateTime(?) `
+		args = append(args, date[0], date[1])
+	}
+	logs.Logger.Sugar().Infof("reqData.SearchKw", reqData.SearchKw)
+	if strings.TrimSpace(reqData.SearchKw) != "" {
+		filterSql = filterSql + ` and event_name like '%` + reqData.SearchKw + `%' `
+	}
+	sql := `select report_data,event_name,create_time as create_time from xwl_real_time_warehousing where table_id = ? ` + filterSql + ` order by create_time desc limit 0,1000;`
+	logs.Logger.Sugar().Infof("sql", sql, args)
+	var res []Res
+	err := db.ClickHouseSqlx.Select(&res, sql,
+		args...,
+	)
+	if err != nil {
 		return this.Error(ctx, err)
 	}
+	for index := range res {
+		res[index].CreateTime = res[index].CreateTimeDb.Format(util.TimeFormat)
+	}
 
-	return this.Success(ctx, response.SearchSuccess, map[string]interface{}{"list": res.Hits.Hits})
+	return this.Success(ctx, response.SearchSuccess, map[string]interface{}{"list": res})
 }
 
 //错误数据列表
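
One caveat in the rewritten List handler: the date bounds are bound as ? parameters, but reqData.SearchKw is spliced directly into the like '%...%' clause, which exposes the query to SQL injection through the search box. A sketch of binding the keyword the same way as the dates (concat is a standard ClickHouse function; the helper name is illustrative):

package example

import "strings"

// appendKeywordFilter binds the search keyword as a parameter instead of
// concatenating it into the SQL string, using the same ?-placeholder
// style the date filter in this commit already uses.
func appendKeywordFilter(filterSql string, args []interface{}, kw string) (string, []interface{}) {
	if strings.TrimSpace(kw) != "" {
		filterSql += ` and event_name like concat('%', ?, '%') `
		args = append(args, kw)
	}
	return filterSql, args
}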

@@ -1,19 +0,0 @@
-package db
-
-import "github.com/olivere/elastic"
-
-var EsClient *elastic.Client
-
-func NewEsClient(address []string, username, password string) (esClient *elastic.Client, err error) {
-	optList := []elastic.ClientOptionFunc{elastic.SetSniff(false)}
-
-	optList = append(optList, elastic.SetURL(address...))
-
-	if username != "" || password != "" {
-		optList = append(optList, elastic.SetBasicAuth(username, password))
-	}
-
-	esClient, err = elastic.NewSimpleClient(optList...)
-
-	return
-}

@@ -22,7 +22,6 @@ type Config struct {
 		Mysql         MysqlConfig      `json:"mysql"`
 		ClickHouse    ClickHouseConfig `json:"clickhouse"`
 		Kafka         KafkaCfg         `json:"kafka"`
-		ElasticSearch EsConfig         `json:"elasticSearch"`
 		Redis         RedisConfig      `json:"redis"`
 	} `json:"comm"`
 }

@@ -50,12 +49,6 @@ type RedisConfig struct {
 	MaxActive int `json:"maxActive"`
 }
 
-type EsConfig struct {
-	Addresses []string `json:"addresses"`
-	Username  string   `json:"username"`
-	Password  string   `json:"password"`
-}
-
 type ClickHouseConfig struct {
 	Username string `json:"username"`
 	Pwd      string `json:"pwd"`

@@ -1,118 +0,0 @@
-package consumer_data
-
-import (
-	"context"
-	"fmt"
-	"github.com/1340691923/xwl_bi/engine/db"
-	"github.com/1340691923/xwl_bi/engine/logs"
-	"github.com/olivere/elastic"
-	"go.uber.org/zap"
-
-	"strings"
-)
-
-type ClientReportData struct {
-	Data    string
-	TableId string
-	Date    string
-}
-
-func (this *ClientReportData) Name() string {
-	return "client_report_data" + this.TableId
-}
-
-func (this *ClientReportData) GetReportData() *elastic.BulkIndexRequest {
-	return elastic.NewBulkIndexRequest().Index(this.CreateReportName()).Type(this.getTyp()).Doc(this.Data)
-}
-
-func (this *ClientReportData) CreateIndex() (err error) {
-
-	indexName := this.CreateReportName()
-	indexExists, err := db.EsClient.IndexExists(indexName).Do(context.Background())
-
-	if err != nil {
-		return
-	}
-	if !indexExists {
-		db.EsClient.CreateIndex(indexName).Body(this.createIndexStr()).Do(context.Background())
-
-		_, err = db.EsClient.Alias().Add(indexName, this.GetAliasName()).Do(context.Background())
-		if err != nil {
-			logs.Logger.Error("别名创建失败", zap.Error(err))
-		}
-	}
-	return
-}
-
-//type
-func (this *ClientReportData) getTyp() string {
-	return "_doc"
-}
-
-//index
-func (this *ClientReportData) GetAliasName() string {
-	return fmt.Sprintf("%s%s", this.Name(), "_index")
-}
-
-func (this *ClientReportData) CreateReportName() string {
-	return fmt.Sprintf("%v_%v", this.Name(), this.Date)
-}
-
-func (this *ClientReportData) GetList(ctx context.Context, searchKw, date string) (*elastic.SearchResult, error) {
-
-	search := db.EsClient.Search(this.GetAliasName())
-
-	dateArr := strings.Split(date, ",")
-
-	q := elastic.NewBoolQuery()
-
-	if len(dateArr) == 2 {
-		q = q.Must(elastic.NewRangeQuery("create_time").
-			Gte(dateArr[0]).
-			Lte(dateArr[1]).IncludeLower(false).IncludeUpper(false))
-	}
-
-	if searchKw != "" {
-		q = q.Must(elastic.NewMatchQuery("data", searchKw))
-		highlight := elastic.NewHighlight().Field("data").PreTags("<b style='color:red'>").PostTags("</b>").NumOfFragments(0)
-		search = search.Highlight(highlight)
-	}
-
-	return search.Query(q).Sort("create_time", false).From(0).Size(1000).Do(ctx)
-}
-
-//创建索引字符串
-func (this *ClientReportData) createIndexStr() string {
-	s := `
-	{
-		"settings": {
-			"number_of_replicas": 0,
-			"number_of_shards": 1
-		},
-		"mappings" : {
-			"_doc" : {
-				"dynamic" : "false",
-				"properties": {
-					"event_name": {
-						"type": "keyword"
-					},
-					"create_time": {
-						"format": "yyyy-MM-dd HH:mm:ss",
-						"type": "date"
-					},
-					"data": {
-						"type": "text",
-						"analyzer":"english",
-						"fields": {
-							"keyword": {
-								"type": "keyword"
-							}
-						}
-					}
-				}
-			}
-		}
-	}
-	`
-	return s
-}

@@ -1,18 +1,23 @@
 package consumer_data
 
 import (
-	"context"
 	"github.com/1340691923/xwl_bi/engine/db"
 	"github.com/1340691923/xwl_bi/engine/logs"
-	jsoniter "github.com/json-iterator/go"
-	"github.com/olivere/elastic"
+	"github.com/1340691923/xwl_bi/platform-basic-libs/util"
 	"go.uber.org/zap"
 	"sync"
 	"time"
 )
 
+type RealTimeWarehousingData struct {
+	Appid      int64
+	EventName  string
+	CreateTime string
+	Data       []byte
+}
+
 type RealTimeWarehousing struct {
-	buffer        []*elastic.BulkIndexRequest
+	buffer        []*RealTimeWarehousingData
 	bufferMutex   *sync.RWMutex
 	batchSize     int
 	flushInterval int
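
The new struct carries Data as the raw []byte from Kafka and converts it only at insert time via util.Bytes2str (used in the Flush rewrite below). That helper's body is not part of this diff; a common zero-copy implementation of the pattern looks like this sketch, with the usual caveat that the resulting string aliases the slice and must not be read after the slice is mutated:

package example

import "unsafe"

// Bytes2strSketch mirrors the usual zero-copy []byte-to-string trick.
// It avoids the allocation of string(b), but is only safe if the byte
// slice is never modified while the string is still in use.
func Bytes2strSketch(b []byte) string {
	return *(*string)(unsafe.Pointer(&b))
}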

@@ -20,7 +25,7 @@ type RealTimeWarehousing struct {
 
 func NewRealTimeWarehousing(batchSize, flushInterval int) *RealTimeWarehousing {
 	realTimeWarehousing := &RealTimeWarehousing{
-		buffer:        make([]*elastic.BulkIndexRequest, 0, batchSize),
+		buffer:        make([]*RealTimeWarehousingData, 0, batchSize),
 		bufferMutex:   new(sync.RWMutex),
 		batchSize:     batchSize,
 		flushInterval: flushInterval,

@@ -37,39 +42,47 @@ func (this *RealTimeWarehousing) Flush() (err error) {
 	this.bufferMutex.Lock()
 	if len(this.buffer) > 0 {
 		startNow := time.Now()
-		var json = jsoniter.ConfigCompatibleWithStandardLibrary
-		bulkRequest := db.EsClient.Bulk()
+		tx, err := db.ClickHouseSqlx.Begin()
+		if err != nil {
+			return err
+		}
+		stmt, err := tx.Prepare("INSERT INTO xwl_real_time_warehousing (table_id,event_name,create_time,report_data) VALUES (?,?,?,?)")
+		if err != nil {
+			return err
+		}
 		for _, buffer := range this.buffer {
-			bulkRequest.Add(buffer)
-		}
-		res, err := bulkRequest.Do(context.Background())
-		if err != nil {
-			logs.Logger.Error("ES出现错误,休息10秒钟继续", zap.Error(err))
-			time.Sleep(time.Second * 10)
-			this.Flush()
-		} else {
-			if res.Errors {
-				resStr, _ := json.MarshalToString(res)
-				logs.Logger.Error("ES出现错误", zap.String("res", resStr))
-			} else {
-				lostTime := time.Now().Sub(startNow).String()
-				len := len(this.buffer)
-				if len > 0 {
-					logs.Logger.Info("ES入库成功", zap.String("所花时间", lostTime), zap.Int("数据长度为", len))
-				}
-			}
+			if _, err := stmt.Exec(
+				buffer.Appid,
+				buffer.EventName,
+				buffer.CreateTime,
+				util.Bytes2str(buffer.Data),
+			); err != nil {
+				stmt.Close()
+				return err
+			}
 		}
 
-		this.buffer = make([]*elastic.BulkIndexRequest, 0, this.batchSize)
+		if err := tx.Commit(); err != nil {
+			logs.Logger.Error("入库数据状态出现错误", zap.Error(err))
+		} else {
+			lostTime := time.Now().Sub(startNow).String()
+			len := len(this.buffer)
+			if len > 0 {
+				logs.Logger.Info("入库数据状态成功", zap.String("所花时间", lostTime), zap.Int("数据长度为", len))
+			}
+		}
+		stmt.Close()
+
+		this.buffer = make([]*RealTimeWarehousingData, 0, this.batchSize)
 	}
 	this.bufferMutex.Unlock()
 	return nil
 }
 
-func (this *RealTimeWarehousing) Add(data *elastic.BulkIndexRequest) (err error) {
+func (this *RealTimeWarehousing) Add(data *RealTimeWarehousingData) (err error) {
 	this.bufferMutex.Lock()
 	this.buffer = append(this.buffer, data)
 	this.bufferMutex.Unlock()
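
The buffer/batch design is unchanged; only the sink moved from an ES bulk request to a ClickHouse transaction. A usage sketch of the batching contract, assuming a caller-driven periodic flush (whether Add also flushes when batchSize is reached is not shown in this hunk, and the values below are made up):

package example

import (
	"time"

	"github.com/1340691923/xwl_bi/platform-basic-libs/service/consumer_data"
)

// runWarehouser is illustrative: Add only buffers rows; Flush writes one
// batched INSERT per tick, so insert frequency is bounded by the ticker.
func runWarehouser(events <-chan *consumer_data.RealTimeWarehousingData) {
	w := consumer_data.NewRealTimeWarehousing(1000, 5) // batchSize, flushInterval (made-up values)
	ticker := time.NewTicker(5 * time.Second)
	defer ticker.Stop()
	for {
		select {
		case e := <-events:
			w.Add(e)
		case <-ticker.C:
			w.Flush()
		}
	}
}

One design caveat worth noting: the early return err paths added to Flush exit while bufferMutex is still locked, so a caller that retries Flush after an error would deadlock.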

@@ -4,7 +4,6 @@ import (
 	"github.com/1340691923/xwl_bi/engine/db"
 	"github.com/1340691923/xwl_bi/engine/logs"
 	"go.uber.org/zap"
-	"log"
 	"sync"
 	"time"
 )

@@ -22,7 +21,7 @@ type ReportAcceptStatusData struct {
 }
 
 type ReportAcceptStatus struct {
-	buffer        []ReportAcceptStatusData
+	buffer        []*ReportAcceptStatusData
 	bufferMutex   *sync.RWMutex
 	batchSize     int
 	flushInterval int

@@ -33,7 +32,7 @@ const SuccessStatus = 1
 
 func NewReportAcceptStatus(batchSize int, flushInterval int) *ReportAcceptStatus {
 	reportAcceptStatus := &ReportAcceptStatus{
-		buffer:        make([]ReportAcceptStatusData, 0, batchSize),
+		buffer:        make([]*ReportAcceptStatusData, 0, batchSize),
 		bufferMutex:   new(sync.RWMutex),
 		batchSize:     batchSize,
 		flushInterval: flushInterval,

@@ -58,16 +57,14 @@ func (this *ReportAcceptStatus) Flush() (err error) {
 
 	tx, err := db.ClickHouseSqlx.Begin()
 	if err != nil {
-		return
+		return err
 	}
 
 	stmt, err := tx.Prepare("INSERT INTO xwl_acceptance_status (status,part_date,table_id,report_type,data_name,error_reason,error_handling,report_data,xwl_kafka_offset) VALUES (?,?,?,?,?,?,?,?,?)")
 	if err != nil {
-		return
+		return err
 	}
 
-	defer stmt.Close()
-
 	for _, buffer := range this.buffer {
 		if _, err := stmt.Exec(
 			buffer.Status,

@@ -80,27 +77,27 @@ func (this *ReportAcceptStatus) Flush() (err error) {
 			buffer.ReportData,
 			buffer.XwlKafkaOffset,
 		); err != nil {
-			log.Fatal(err)
+			stmt.Close()
+			return err
 		}
 	}
 
 	if err := tx.Commit(); err != nil {
 		logs.Logger.Error("入库数据状态出现错误", zap.Error(err))
 	} else {
 
 		lostTime := time.Now().Sub(startNow).String()
 		len := len(this.buffer)
 		if len > 0 {
 			logs.Logger.Info("入库数据状态成功", zap.String("所花时间", lostTime), zap.Int("数据长度为", len))
 		}
 	}
-	this.buffer = make([]ReportAcceptStatusData, 0, this.batchSize)
+	stmt.Close()
+	this.buffer = make([]*ReportAcceptStatusData, 0, this.batchSize)
 	this.bufferMutex.Unlock()
 	return nil
 }
 
-func (this *ReportAcceptStatus) Add(data ReportAcceptStatusData) (err error) {
+func (this *ReportAcceptStatus) Add(data *ReportAcceptStatusData) (err error) {
 	this.bufferMutex.Lock()
 	this.buffer = append(this.buffer, data)
 	this.bufferMutex.Unlock()
@ -59,8 +59,6 @@ func (this *UserReport) InflowOfKakfa() (err error) {
|
|||||||
msg.Timestamp = time.Now()
|
msg.Timestamp = time.Now()
|
||||||
|
|
||||||
return sendMsg(msg)
|
return sendMsg(msg)
|
||||||
|
|
||||||
return
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (this *UserReport) Put() {
|
func (this *UserReport) Put() {
|
||||||
|

@@ -238,22 +238,19 @@ export default {
       let list = []
       let index = 0
      for (const v of res.data.list) {
-        const _source = v['_source']
-        _source['dataFormat'] = JSON.stringify(JSON.parse(v['_source']['data']), null, '\t')
-
-        if (v.hasOwnProperty('highlight')) {
-          _source['data'] = v['highlight']['data'][0]
-        } else {
-          _source['data'] = v['_source']['data']
-        }
+        const _source = {}
+        _source['dataFormat'] = JSON.stringify(JSON.parse(v["report_data"]), null, '\t')
+        _source['event_name'] = v["event_name"]
+        _source['create_time'] = v["create_time"]
+        _source['data'] = v["report_data"]
         _source['isFormatData'] = false
         _source['index'] = index
 
        list.push(_source)
         index++
       }
       list = filterData(list, this.input.trim())
+      console.log("list",list)
       this.total = list.length
       this.list = list
       this.trueList = list