Author: 1340691923@qq.com
Date:   2022-03-10 10:37:56 +08:00
Parent: f85275068d
Commit: 8ef9ba1d10

4 changed files with 36 additions and 24 deletions


@@ -104,9 +104,11 @@ func main() {
 	log.Println(fmt.Sprintf("sinker 服务启动成功,性能检测入口为: http://127.0.0.1:%v", model.GlobConfig.Sinker.PprofHttpPort))
-	realTimeWarehousing := consumer_data.NewRealTimeWarehousing(model.GlobConfig.Sinker.RealTimeWarehousing.BufferSize, model.GlobConfig.Sinker.RealTimeWarehousing.FlushInterval)
-	reportAcceptStatus := consumer_data.NewReportAcceptStatus(model.GlobConfig.Sinker.ReportAcceptStatus.BufferSize, model.GlobConfig.Sinker.ReportAcceptStatus.FlushInterval)
-	reportData2CK := consumer_data.NewReportData2CK(model.GlobConfig.Sinker.ReportData2CK.BufferSize, model.GlobConfig.Sinker.ReportData2CK.FlushInterval)
+	sinkerC := model.GlobConfig.Sinker
+	realTimeWarehousing := consumer_data.NewRealTimeWarehousing(sinkerC.RealTimeWarehousing)
+	reportAcceptStatus := consumer_data.NewReportAcceptStatus(sinkerC.ReportAcceptStatus)
+	reportData2CK := consumer_data.NewReportData2CK(sinkerC.ReportData2CK)
 	realTimeDataSarama := sinker.NewKafkaSarama()
 	reportData2CKSarama := realTimeDataSarama.Clone()
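The declaration of model.BatchConfig is not part of this diff, so the following is only an inferred sketch of what it presumably looks like, based on the fields the new constructors read (BufferSize, FlushInterval) and the sinker fields referenced above; the type name SinkerConfig and the PprofHttpPort type are assumptions.

// Inferred sketch only -- the real declarations live in the model package and are not shown in this commit.
package model

// BatchConfig presumably bundles the two knobs every batching consumer needs.
type BatchConfig struct {
	BufferSize    int // buffered items before a size-triggered flush
	FlushInterval int // periodic flush interval; values > 0 enable RegularFlushing
}

// A sinker config along these lines would explain sinkerC.RealTimeWarehousing etc.
// (the name SinkerConfig and the PprofHttpPort type are guesses).
type SinkerConfig struct {
	PprofHttpPort       int
	RealTimeWarehousing BatchConfig
	ReportAcceptStatus  BatchConfig
	ReportData2CK       BatchConfig
}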
@@ -114,8 +116,11 @@ func main() {
 	go sinker.ClearDimsCacheByTime(time.Minute * 30)
 	var json = jsoniter.ConfigCompatibleWithStandardLibrary
-	err = realTimeDataSarama.Init(model.GlobConfig.Comm.Kafka, model.GlobConfig.Comm.Kafka.ReportTopicName, model.GlobConfig.Comm.Kafka.RealTimeDataGroup, func(msg model.InputMessage, markFn func()) {
+	err = realTimeDataSarama.Init(
+		model.GlobConfig.Comm.Kafka,
+		model.GlobConfig.Comm.Kafka.ReportTopicName,
+		model.GlobConfig.Comm.Kafka.RealTimeDataGroup,
+		func(msg model.InputMessage, markFn func()) {
 		//ETL
 		var kafkaData model.KafkaData
 		err = json.Unmarshal(msg.Value, &kafkaData)
@@ -154,7 +159,11 @@ func main() {
 		panic(err)
 	}
-	err = reportData2CKSarama.Init(model.GlobConfig.Comm.Kafka, model.GlobConfig.Comm.Kafka.ReportTopicName, model.GlobConfig.Comm.Kafka.ReportData2CKGroup, func(msg model.InputMessage, markFn func()) {
+	err = reportData2CKSarama.Init(
+		model.GlobConfig.Comm.Kafka,
+		model.GlobConfig.Comm.Kafka.ReportTopicName,
+		model.GlobConfig.Comm.Kafka.ReportData2CKGroup,
+		func(msg model.InputMessage, markFn func()) {
 		var kafkaData model.KafkaData
 		err = json.Unmarshal(msg.Value, &kafkaData)
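Both Init calls are only reformatted here; the callback contract is unchanged: Init takes the shared Kafka config, a topic, a consumer group, and a per-message handler. As a rough sketch of that handler's shape (the error-handling policy and the exact meaning of markFn are assumptions, not shown in this diff):

// Hypothetical handler shape for realTimeDataSarama.Init / reportData2CKSarama.Init.
handler := func(msg model.InputMessage, markFn func()) {
	var kafkaData model.KafkaData
	if err := json.Unmarshal(msg.Value, &kafkaData); err != nil {
		// assumption: leave the message unmarked so it can be reprocessed later
		return
	}
	// ...ETL and buffering into the consumer_data writers happens in the real handler...
	markFn() // assumption: marks (acknowledges) the Kafka message once it has been handled
}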


@@ -3,6 +3,7 @@ package consumer_data
 import (
 	"github.com/1340691923/xwl_bi/engine/db"
 	"github.com/1340691923/xwl_bi/engine/logs"
+	"github.com/1340691923/xwl_bi/model"
 	"github.com/1340691923/xwl_bi/platform-basic-libs/util"
 	"go.uber.org/zap"
 	"sync"
@@ -23,16 +24,16 @@ type RealTimeWarehousing struct {
 	flushInterval int
 }
-func NewRealTimeWarehousing(batchSize, flushInterval int) *RealTimeWarehousing {
-	logs.Logger.Info("NewRealTimeWarehousing", zap.Int("batchSize", batchSize), zap.Int("flushInterval", flushInterval))
+func NewRealTimeWarehousing(config model.BatchConfig) *RealTimeWarehousing {
+	logs.Logger.Info("NewRealTimeWarehousing", zap.Int("batchSize", config.BufferSize), zap.Int("flushInterval", config.FlushInterval))
 	realTimeWarehousing := &RealTimeWarehousing{
-		buffer: make([]*RealTimeWarehousingData, 0, batchSize),
+		buffer: make([]*RealTimeWarehousingData, 0, config.BufferSize),
 		bufferMutex: new(sync.RWMutex),
-		batchSize: batchSize,
-		flushInterval: flushInterval,
+		batchSize: config.BufferSize,
+		flushInterval: config.FlushInterval,
 	}
-	if flushInterval > 0 {
+	if config.FlushInterval > 0 {
 		realTimeWarehousing.RegularFlushing()
 	}
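With this change the constructor no longer takes positional ints; call sites hand over the whole config block. A hedged usage sketch (the values are arbitrary examples, the real ones come from model.GlobConfig.Sinker.RealTimeWarehousing):

// Example only; FlushInterval > 0 means periodic flushing starts immediately.
cfg := model.BatchConfig{BufferSize: 1000, FlushInterval: 10}
rw := consumer_data.NewRealTimeWarehousing(cfg)
_ = rw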


@@ -3,6 +3,7 @@ package consumer_data
 import (
 	"github.com/1340691923/xwl_bi/engine/db"
 	"github.com/1340691923/xwl_bi/engine/logs"
+	"github.com/1340691923/xwl_bi/model"
 	"go.uber.org/zap"
 	"sync"
 	"time"
@@ -32,16 +33,16 @@ const (
 	SuccessStatus = 1
 )
-func NewReportAcceptStatus(batchSize int, flushInterval int) *ReportAcceptStatus {
-	logs.Logger.Info("NewReportAcceptStatus", zap.Int("batchSize", batchSize), zap.Int("flushInterval", flushInterval))
+func NewReportAcceptStatus(config model.BatchConfig) *ReportAcceptStatus {
+	logs.Logger.Info("NewReportAcceptStatus", zap.Int("batchSize", config.BufferSize), zap.Int("flushInterval", config.FlushInterval))
 	reportAcceptStatus := &ReportAcceptStatus{
-		buffer: make([]*ReportAcceptStatusData, 0, batchSize),
+		buffer: make([]*ReportAcceptStatusData, 0, config.BufferSize),
 		bufferMutex: new(sync.RWMutex),
-		batchSize: batchSize,
-		flushInterval: flushInterval,
+		batchSize: config.BufferSize,
+		flushInterval: config.FlushInterval,
 	}
-	if flushInterval > 0 {
+	if config.FlushInterval > 0 {
 		reportAcceptStatus.RegularFlushing()
 	}


@@ -4,6 +4,7 @@ import (
 	"bytes"
 	"github.com/1340691923/xwl_bi/engine/db"
 	"github.com/1340691923/xwl_bi/engine/logs"
+	"github.com/1340691923/xwl_bi/model"
 	model2 "github.com/1340691923/xwl_bi/platform-basic-libs/sinker/model"
 	parser "github.com/1340691923/xwl_bi/platform-basic-libs/sinker/parse"
 	"go.uber.org/zap"
@@ -21,15 +22,15 @@ type ReportData2CK struct {
 	flushInterval int
 }
-func NewReportData2CK(batchSize int, flushInterval int) *ReportData2CK {
-	logs.Logger.Info("NewReportData2CK", zap.Int("batchSize", batchSize), zap.Int("flushInterval", flushInterval))
+func NewReportData2CK(config model.BatchConfig) *ReportData2CK {
+	logs.Logger.Info("NewReportData2CK", zap.Int("batchSize", config.BufferSize), zap.Int("flushInterval", config.FlushInterval))
 	reportData2CK := &ReportData2CK{
-		buffer: make([]map[string]*parser.FastjsonMetric, 0, batchSize),
+		buffer: make([]map[string]*parser.FastjsonMetric, 0, config.BufferSize),
 		bufferMutex: new(sync.RWMutex),
-		batchSize: batchSize,
-		flushInterval: flushInterval,
+		batchSize: config.BufferSize,
+		flushInterval: config.FlushInterval,
 	}
-	if flushInterval > 0 {
+	if config.FlushInterval > 0 {
 		reportData2CK.RegularFlushing()
 	}