|
- package models
-
- import (
- "encoding/json"
- "fmt"
- "sort"
- "strconv"
- "strings"
- "time"
-
- "code.gitea.io/gitea/modules/log"
- "code.gitea.io/gitea/modules/timeutil"
- "xorm.io/builder"
- "xorm.io/xorm"
- )
-
const (
	// PAGE_SIZE is the number of rows fetched per query when paging through
	// large statistics tables.
	PAGE_SIZE = 2000
	// BATCH_INSERT_SIZE is the number of records accumulated in memory before
	// a bulk insert is flushed to the statistics database.
	BATCH_INSERT_SIZE = 50
)
-
// UserBusinessAnalysisAll is one row of the all-time per-user activity
// statistics table (user_business_analysis_all). Each row holds a user's
// accumulated activity counters plus the composite index values derived from
// them. The primary key is (ID, CountDate).
type UserBusinessAnalysisAll struct {
	// user id, taken from the user table
	ID int64 `xorm:"pk"`

	// unix timestamp of when this snapshot was counted
	CountDate int64 `xorm:"pk"`

	// merged pull requests (action type ActionMergePullRequest, 11)
	CodeMergeCount int `xorm:"NOT NULL DEFAULT 0"`

	// commit actions (action type ActionCommitRepo, 5)
	CommitCount int `xorm:"NOT NULL DEFAULT 0"`

	// issues created (action type ActionCreateIssue)
	IssueCount int `xorm:"NOT NULL DEFAULT 0"`

	// comments written (comment table, current date)
	CommentCount int `xorm:"NOT NULL DEFAULT 0"`

	// repositories watched (watch table, current date)
	FocusRepoCount int `xorm:"NOT NULL DEFAULT 0"`

	// repositories starred (star table, current date)
	StarRepoCount int `xorm:"NOT NULL DEFAULT 0"`

	// followers of this user (follow table)
	WatchedCount int `xorm:"NOT NULL DEFAULT 0"`

	// account age in whole months (user table)
	GiteaAgeMonth int `xorm:"NOT NULL DEFAULT 0"`

	// committed code lines (CommitLines from the user KPI stats, keyed by email)
	CommitCodeSize int `xorm:"NOT NULL DEFAULT 0"`

	// size of datasets committed (attachment table)
	CommitDatasetSize int `xorm:"NOT NULL DEFAULT 0"`

	// AI models committed by the user
	CommitModelCount int `xorm:"NOT NULL DEFAULT 0"`

	// issues solved (issue / issue_assignees tables)
	SolveIssueCount int `xorm:"NOT NULL DEFAULT 0"`

	// wiki (encyclopedia) page count
	EncyclopediasCount int `xorm:"NOT NULL DEFAULT 0"`

	// registration time (user table)
	RegistDate timeutil.TimeStamp `xorm:"NOT NULL"`

	// repositories created (repo table)
	CreateRepoCount int `xorm:"NOT NULL DEFAULT 0"`

	// login count, sourced from ELK logs
	LoginCount int `xorm:"NOT NULL DEFAULT 0"`

	// OpenI repository activity index attributed to the user
	OpenIIndex float64 `xorm:"NOT NULL DEFAULT 0"`

	// user email (user table)
	Email string `xorm:"NOT NULL"`

	// user name (user table)
	Name string `xorm:"NOT NULL"`

	// snapshot date as a formatted string, e.g. "2006-01-02 00:01"
	DataDate string `xorm:"NULL"`

	// cloud-brain task counters, split by processor type and job kind
	CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
	GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
	NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
	GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
	NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
	NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
	GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
	CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
	CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
	// UserIndex is the normalized composite activity index (written later by
	// updateUserIndex, scaled by 100 after min-max normalization)
	UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
	// UserIndexPrimitive is the raw, pre-normalization composite index
	UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"`

	UserLocation string `xorm:"NULL"`

	// users this user follows
	FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"`
	// dataset/image collect counters (from queryDatasetStars/queryImageStars;
	// "Collected" presumably means collected-by-others — TODO confirm)
	CollectDataset int `xorm:"NOT NULL DEFAULT 0"`
	CollectedDataset int `xorm:"NOT NULL DEFAULT 0"`
	RecommendDataset int `xorm:"NOT NULL DEFAULT 0"`
	CollectImage int `xorm:"NOT NULL DEFAULT 0"`
	CollectedImage int `xorm:"NOT NULL DEFAULT 0"`
	RecommendImage int `xorm:"NOT NULL DEFAULT 0"`

	Phone string `xorm:"NULL"`
}
-
// UserBusinessAnalysis is one row of the per-period user activity statistics
// table (user_business_analysis). Unlike UserBusinessAnalysisAll, the primary
// key is (ID, DataDate), so a user may have one row per snapshot date.
type UserBusinessAnalysis struct {
	// user id, taken from the user table
	ID int64 `xorm:"pk"`
	// snapshot date string; part of the primary key
	DataDate string `xorm:"pk"`
	// unix timestamp of when this snapshot was counted
	CountDate int64 `xorm:"NULL"`

	// merged pull requests (action type ActionMergePullRequest, 11)
	CodeMergeCount int `xorm:"NOT NULL DEFAULT 0"`

	// commit actions (action type ActionCommitRepo, 5)
	CommitCount int `xorm:"NOT NULL DEFAULT 0"`

	// issues created (action type ActionCreateIssue)
	IssueCount int `xorm:"NOT NULL DEFAULT 0"`

	// comments written (comment table, current date)
	CommentCount int `xorm:"NOT NULL DEFAULT 0"`

	// repositories watched (watch table, current date)
	FocusRepoCount int `xorm:"NOT NULL DEFAULT 0"`

	// repositories starred (star table, current date)
	StarRepoCount int `xorm:"NOT NULL DEFAULT 0"`

	// followers of this user (follow table)
	WatchedCount int `xorm:"NOT NULL DEFAULT 0"`

	// account age in whole months (user table)
	GiteaAgeMonth int `xorm:"NOT NULL DEFAULT 0"`

	// committed code lines (CommitLines from the user KPI stats, keyed by email)
	CommitCodeSize int `xorm:"NOT NULL DEFAULT 0"`

	// size of datasets committed (attachment table)
	CommitDatasetSize int `xorm:"NOT NULL DEFAULT 0"`

	// AI models committed by the user
	CommitModelCount int `xorm:"NOT NULL DEFAULT 0"`

	// issues solved (issue / issue_assignees tables)
	SolveIssueCount int `xorm:"NOT NULL DEFAULT 0"`

	// wiki (encyclopedia) page count
	EncyclopediasCount int `xorm:"NOT NULL DEFAULT 0"`

	// registration time (user table)
	RegistDate timeutil.TimeStamp `xorm:"NOT NULL"`

	// repositories created (repo table)
	CreateRepoCount int `xorm:"NOT NULL DEFAULT 0"`

	// login count, sourced from ELK logs
	LoginCount int `xorm:"NOT NULL DEFAULT 0"`

	// OpenI repository activity index attributed to the user
	OpenIIndex float64 `xorm:"NOT NULL DEFAULT 0"`

	// user email (user table)
	Email string `xorm:"NOT NULL"`

	// user name (user table)
	Name string `xorm:"NOT NULL"`

	// cloud-brain task counters, split by processor type and job kind
	CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
	GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
	NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
	GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
	NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
	NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
	GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
	CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
	CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
	// UserIndex is the normalized composite activity index; UserIndexPrimitive
	// is the raw value before min-max normalization
	UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
	UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"`

	UserLocation string `xorm:"NULL"`

	// users this user follows
	FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"`
	// dataset/image collect counters (from queryDatasetStars/queryImageStars;
	// "Collected" presumably means collected-by-others — TODO confirm)
	CollectDataset int `xorm:"NOT NULL DEFAULT 0"`
	CollectedDataset int `xorm:"NOT NULL DEFAULT 0"`
	RecommendDataset int `xorm:"NOT NULL DEFAULT 0"`
	CollectImage int `xorm:"NOT NULL DEFAULT 0"`
	CollectedImage int `xorm:"NOT NULL DEFAULT 0"`
	RecommendImage int `xorm:"NOT NULL DEFAULT 0"`

	Phone string `xorm:"NULL"`
}
-
// UserBusinessAnalysisQueryOptions bundles the filter, paging and sorting
// parameters accepted by the user statistics query functions.
type UserBusinessAnalysisQueryOptions struct {
	ListOptions
	// UserName, when non-empty, filters rows by a LIKE match on the name column
	UserName string
	SortType string
	// StartTime/EndTime bound count_date inclusively (unix seconds)
	StartTime int64
	EndTime int64
	// IsAll, when true, aggregates over all dates instead of the time range
	IsAll bool
}
-
- type UserBusinessAnalysisList []*UserBusinessAnalysis
-
- func (ulist UserBusinessAnalysisList) Swap(i, j int) { ulist[i], ulist[j] = ulist[j], ulist[i] }
- func (ulist UserBusinessAnalysisList) Len() int { return len(ulist) }
- func (ulist UserBusinessAnalysisList) Less(i, j int) bool {
- return ulist[i].ID > ulist[j].ID
- }
-
- func getLastCountDate() int64 {
- statictisSess := xStatistic.NewSession()
- defer statictisSess.Close()
- statictisSess.Limit(1, 0)
- userBusinessAnalysisList := make([]*UserBusinessAnalysis, 0)
- if err := statictisSess.Table("user_business_analysis").OrderBy("count_date desc").Limit(1, 0).
- Find(&userBusinessAnalysisList); err == nil {
- for _, userRecord := range userBusinessAnalysisList {
- return userRecord.CountDate - 10000
- }
- } else {
- log.Info("query error." + err.Error())
- }
- currentTimeNow := time.Now()
- pageStartTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 0, 0, 0, currentTimeNow.Location())
- return pageStartTime.Unix()
- }
-
- func QueryMetricsPage(start int64, end int64) ([]*UserMetrics, int64) {
-
- statictisSess := xStatistic.NewSession()
- defer statictisSess.Close()
- cond := "count_date >" + fmt.Sprint(start) + " and count_date<" + fmt.Sprint(end)
-
- userMetricsList := make([]*UserMetrics, 0)
- //.Limit(pageSize, page*pageSize)
- if err := statictisSess.Table(new(UserMetrics)).Where(cond).OrderBy("count_date desc").
- Find(&userMetricsList); err != nil {
- return nil, 0
- }
- postUserMetricsList := postDeal(userMetricsList)
- return postUserMetricsList, int64(len(postUserMetricsList))
- }
-
- func QueryMetrics(start int64, end int64) ([]*UserMetrics, int) {
- statictisSess := xStatistic.NewSession()
- defer statictisSess.Close()
- userMetricsList := make([]*UserMetrics, 0)
- if err := statictisSess.Table(new(UserMetrics)).Where("count_date >" + fmt.Sprint(start) + " and count_date<" + fmt.Sprint(end)).OrderBy("count_date desc").
- Find(&userMetricsList); err != nil {
- return nil, 0
- }
- postUserMetricsList := postDeal(userMetricsList)
- return postUserMetricsList, int(len(postUserMetricsList))
- }
-
- func duplicateRemoval(userMetricsList []*UserMetrics) []*UserMetrics {
- userMetricsResult := make([]*UserMetrics, 0)
- for i := 0; i < len(userMetricsList); i++ {
- if i > 0 {
- if userMetricsList[i].DataDate == userMetricsList[i-1].DataDate {
- continue
- }
- }
- userMetricsResult = append(userMetricsResult, userMetricsList[i])
- }
- return userMetricsResult
- }
-
- func postDeal(userMetricsList []*UserMetrics) []*UserMetrics {
- duplicateRemovalUserMetricsList := duplicateRemoval(userMetricsList)
- for _, userMetrics := range duplicateRemovalUserMetricsList {
- userMetrics.DisplayDate = userMetrics.DataDate
- userMetrics.TotalRegistUser = userMetrics.ActivateRegistUser + userMetrics.NotActivateRegistUser
- userMetrics.TotalNotActivateRegistUser = userMetrics.TotalUser - userMetrics.TotalActivateRegistUser
- }
- return duplicateRemovalUserMetricsList
- }
-
- func QueryMetricsForAll(start int64, end int64) []*UserMetrics {
- statictisSess := xStatistic.NewSession()
- defer statictisSess.Close()
- userMetricsList := make([]*UserMetrics, 0)
- if err := statictisSess.Table(new(UserMetrics)).Where("count_date >" + fmt.Sprint(start) + " and count_date<" + fmt.Sprint(end)).OrderBy("count_date desc").
- Find(&userMetricsList); err != nil {
- return nil
- }
- duplicateRemovalUserMetricsList := duplicateRemoval(userMetricsList)
- return makeResultForMonth(duplicateRemovalUserMetricsList, len(duplicateRemovalUserMetricsList))
- }
-
- func QueryMetricsForYear(start int64, end int64) []*UserMetrics {
-
- allUserInfo, count := QueryMetrics(start, end)
-
- return makeResultForMonth(allUserInfo, count)
- }
-
- func makeResultForMonth(allUserInfo []*UserMetrics, count int) []*UserMetrics {
- monthMap := make(map[string]*UserMetrics)
- if count > 0 {
- for _, userMetrics := range allUserInfo {
- dateTime := time.Unix(userMetrics.CountDate, 0)
- month := fmt.Sprint(dateTime.Year()) + "-" + fmt.Sprint(int(dateTime.Month()))
- if _, ok := monthMap[month]; !ok {
- monthUserMetrics := &UserMetrics{
- DisplayDate: month,
- ActivateRegistUser: userMetrics.ActivateRegistUser,
- NotActivateRegistUser: userMetrics.NotActivateRegistUser,
- TotalUser: userMetrics.TotalUser,
- TotalNotActivateRegistUser: userMetrics.TotalUser - userMetrics.TotalActivateRegistUser,
- TotalActivateRegistUser: userMetrics.TotalActivateRegistUser,
- TotalHasActivityUser: userMetrics.TotalHasActivityUser,
- HasActivityUser: userMetrics.HasActivityUser,
- DaysForMonth: 1,
- TotalRegistUser: userMetrics.ActivateRegistUser + userMetrics.NotActivateRegistUser,
- }
- monthMap[month] = monthUserMetrics
- } else {
- value := monthMap[month]
- value.ActivateRegistUser += userMetrics.ActivateRegistUser
- value.NotActivateRegistUser += userMetrics.NotActivateRegistUser
- value.HasActivityUser += userMetrics.HasActivityUser
- value.TotalRegistUser += userMetrics.ActivateRegistUser + userMetrics.NotActivateRegistUser
- value.ActivateIndex = float64(value.ActivateRegistUser) / float64(value.TotalRegistUser)
- value.DaysForMonth += 1
- }
- }
- }
- result := make([]*UserMetrics, 0)
- for _, value := range monthMap {
- result = append(result, value)
- }
- sort.Slice(result, func(i, j int) bool {
- return strings.Compare(result[i].DisplayDate, result[j].DisplayDate) > 0
- })
- return result
- }
-
- func QueryRankList(key string, tableName string, limit int) ([]*UserBusinessAnalysisAll, int64) {
- statictisSess := xStatistic.NewSession()
- defer statictisSess.Close()
-
- userBusinessAnalysisAllList := make([]*UserBusinessAnalysisAll, 0)
- if err := statictisSess.Table(tableName).OrderBy(key+" desc,id desc").Limit(limit, 0).
- Find(&userBusinessAnalysisAllList); err != nil {
- return nil, 0
- }
- return userBusinessAnalysisAllList, int64(len(userBusinessAnalysisAllList))
- }
-
- func QueryUserStaticDataByTableName(start int, pageSize int, tableName string, queryObj interface{}, userName string) ([]*UserBusinessAnalysisAll, int64) {
- statictisSess := xStatistic.NewSession()
- defer statictisSess.Close()
- var cond = builder.NewCond()
- if len(userName) > 0 {
- cond = cond.And(
- builder.Like{"lower(name)", strings.ToLower(userName)},
- )
- }
- allCount, err := statictisSess.Where(cond).Count(queryObj)
- if err != nil {
- log.Info("query error." + err.Error())
- return nil, 0
- }
- log.Info("query return total:" + fmt.Sprint(allCount))
- userBusinessAnalysisAllList := make([]*UserBusinessAnalysisAll, 0)
- if err := statictisSess.Table(tableName).Where(cond).OrderBy("user_index desc,id desc").Limit(pageSize, start).
- Find(&userBusinessAnalysisAllList); err != nil {
- return nil, 0
- }
- return userBusinessAnalysisAllList, allCount
- }
-
- func QueryUserStaticDataAll(opts *UserBusinessAnalysisQueryOptions) ([]*UserBusinessAnalysisAll, int64) {
- log.Info("query startTime =" + fmt.Sprint(opts.StartTime) + " endTime=" + fmt.Sprint(opts.EndTime) + " isAll=" + fmt.Sprint(opts.IsAll))
-
- statictisSess := xStatistic.NewSession()
- defer statictisSess.Close()
-
- allCount, err := statictisSess.Count(new(UserBusinessAnalysisAll))
- if err != nil {
- log.Info("query error." + err.Error())
- return nil, 0
- }
- log.Info("query return total:" + fmt.Sprint(allCount))
-
- pageSize := PAGE_SIZE
- totalPage := int(allCount) / pageSize
- userBusinessAnalysisReturnList := make([]*UserBusinessAnalysisAll, 0)
- for i := 0; i <= int(totalPage); i++ {
- userBusinessAnalysisAllList := make([]*UserBusinessAnalysisAll, 0)
- if err := statictisSess.Table("user_business_analysis_all").OrderBy("id desc").Limit(pageSize, i*pageSize).
- Find(&userBusinessAnalysisAllList); err != nil {
- return nil, 0
- }
- log.Info("query " + fmt.Sprint(i+1) + " result size=" + fmt.Sprint(len(userBusinessAnalysisAllList)))
- for _, userRecord := range userBusinessAnalysisAllList {
- userBusinessAnalysisReturnList = append(userBusinessAnalysisReturnList, userRecord)
- }
- }
-
- log.Info("return size=" + fmt.Sprint(len(userBusinessAnalysisReturnList)))
- return userBusinessAnalysisReturnList, allCount
- }
-
- func QueryDataForUserDefineFromDb(opts *UserBusinessAnalysisQueryOptions, key string) ([]*UserBusinessAnalysis, int64) {
- statictisSess := xStatistic.NewSession()
- defer statictisSess.Close()
-
- var cond = builder.NewCond()
- cond = cond.And(
- builder.Eq{"data_date": key},
- )
- if len(opts.UserName) > 0 {
- cond = cond.And(
- builder.Like{"name", opts.UserName},
- )
- }
- allCount, err := statictisSess.Where(cond).Count(new(UserBusinessAnalysis))
- if err == nil {
- if allCount > 0 {
- userBusinessAnalysisList := make([]*UserBusinessAnalysis, 0)
- if err := statictisSess.Table("user_business_analysis").Where(cond).OrderBy("id desc").Limit(opts.PageSize, (opts.Page-1)*opts.PageSize).
- Find(&userBusinessAnalysisList); err != nil {
- return nil, 0
- }
- return userBusinessAnalysisList, allCount
- }
- }
- return nil, 0
- }
-
- func WriteDataToDb(dataList []*UserBusinessAnalysis, key string) {
- statictisSess := xStatistic.NewSession()
- defer statictisSess.Close()
- log.Info("write to db, size=" + fmt.Sprint(len(dataList)))
- userBusinessAnalysisList := make([]*UserBusinessAnalysis, 0)
- for _, data := range dataList {
- data.DataDate = key
- userBusinessAnalysisList = append(userBusinessAnalysisList, data)
- if len(userBusinessAnalysisList) > BATCH_INSERT_SIZE {
- statictisSess.Insert(userBusinessAnalysisList)
- userBusinessAnalysisList = make([]*UserBusinessAnalysis, 0)
- }
- }
- if len(userBusinessAnalysisList) > 0 {
- statictisSess.Insert(userBusinessAnalysisList)
- }
- }
-
- func QueryUserStaticDataForUserDefine(opts *UserBusinessAnalysisQueryOptions, wikiCountMap map[string]int) ([]*UserBusinessAnalysis, int64) {
- log.Info("start to count other user info data")
- sess := x.NewSession()
- defer sess.Close()
-
- currentTimeNow := time.Now()
- log.Info("current time:" + currentTimeNow.Format("2006-01-02 15:04:05"))
-
- start_unix := opts.StartTime
-
- end_unix := opts.EndTime
- CountDate := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 1, 0, 0, currentTimeNow.Location())
- DataDate := currentTimeNow.Format("2006-01-02 15:04")
-
- CodeMergeCountMap := queryPullRequest(start_unix, end_unix)
- CommitCountMap := queryCommitAction(start_unix, end_unix, 5)
- IssueCountMap := queryCreateIssue(start_unix, end_unix)
-
- CommentCountMap := queryComment(start_unix, end_unix)
- FocusRepoCountMap := queryWatch(start_unix, end_unix)
- StarRepoCountMap := queryStar(start_unix, end_unix)
- WatchedCountMap, WatchOtherMap := queryFollow(start_unix, end_unix)
-
- StartTime := time.Unix(start_unix, 0)
- EndTime := time.Unix(end_unix, 0)
- CommitCodeSizeMap, err := GetAllUserKPIStats(StartTime, EndTime)
- if err != nil {
- log.Info("query commit code errr.")
- } else {
- log.Info("query commit code size, len=" + fmt.Sprint(len(CommitCodeSizeMap)))
- CommitCodeSizeMapJson, _ := json.Marshal(CommitCodeSizeMap)
- log.Info("CommitCodeSizeMapJson=" + string(CommitCodeSizeMapJson))
- }
- CommitDatasetSizeMap, CommitDatasetNumMap := queryDatasetSize(start_unix, end_unix)
- SolveIssueCountMap := querySolveIssue(start_unix, end_unix)
- CreateRepoCountMap := queryUserCreateRepo(start_unix, end_unix)
- LoginCountMap := queryLoginCount(start_unix, end_unix)
- OpenIIndexMap := queryUserRepoOpenIIndex(start_unix, end_unix)
- CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix)
- AiModelManageMap := queryUserModel(start_unix, end_unix)
-
- CollectDataset, CollectedDataset := queryDatasetStars(start_unix, end_unix)
- RecommendDataset := queryRecommedDataSet(start_unix, end_unix)
- CollectImage, CollectedImage := queryImageStars(start_unix, end_unix)
- RecommendImage := queryRecommedImage(start_unix, end_unix)
-
- statictisSess := xStatistic.NewSession()
- defer statictisSess.Close()
-
- cond := "type != 1 and is_active=true"
- count, err := sess.Where(cond).Count(new(User))
-
- ParaWeight := getParaWeight()
- ResultList := make([]*UserBusinessAnalysis, 0)
- var indexTotal int64
- indexTotal = 0
- for {
- sess.Select("`user`.*").Table("user").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
- userList := make([]*User, 0)
- sess.Find(&userList)
-
- for i, userRecord := range userList {
- var dateRecord UserBusinessAnalysis
- dateRecord.ID = userRecord.ID
- log.Info("i=" + fmt.Sprint(i) + " userName=" + userRecord.Name)
- dateRecord.CountDate = CountDate.Unix()
- dateRecord.DataDate = DataDate
- dateRecord.Email = userRecord.Email
- dateRecord.Phone = userRecord.PhoneNumber
- dateRecord.RegistDate = userRecord.CreatedUnix
- dateRecord.Name = userRecord.Name
- dateRecord.UserLocation = userRecord.Location
- dateRecord.GiteaAgeMonth = subMonth(currentTimeNow, userRecord.CreatedUnix.AsTime())
-
- dateRecord.CodeMergeCount = getMapValue(dateRecord.ID, CodeMergeCountMap)
- dateRecord.CommitCount = getMapValue(dateRecord.ID, CommitCountMap)
- dateRecord.IssueCount = getMapValue(dateRecord.ID, IssueCountMap)
- dateRecord.CommentCount = getMapValue(dateRecord.ID, CommentCountMap)
- dateRecord.FocusRepoCount = getMapValue(dateRecord.ID, FocusRepoCountMap)
- dateRecord.StarRepoCount = getMapValue(dateRecord.ID, StarRepoCountMap)
- dateRecord.WatchedCount = getMapValue(dateRecord.ID, WatchedCountMap)
- dateRecord.FocusOtherUser = getMapValue(dateRecord.ID, WatchOtherMap)
- if _, ok := CommitCodeSizeMap[dateRecord.Email]; !ok {
- dateRecord.CommitCodeSize = 0
- } else {
- dateRecord.CommitCodeSize = int(CommitCodeSizeMap[dateRecord.Email].CommitLines)
- }
- dateRecord.CommitDatasetSize = getMapValue(dateRecord.ID, CommitDatasetSizeMap)
- dateRecord.CommitDatasetNum = getMapValue(dateRecord.ID, CommitDatasetNumMap)
- dateRecord.SolveIssueCount = getMapValue(dateRecord.ID, SolveIssueCountMap)
-
- dateRecord.EncyclopediasCount = getMapKeyStringValue(dateRecord.Name, wikiCountMap)
-
- dateRecord.CreateRepoCount = getMapValue(dateRecord.ID, CreateRepoCountMap)
-
- dateRecord.LoginCount = getMapValue(dateRecord.ID, LoginCountMap)
-
- if _, ok := OpenIIndexMap[dateRecord.ID]; !ok {
- dateRecord.OpenIIndex = 0
- } else {
- dateRecord.OpenIIndex = OpenIIndexMap[dateRecord.ID]
- }
-
- dateRecord.CloudBrainTaskNum = getMapValue(dateRecord.ID, CloudBrainTaskMap)
- dateRecord.GpuDebugJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_GpuDebugJob", CloudBrainTaskItemMap)
- dateRecord.NpuDebugJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_NpuDebugJob", CloudBrainTaskItemMap)
- dateRecord.GpuTrainJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_GpuTrainJob", CloudBrainTaskItemMap)
- dateRecord.NpuTrainJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_NpuTrainJob", CloudBrainTaskItemMap)
- dateRecord.NpuInferenceJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_NpuInferenceJob", CloudBrainTaskItemMap)
- dateRecord.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap)
- dateRecord.CloudBrainRunTime = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_CloudBrainRunTime", CloudBrainTaskItemMap)
- dateRecord.CommitModelCount = getMapValue(dateRecord.ID, AiModelManageMap)
-
- dateRecord.CollectDataset = getMapValue(dateRecord.ID, CollectDataset)
- dateRecord.CollectedDataset = getMapValue(dateRecord.ID, CollectedDataset)
- dateRecord.RecommendDataset = getMapValue(dateRecord.ID, RecommendDataset)
- dateRecord.CollectImage = getMapValue(dateRecord.ID, CollectImage)
- dateRecord.CollectedImage = getMapValue(dateRecord.ID, CollectedImage)
- dateRecord.RecommendImage = getMapValue(dateRecord.ID, RecommendImage)
-
- dateRecord.UserIndexPrimitive = getUserIndex(dateRecord, ParaWeight)
- ResultList = append(ResultList, &dateRecord)
- }
-
- indexTotal += PAGE_SIZE
- if indexTotal >= count {
- break
- }
- }
- log.Info("query user define,count=" + fmt.Sprint(len(ResultList)))
- return ResultList, int64(len(ResultList))
- }
-
// QueryUserStaticDataPage returns one page of user_business_analysis rows
// whose count_date lies in [opts.StartTime, opts.EndTime], optionally
// filtered by a LIKE match on the user name. Rows belonging to the same user
// across different dates are merged by summing their activity counters, and
// the merged list is returned sorted by descending user ID. The second
// return value is the row count of the initial (pre-merge) query.
func QueryUserStaticDataPage(opts *UserBusinessAnalysisQueryOptions) ([]*UserBusinessAnalysis, int64) {

	log.Info("query startTime =" + fmt.Sprint(opts.StartTime) + " endTime=" + fmt.Sprint(opts.EndTime) + " isAll=" + fmt.Sprint(opts.IsAll))
	statictisSess := xStatistic.NewSession()
	defer statictisSess.Close()

	//currentTimeNow := time.Now()
	//pageStartTime := getLastCountDate()
	//pageEndTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 23, 59, 59, 0, currentTimeNow.Location()).Unix()

	// filter: optional name LIKE plus inclusive count_date range
	var cond = builder.NewCond()
	if len(opts.UserName) > 0 {
		cond = cond.And(
			builder.Like{"name", opts.UserName},
		)
	}
	cond = cond.And(
		builder.Gte{"count_date": opts.StartTime},
	)
	cond = cond.And(
		builder.Lte{"count_date": opts.EndTime},
	)

	count, err := statictisSess.Where(cond).Count(new(UserBusinessAnalysis))
	if err != nil {
		log.Info("query error." + err.Error())
		return nil, 0
	}

	// apply paging to the session; the limit is consumed by the Find below
	if opts.Page >= 0 && opts.PageSize > 0 {
		var start int
		if opts.Page == 0 {
			start = 0
		} else {
			start = (opts.Page - 1) * opts.PageSize
		}
		statictisSess.Limit(opts.PageSize, start)
	}

	// first query: the page of rows that determines WHICH users are returned
	userBusinessAnalysisList := make([]*UserBusinessAnalysis, 0)
	if err := statictisSess.Table("user_business_analysis").Where(cond).OrderBy("count_date,id desc").
		Find(&userBusinessAnalysisList); err != nil {
		return nil, 0
	}

	resultMap := make(map[int64]*UserBusinessAnalysis)

	if len(userBusinessAnalysisList) > 0 {
		// second query: fetch ALL date rows for the users on this page (or
		// only the in-range rows when IsAll is false) and merge them per user
		var newAndCond = builder.NewCond()
		var newOrCond = builder.NewCond()
		for _, userRecord := range userBusinessAnalysisList {
			newOrCond = newOrCond.Or(
				builder.Eq{"id": userRecord.ID},
			)
		}
		newAndCond = newAndCond.And(
			newOrCond,
		)
		if !opts.IsAll {
			newAndCond = newAndCond.And(
				builder.Gte{"count_date": opts.StartTime},
			)
			newAndCond = newAndCond.And(
				builder.Lte{"count_date": opts.EndTime},
			)
		}

		allCount, err := statictisSess.Where(newAndCond).Count(new(UserBusinessAnalysis))
		if err != nil {
			log.Info("query error." + err.Error())
			return nil, 0
		}

		pageSize := 1000
		totalPage := int(allCount) / pageSize

		for i := 0; i <= int(totalPage); i++ {
			userBusinessAnalysisList = make([]*UserBusinessAnalysis, 0)
			if err := statictisSess.Table("user_business_analysis").Where(newAndCond).OrderBy("count_date desc").Limit(pageSize, i*pageSize).
				Find(&userBusinessAnalysisList); err != nil {
				return nil, 0
			}
			log.Info("query result size=" + fmt.Sprint(len(userBusinessAnalysisList)))
			for _, userRecord := range userBusinessAnalysisList {
				if _, ok := resultMap[userRecord.ID]; !ok {
					resultMap[userRecord.ID] = userRecord
				} else {
					// merge: sum the daily activity counters onto the first
					// record seen for this user.
					// NOTE(review): only the original counters are summed —
					// cloud-brain, collect/recommend and FocusOtherUser fields
					// keep the first record's values; confirm this is intended.
					resultMap[userRecord.ID].CodeMergeCount += userRecord.CodeMergeCount
					resultMap[userRecord.ID].CommitCount += userRecord.CommitCount
					resultMap[userRecord.ID].IssueCount += userRecord.IssueCount
					resultMap[userRecord.ID].CommentCount += userRecord.CommentCount
					resultMap[userRecord.ID].FocusRepoCount += userRecord.FocusRepoCount
					resultMap[userRecord.ID].StarRepoCount += userRecord.StarRepoCount
					resultMap[userRecord.ID].WatchedCount += userRecord.WatchedCount
					resultMap[userRecord.ID].CommitCodeSize += userRecord.CommitCodeSize
					resultMap[userRecord.ID].CommitDatasetSize += userRecord.CommitDatasetSize
					resultMap[userRecord.ID].CommitDatasetNum += userRecord.CommitDatasetNum
					resultMap[userRecord.ID].CommitModelCount += userRecord.CommitModelCount
					resultMap[userRecord.ID].SolveIssueCount += userRecord.SolveIssueCount
					resultMap[userRecord.ID].EncyclopediasCount += userRecord.EncyclopediasCount
					resultMap[userRecord.ID].CreateRepoCount += userRecord.CreateRepoCount
					resultMap[userRecord.ID].LoginCount += userRecord.LoginCount
				}
			}
		}
	}

	// flatten the map and sort by descending user ID (UserBusinessAnalysisList
	// implements sort.Interface with that ordering)
	userBusinessAnalysisReturnList := UserBusinessAnalysisList{}
	for _, v := range resultMap {
		userBusinessAnalysisReturnList = append(userBusinessAnalysisReturnList, v)
	}
	sort.Sort(userBusinessAnalysisReturnList)
	log.Info("return size=" + fmt.Sprint(len(userBusinessAnalysisReturnList)))
	return userBusinessAnalysisReturnList, count
}
-
- func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageStartTime time.Time, pageEndTime time.Time, userMetrics map[string]int) {
- sess := x.NewSession()
- defer sess.Close()
-
- statictisSess := xStatistic.NewSession()
- defer statictisSess.Close()
- log.Info("truncate all data from table: " + tableName)
- statictisSess.Exec("TRUNCATE TABLE " + tableName)
-
- log.Info("pageStartTime:" + pageStartTime.Format("2006-01-02 15:04:05"))
- log.Info("pageEndTime time:" + pageEndTime.Format("2006-01-02 15:04:05"))
-
- start_unix := pageStartTime.Unix()
- end_unix := pageEndTime.Unix()
-
- currentTimeNow := time.Now()
- startTime := currentTimeNow.AddDate(0, 0, -1)
-
- CodeMergeCountMap := queryPullRequest(start_unix, end_unix)
- CommitCountMap := queryCommitAction(start_unix, end_unix, 5)
- IssueCountMap := queryCreateIssue(start_unix, end_unix)
-
- CommentCountMap := queryComment(start_unix, end_unix)
- FocusRepoCountMap := queryWatch(start_unix, end_unix)
- StarRepoCountMap := queryStar(start_unix, end_unix)
- WatchedCountMap, WatchOtherMap := queryFollow(start_unix, end_unix)
- CommitCodeSizeMap, err := GetAllUserKPIStats(pageStartTime, pageEndTime)
- if err != nil {
- log.Info("query commit code errr.")
- } else {
- log.Info("query commit code size, len=" + fmt.Sprint(len(CommitCodeSizeMap)))
- CommitCodeSizeMapJson, _ := json.Marshal(CommitCodeSizeMap)
- log.Info("CommitCodeSizeMapJson=" + string(CommitCodeSizeMapJson))
- }
- //CommitCodeSizeMap := queryCommitCodeSize(StartTimeNextDay.Unix(), EndTimeNextDay.Unix())
- CommitDatasetSizeMap, CommitDatasetNumMap := queryDatasetSize(start_unix, end_unix)
- SolveIssueCountMap := querySolveIssue(start_unix, end_unix)
- CreateRepoCountMap := queryUserCreateRepo(start_unix, end_unix)
- LoginCountMap := queryLoginCount(start_unix, end_unix)
-
- OpenIIndexMap := queryUserRepoOpenIIndex(startTime.Unix(), end_unix)
- CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix)
- AiModelManageMap := queryUserModel(start_unix, end_unix)
-
- CollectDataset, CollectedDataset := queryDatasetStars(start_unix, end_unix)
- RecommendDataset := queryRecommedDataSet(start_unix, end_unix)
- CollectImage, CollectedImage := queryImageStars(start_unix, end_unix)
- RecommendImage := queryRecommedImage(start_unix, end_unix)
-
- DataDate := currentTimeNow.Format("2006-01-02") + " 00:01"
-
- cond := "type != 1 and is_active=true"
- count, err := sess.Where(cond).Count(new(User))
- if err != nil {
- log.Info("query user error. return.")
- return
- }
- ParaWeight := getParaWeight()
- var indexTotal int64
- indexTotal = 0
- insertCount := 0
- userIndexMap := make(map[int64]float64, 0)
- maxUserIndex := 0.0
- minUserIndex := 100000000.0
- for {
- sess.Select("`user`.*").Table("user").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
- userList := make([]*User, 0)
- sess.Find(&userList)
- dateRecordBatch := make([]UserBusinessAnalysisAll, 0)
- for _, userRecord := range userList {
- var dateRecordAll UserBusinessAnalysisAll
- dateRecordAll.ID = userRecord.ID
- dateRecordAll.Email = userRecord.Email
- dateRecordAll.Phone = userRecord.PhoneNumber
- dateRecordAll.RegistDate = userRecord.CreatedUnix
- dateRecordAll.Name = userRecord.Name
- dateRecordAll.GiteaAgeMonth = subMonth(currentTimeNow, userRecord.CreatedUnix.AsTime())
- dateRecordAll.DataDate = DataDate
- dateRecordAll.UserLocation = userRecord.Location
-
- dateRecordAll.CodeMergeCount = getMapValue(dateRecordAll.ID, CodeMergeCountMap)
- dateRecordAll.CommitCount = getMapValue(dateRecordAll.ID, CommitCountMap)
- dateRecordAll.IssueCount = getMapValue(dateRecordAll.ID, IssueCountMap)
- dateRecordAll.CommentCount = getMapValue(dateRecordAll.ID, CommentCountMap)
- dateRecordAll.FocusRepoCount = getMapValue(dateRecordAll.ID, FocusRepoCountMap)
- dateRecordAll.FocusOtherUser = getMapValue(dateRecordAll.ID, WatchOtherMap)
- dateRecordAll.StarRepoCount = getMapValue(dateRecordAll.ID, StarRepoCountMap)
- dateRecordAll.WatchedCount = getMapValue(dateRecordAll.ID, WatchedCountMap)
- if _, ok := CommitCodeSizeMap[dateRecordAll.Email]; !ok {
- dateRecordAll.CommitCodeSize = 0
- } else {
- dateRecordAll.CommitCodeSize = int(CommitCodeSizeMap[dateRecordAll.Email].CommitLines)
- }
- //dateRecordAll.CommitCodeSize = getMapValue(dateRecordAll.ID, CommitCodeSizeMap)
- dateRecordAll.CommitDatasetSize = getMapValue(dateRecordAll.ID, CommitDatasetSizeMap)
- dateRecordAll.CommitDatasetNum = getMapValue(dateRecordAll.ID, CommitDatasetNumMap)
- dateRecordAll.SolveIssueCount = getMapValue(dateRecordAll.ID, SolveIssueCountMap)
- dateRecordAll.EncyclopediasCount = getMapKeyStringValue(dateRecordAll.Name, wikiCountMap)
- dateRecordAll.CreateRepoCount = getMapValue(dateRecordAll.ID, CreateRepoCountMap)
- dateRecordAll.LoginCount = getMapValue(dateRecordAll.ID, LoginCountMap)
-
- if _, ok := OpenIIndexMap[dateRecordAll.ID]; !ok {
- dateRecordAll.OpenIIndex = 0
- } else {
- dateRecordAll.OpenIIndex = OpenIIndexMap[dateRecordAll.ID]
- }
-
- dateRecordAll.CloudBrainTaskNum = getMapValue(dateRecordAll.ID, CloudBrainTaskMap)
- dateRecordAll.GpuDebugJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_GpuDebugJob", CloudBrainTaskItemMap)
- dateRecordAll.NpuDebugJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_NpuDebugJob", CloudBrainTaskItemMap)
- dateRecordAll.GpuTrainJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_GpuTrainJob", CloudBrainTaskItemMap)
- dateRecordAll.NpuTrainJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_NpuTrainJob", CloudBrainTaskItemMap)
- dateRecordAll.NpuInferenceJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_NpuInferenceJob", CloudBrainTaskItemMap)
- dateRecordAll.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap)
- dateRecordAll.CloudBrainRunTime = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_CloudBrainRunTime", CloudBrainTaskItemMap)
- dateRecordAll.CommitModelCount = getMapValue(dateRecordAll.ID, AiModelManageMap)
- dateRecordAll.CollectDataset = getMapValue(dateRecordAll.ID, CollectDataset)
- dateRecordAll.CollectedDataset = getMapValue(dateRecordAll.ID, CollectedDataset)
- dateRecordAll.RecommendDataset = getMapValue(dateRecordAll.ID, RecommendDataset)
- dateRecordAll.CollectImage = getMapValue(dateRecordAll.ID, CollectImage)
- dateRecordAll.CollectedImage = getMapValue(dateRecordAll.ID, CollectedImage)
- dateRecordAll.RecommendImage = getMapValue(dateRecordAll.ID, RecommendImage)
-
- dateRecordAll.UserIndexPrimitive = getUserIndexFromAnalysisAll(dateRecordAll, ParaWeight)
- userIndexMap[dateRecordAll.ID] = dateRecordAll.UserIndexPrimitive
- if maxUserIndex < dateRecordAll.UserIndexPrimitive {
- maxUserIndex = dateRecordAll.UserIndexPrimitive
- }
- if minUserIndex > dateRecordAll.UserIndexPrimitive {
- minUserIndex = dateRecordAll.UserIndexPrimitive
- }
-
- dateRecordBatch = append(dateRecordBatch, dateRecordAll)
- if len(dateRecordBatch) >= BATCH_INSERT_SIZE {
- err := insertTable(dateRecordBatch, tableName, statictisSess)
- insertCount += BATCH_INSERT_SIZE
- if err != nil {
- log.Info("insert all data failed." + err.Error())
- }
- dateRecordBatch = make([]UserBusinessAnalysisAll, 0)
- }
- if tableName == "user_business_analysis_all" {
- tValue := getUserActivateAll(dateRecordAll)
- if tValue > 0 {
- log.Info("dateRecordAll name=" + dateRecordAll.Name + " value=" + fmt.Sprint(tValue))
- userMetrics["TotalHasActivityUser"] = getMapKeyStringValue("TotalHasActivityUser", userMetrics) + 1
- }
- }
- }
- if len(dateRecordBatch) > 0 {
- err := insertTable(dateRecordBatch, tableName, statictisSess)
- insertCount += len(dateRecordBatch)
- if err != nil {
- log.Info("insert all data failed." + err.Error())
- }
- }
- indexTotal += PAGE_SIZE
- if indexTotal >= count {
- break
- }
- }
-
- if tableName == "user_business_analysis_all" {
- log.Info("TotalHasActivityUser=" + fmt.Sprint(userMetrics["TotalHasActivityUser"]))
- }
- //normalization
- for k, v := range userIndexMap {
- tmpResult := (v - minUserIndex) / (maxUserIndex - minUserIndex)
- if tmpResult > 0.99 {
- tmpResult = 0.99
- }
- updateUserIndex(tableName, statictisSess, k, tmpResult)
- }
- log.Info("refresh data finished.tableName=" + tableName + " total record:" + fmt.Sprint(insertCount))
- }
-
- func updateUserIndex(tableName string, statictisSess *xorm.Session, userId int64, userIndex float64) {
- updateSql := "UPDATE public." + tableName + " set user_index=" + fmt.Sprint(userIndex*100) + " where id=" + fmt.Sprint(userId)
- statictisSess.Exec(updateSql)
- }
-
- func insertTable(dateRecords []UserBusinessAnalysisAll, tableName string, statictisSess *xorm.Session) error {
-
- insertBatchSql := "INSERT INTO public." + tableName +
- "(id, count_date, code_merge_count, commit_count, issue_count, comment_count, focus_repo_count, star_repo_count, watched_count, gitea_age_month, commit_code_size, commit_dataset_size, " +
- "commit_model_count, solve_issue_count, encyclopedias_count, regist_date, create_repo_count, login_count, open_i_index, email, name, data_date,cloud_brain_task_num,gpu_debug_job,npu_debug_job,gpu_train_job,npu_train_job,npu_inference_job,gpu_bench_mark_job,cloud_brain_run_time,commit_dataset_num,user_index,user_location,focus_other_user,collect_dataset,collected_dataset,recommend_dataset,collect_image,collected_image,recommend_image,user_index_primitive,phone) " +
- "VALUES"
-
- for i, record := range dateRecords {
- insertBatchSql += "(" + fmt.Sprint(record.ID) + ", " + fmt.Sprint(record.CountDate) + ", " + fmt.Sprint(record.CodeMergeCount) + ", " + fmt.Sprint(record.CommitCount) +
- ", " + fmt.Sprint(record.IssueCount) + ", " + fmt.Sprint(record.CommentCount) + ", " + fmt.Sprint(record.FocusRepoCount) + ", " + fmt.Sprint(record.StarRepoCount) +
- ", " + fmt.Sprint(record.WatchedCount) + ", " + fmt.Sprint(record.GiteaAgeMonth) + ", " + fmt.Sprint(record.CommitCodeSize) + ", " + fmt.Sprint(record.CommitDatasetSize) +
- ", " + fmt.Sprint(record.CommitModelCount) + ", " + fmt.Sprint(record.SolveIssueCount) + ", " + fmt.Sprint(record.EncyclopediasCount) + ", " + fmt.Sprint(record.RegistDate) +
- ", " + fmt.Sprint(record.CreateRepoCount) + ", " + fmt.Sprint(record.LoginCount) + ", " + fmt.Sprint(record.OpenIIndex) + ", '" + record.Email + "', '" + record.Name + "', '" + record.DataDate + "'," + fmt.Sprint(record.CloudBrainTaskNum) + "," + fmt.Sprint(record.GpuDebugJob) + "," + fmt.Sprint(record.NpuDebugJob) + "," + fmt.Sprint(record.GpuTrainJob) + "," + fmt.Sprint(record.NpuTrainJob) + "," + fmt.Sprint(record.NpuInferenceJob) + "," + fmt.Sprint(record.GpuBenchMarkJob) + "," + fmt.Sprint(record.CloudBrainRunTime) + "," + fmt.Sprint(record.CommitDatasetNum) + "," + fmt.Sprint(record.UserIndex) + ",'" + record.UserLocation + "'," +
- fmt.Sprint(record.FocusOtherUser) + "," + fmt.Sprint(record.CollectDataset) + "," + fmt.Sprint(record.CollectedDataset) + "," + fmt.Sprint(record.RecommendDataset) + "," + fmt.Sprint(record.CollectImage) + "," + fmt.Sprint(record.CollectedImage) + "," + fmt.Sprint(record.RecommendImage) + "," + fmt.Sprint(record.UserIndexPrimitive) + ",'" + record.Phone + "')"
- if i < (len(dateRecords) - 1) {
- insertBatchSql += ","
- }
- }
-
- _, err := statictisSess.Exec(insertBatchSql)
- return err
- }
-
// RefreshUserStaticAllTabel rebuilds every pre-aggregated user statistics
// table by calling refreshUserStaticTable once per time window: all-time,
// current year, current month, current week, last week, last 30 days,
// yesterday, and last calendar month.
//
// wikiCountMap maps user name -> wiki (encyclopedia) edit count.
// userMetrics is a shared accumulator for platform-wide counters.
//
// The pageStartTime/pageEndTime reassignments below are order-dependent:
// several windows derive their bounds from the previous one.
func RefreshUserStaticAllTabel(wikiCountMap map[string]int, userMetrics map[string]int) {
	currentTimeNow := time.Now()
	// 2021-11-05 — presumably the platform launch date; TODO confirm.
	pageStartTime := time.Date(2021, 11, 5, 0, 0, 0, 0, currentTimeNow.Location())
	// end of today (23:59:59), the shared upper bound for most windows
	pageEndTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 23, 59, 59, 0, currentTimeNow.Location())
	refreshUserStaticTable(wikiCountMap, "user_business_analysis_all", pageStartTime, pageEndTime, userMetrics)
	log.Info("refresh all data finished.")

	// current calendar year: Jan 1 -> end of today
	pageStartTime = time.Date(currentTimeNow.Year(), 1, 1, 0, 0, 0, 0, currentTimeNow.Location())
	refreshUserStaticTable(wikiCountMap, "user_business_analysis_current_year", pageStartTime, pageEndTime, userMetrics)

	// current calendar month: 1st 00:00 -> end of today
	thisMonth := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), 1, 0, 0, 0, 0, currentTimeNow.Location())
	refreshUserStaticTable(wikiCountMap, "user_business_analysis_current_month", thisMonth, pageEndTime, userMetrics)

	// current week with Monday as first day; a positive offset means today is
	// Sunday (Weekday 0), so step back 6 days to the previous Monday
	offset := int(time.Monday - currentTimeNow.Weekday())
	if offset > 0 {
		offset = -6
	}
	pageStartTime = time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 0, 0, 0, time.Local).AddDate(0, 0, offset)
	refreshUserStaticTable(wikiCountMap, "user_business_analysis_current_week", pageStartTime, pageEndTime, userMetrics)

	// last week: the 7 days ending where the current week starts
	pageEndTime = pageStartTime
	pageStartTime = pageStartTime.AddDate(0, 0, -7)
	refreshUserStaticTable(wikiCountMap, "user_business_analysis_last_week", pageStartTime, pageEndTime, userMetrics)

	// rolling 30-day window ending at the end of today
	pageStartTime = time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 0, 0, 0, time.Local).AddDate(0, 0, -30)
	pageEndTime = time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 23, 59, 59, 0, currentTimeNow.Location())
	refreshUserStaticTable(wikiCountMap, "user_business_analysis_last30_day", pageStartTime, pageEndTime, userMetrics)

	// yesterday, full day (00:00:00 - 23:59:59)
	pageStartTime = time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 0, 0, 0, time.Local).AddDate(0, 0, -1)
	pageEndTime = time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 23, 59, 59, 0, currentTimeNow.Location()).AddDate(0, 0, -1)
	refreshUserStaticTable(wikiCountMap, "user_business_analysis_yesterday", pageStartTime, pageEndTime, userMetrics)

	// previous calendar month: 1st of last month -> last day of last month
	// (1st of this month at 23:59:59, minus one day)
	pageStartTime = thisMonth.AddDate(0, -1, 0)
	pageEndTime = time.Date(currentTimeNow.Year(), currentTimeNow.Month(), 1, 23, 59, 59, 0, currentTimeNow.Location()).AddDate(0, 0, -1)
	refreshUserStaticTable(wikiCountMap, "user_business_analysis_last_month", pageStartTime, pageEndTime, userMetrics)

}
-
// CounDataByDateAndReCount aggregates per-user activity between startTime and
// endTime (commits, PRs, issues, comments, stars, cloudbrain tasks, datasets,
// images, ...) and refreshes the statistics tables and user metrics.
//
// wikiCountMap maps user name -> wiki edit count.
// isReCount selects the stat bucket day: false buckets under today, true
// re-buckets under startTime's day (for historical re-computation).
//
// NOTE(review): user_business_analysis is truncated below but this function
// never inserts dateRecord rows into it within this chunk — verify whether
// the per-day insert was intentionally removed or happens elsewhere.
func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time, endTime time.Time, isReCount bool) error {

	log.Info("start to count other user info data")
	sess := x.NewSession()
	defer sess.Close()

	currentTimeNow := time.Now()
	log.Info("current time:" + currentTimeNow.Format("2006-01-02 15:04:05"))

	start_unix := startTime.Unix()
	log.Info("DB query time:" + startTime.Format("2006-01-02 15:04:05"))

	end_unix := endTime.Unix()
	// CountDate is 00:01 of the bucket day (today, or startTime's day when
	// re-counting); its Unix value keys the stats rows
	CountDate := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 1, 0, 0, currentTimeNow.Location())
	if isReCount {
		CountDate = time.Date(startTime.Year(), startTime.Month(), startTime.Day(), 0, 1, 0, 0, currentTimeNow.Location())
	}

	// pre-compute all per-user counters for the window, each as a map keyed
	// by user id (or user name/email where noted)
	DataDate := CountDate.Format("2006-01-02")
	CodeMergeCountMap := queryPullRequest(start_unix, end_unix)
	CommitCountMap := queryCommitAction(start_unix, end_unix, 5)
	IssueCountMap := queryCreateIssue(start_unix, end_unix)

	CommentCountMap := queryComment(start_unix, end_unix)
	FocusRepoCountMap := queryWatch(start_unix, end_unix)
	StarRepoCountMap := queryStar(start_unix, end_unix)
	WatchedCountMap, WatchOtherMap := queryFollow(start_unix, end_unix)

	// keyed by email, not id
	CommitCodeSizeMap, err := GetAllUserKPIStats(startTime, endTime)
	if err != nil {
		log.Info("query commit code errr.")
	} else {
		//log.Info("query commit code size, len=" + fmt.Sprint(len(CommitCodeSizeMap)))
		CommitCodeSizeMapJson, _ := json.Marshal(CommitCodeSizeMap)
		log.Info("CommitCodeSizeMapJson=" + string(CommitCodeSizeMapJson))
	}
	CommitDatasetSizeMap, CommitDatasetNumMap := queryDatasetSize(start_unix, end_unix)
	SolveIssueCountMap := querySolveIssue(start_unix, end_unix)
	CreateRepoCountMap := queryUserCreateRepo(start_unix, end_unix)
	LoginCountMap := queryLoginCount(start_unix, end_unix)
	OpenIIndexMap := queryUserRepoOpenIIndex(start_unix, end_unix)
	CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix)
	AiModelManageMap := queryUserModel(start_unix, end_unix)

	CollectDataset, CollectedDataset := queryDatasetStars(start_unix, end_unix)
	RecommendDataset := queryRecommedDataSet(start_unix, end_unix)
	CollectImage, CollectedImage := queryImageStars(start_unix, end_unix)
	RecommendImage := queryRecommedImage(start_unix, end_unix)

	statictisSess := xStatistic.NewSession()
	defer statictisSess.Close()

	log.Info("truncate all data from table:user_business_analysis ")
	statictisSess.Exec("TRUNCATE TABLE user_business_analysis")

	// exclude organization accounts (type 1)
	cond := "type != 1"
	count, err := sess.Where(cond).Count(new(User))
	if err != nil {
		log.Info("query user error. return.")
		return err
	}
	// maps of day-bucket unix time -> set of user ids (value doubles as key)
	userNewAddActivity := make(map[int64]map[int64]int64)
	userAcitvateJsonMap := make(map[int64]map[int64]int64)
	userCurrentDayRegistMap := make(map[int64]map[int64]int64)
	ParaWeight := getParaWeight()
	userMetrics := make(map[string]int)
	var indexTotal int64
	indexTotal = 0
	// page through all users, PAGE_SIZE at a time
	for {
		sess.Select("`user`.*").Table("user").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
		userList := make([]*User, 0)
		sess.Find(&userList)

		for i, userRecord := range userList {
			// assemble the per-user record for this window from the
			// pre-computed counter maps
			var dateRecord UserBusinessAnalysis
			dateRecord.ID = userRecord.ID
			log.Info("i=" + fmt.Sprint(i) + " userName=" + userRecord.Name)
			dateRecord.CountDate = CountDate.Unix()

			dateRecord.Email = userRecord.Email
			dateRecord.Phone = userRecord.PhoneNumber
			dateRecord.RegistDate = userRecord.CreatedUnix
			dateRecord.Name = userRecord.Name
			dateRecord.GiteaAgeMonth = subMonth(currentTimeNow, userRecord.CreatedUnix.AsTime())
			dateRecord.DataDate = DataDate
			dateRecord.UserLocation = userRecord.Location
			dateRecord.CodeMergeCount = getMapValue(dateRecord.ID, CodeMergeCountMap)
			dateRecord.CommitCount = getMapValue(dateRecord.ID, CommitCountMap)
			dateRecord.IssueCount = getMapValue(dateRecord.ID, IssueCountMap)
			dateRecord.CommentCount = getMapValue(dateRecord.ID, CommentCountMap)
			dateRecord.FocusRepoCount = getMapValue(dateRecord.ID, FocusRepoCountMap)
			dateRecord.StarRepoCount = getMapValue(dateRecord.ID, StarRepoCountMap)
			dateRecord.WatchedCount = getMapValue(dateRecord.ID, WatchedCountMap)
			dateRecord.FocusOtherUser = getMapValue(dateRecord.ID, WatchOtherMap)
			// commit code size is keyed by email
			if _, ok := CommitCodeSizeMap[dateRecord.Email]; !ok {
				dateRecord.CommitCodeSize = 0
			} else {
				dateRecord.CommitCodeSize = int(CommitCodeSizeMap[dateRecord.Email].CommitLines)
			}
			dateRecord.CommitDatasetSize = getMapValue(dateRecord.ID, CommitDatasetSizeMap)
			dateRecord.CommitDatasetNum = getMapValue(dateRecord.ID, CommitDatasetNumMap)
			dateRecord.SolveIssueCount = getMapValue(dateRecord.ID, SolveIssueCountMap)

			// wiki edits are keyed by user name
			dateRecord.EncyclopediasCount = getMapKeyStringValue(dateRecord.Name, wikiCountMap)

			dateRecord.CreateRepoCount = getMapValue(dateRecord.ID, CreateRepoCountMap)

			dateRecord.LoginCount = getMapValue(dateRecord.ID, LoginCountMap)

			if _, ok := OpenIIndexMap[dateRecord.ID]; !ok {
				dateRecord.OpenIIndex = 0
			} else {
				dateRecord.OpenIIndex = OpenIIndexMap[dateRecord.ID]
			}

			// cloudbrain job breakdown is keyed by "<userId>_<JobKind>"
			dateRecord.CloudBrainTaskNum = getMapValue(dateRecord.ID, CloudBrainTaskMap)
			dateRecord.GpuDebugJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_GpuDebugJob", CloudBrainTaskItemMap)
			dateRecord.NpuDebugJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_NpuDebugJob", CloudBrainTaskItemMap)
			dateRecord.GpuTrainJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_GpuTrainJob", CloudBrainTaskItemMap)
			dateRecord.NpuTrainJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_NpuTrainJob", CloudBrainTaskItemMap)
			dateRecord.NpuInferenceJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_NpuInferenceJob", CloudBrainTaskItemMap)
			dateRecord.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap)
			dateRecord.CloudBrainRunTime = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_CloudBrainRunTime", CloudBrainTaskItemMap)
			dateRecord.CommitModelCount = getMapValue(dateRecord.ID, AiModelManageMap)

			dateRecord.CollectDataset = getMapValue(dateRecord.ID, CollectDataset)
			dateRecord.CollectedDataset = getMapValue(dateRecord.ID, CollectedDataset)
			dateRecord.RecommendDataset = getMapValue(dateRecord.ID, RecommendDataset)
			dateRecord.CollectImage = getMapValue(dateRecord.ID, CollectImage)
			dateRecord.CollectedImage = getMapValue(dateRecord.ID, CollectedImage)
			dateRecord.RecommendImage = getMapValue(dateRecord.ID, RecommendImage)

			// raw (pre-normalization) weighted activity score
			dateRecord.UserIndexPrimitive = getUserIndex(dateRecord, ParaWeight)
			setUserMetrics(userMetrics, userRecord, start_unix, end_unix, dateRecord)
			// bucket the user into per-registration-day activity sets used
			// later by updateNewUserAcitivity
			if getUserActivate(dateRecord) > 0 {
				log.Info("has activity." + userRecord.Name)
				addUserToMap(userNewAddActivity, userRecord.CreatedUnix, dateRecord.ID, currentTimeNow)
			}
			if userRecord.IsActive {
				addUserToMap(userAcitvateJsonMap, userRecord.CreatedUnix, dateRecord.ID, currentTimeNow)
			}
			addUserToMap(userCurrentDayRegistMap, userRecord.CreatedUnix, dateRecord.ID, currentTimeNow)
		}

		indexTotal += PAGE_SIZE
		if indexTotal >= count {
			break
		}
	}

	// rebuild every windowed statistics table with the metrics gathered above
	RefreshUserStaticAllTabel(wikiCountMap, userMetrics)
	log.Info("start to update UserMetrics")
	//insert userMetrics table
	var useMetrics UserMetrics
	useMetrics.CountDate = CountDate.Unix()
	// delete any existing row for this bucket day before re-inserting
	statictisSess.Delete(&useMetrics)

	useMetrics.DataDate = DataDate
	useMetrics.ActivateRegistUser = getMapKeyStringValue("ActivateRegistUser", userMetrics)
	useMetrics.HasActivityUser = getMapKeyStringValue("HasActivityUser", userMetrics)
	useMetrics.RegistActivityUser = 0
	useMetrics.NotActivateRegistUser = getMapKeyStringValue("NotActivateRegistUser", userMetrics)
	useMetrics.TotalActivateRegistUser = getMapKeyStringValue("TotalActivateRegistUser", userMetrics)
	useMetrics.TotalHasActivityUser = getMapKeyStringValue("TotalHasActivityUser", userMetrics)
	useMetrics.CurrentDayRegistUser = getMapKeyStringValue("CurrentDayRegistUser", userMetrics)
	// total individual users (type 0)
	count, err = sess.Where("type=0").Count(new(User))
	if err != nil {
		log.Info("query user error. return.")
	}
	useMetrics.TotalUser = int(count)
	// guard against division by zero when no users registered in the window
	if useMetrics.ActivateRegistUser+useMetrics.NotActivateRegistUser == 0 {
		useMetrics.ActivateIndex = 0
	} else {
		useMetrics.ActivateIndex = float64(useMetrics.ActivateRegistUser) / float64(useMetrics.ActivateRegistUser+useMetrics.NotActivateRegistUser)
	}
	statictisSess.Insert(&useMetrics)
	//update new user activity
	updateNewUserAcitivity(userNewAddActivity, userAcitvateJsonMap, userCurrentDayRegistMap, statictisSess)
	return nil
}
-
// updateNewUserAcitivity back-fills per-registration-day metrics in
// public.user_metrics: for every day that has newly registered users, it
// merges this run's active-user ids (currentUserActivity) and activated-user
// ids (userAcitvateJsonMap) into the stored JSON id lists and recomputes the
// derived counters.
//
// All three maps are keyed by the day-bucket unix timestamp (00:01 of the
// registration day); each value is a set of user ids (id -> id).
func updateNewUserAcitivity(currentUserActivity map[int64]map[int64]int64, userAcitvateJsonMap map[int64]map[int64]int64, userCurrentDayRegistMap map[int64]map[int64]int64, statictisSess *xorm.Session) {
	for key, value := range userCurrentDayRegistMap {
		useMetrics := &UserMetrics{CountDate: key}
		userAcitvateValue := userAcitvateJsonMap[key]
		HuodongValue := currentUserActivity[key]
		// only update rows that already exist for this day
		has, err := statictisSess.Get(useMetrics)
		if err == nil && has {
			// merge previously stored active-user ids with this run's set
			ActivityUserArray, HuodongTotal := setUniqueUserId(useMetrics.HasActivityUserJson, HuodongValue)
			useMetrics.HasActivityUser = HuodongTotal
			useMetrics.HasActivityUserJson = ActivityUserArray

			// value is non-empty by construction (the key exists only when at
			// least one user registered that day), so len(value) >= 1 and the
			// activate_index division below cannot divide by zero
			useMetrics.CurrentDayRegistUser = len(value)

			// merge activated-account ids the same way
			RegistUserArray, lenRegistUser := setUniqueUserId(useMetrics.ActivityUserJson, userAcitvateValue)
			useMetrics.ActivityUserJson = RegistUserArray
			useMetrics.ActivateRegistUser = lenRegistUser

			// NOTE(review): regist_activity_user is written from
			// HasActivityUser while has_activity_user_json holds the merged
			// ids — confirm this column mapping is intentional.
			updateSql := "update public.user_metrics set has_activity_user_json='" + useMetrics.HasActivityUserJson +
				"',regist_activity_user=" + fmt.Sprint(useMetrics.HasActivityUser) +
				",activity_user_json='" + useMetrics.ActivityUserJson + "'" +
				",activate_regist_user=" + fmt.Sprint(useMetrics.ActivateRegistUser) +
				",not_activate_regist_user=" + fmt.Sprint(useMetrics.CurrentDayRegistUser-useMetrics.ActivateRegistUser) +
				",current_day_regist_user=" + fmt.Sprint(useMetrics.CurrentDayRegistUser) +
				",activate_index=" + fmt.Sprint(float64(useMetrics.ActivateRegistUser)/float64(useMetrics.CurrentDayRegistUser)) +
				",data_date='" + time.Unix(key, 0).Format("2006-01-02") + "'" +
				" where count_date=" + fmt.Sprint(key)

			statictisSess.Exec(updateSql)
		}
	}
}
-
// setUniqueUserId merges the comma-separated user ids in jsonString into
// value (treated as a set; it is mutated in place and may be nil) and returns
// the merged ids re-joined with commas together with the set size.
//
// Non-numeric fragments in jsonString are ignored. The order of ids in the
// returned string follows Go map iteration order and is therefore
// unspecified, matching the original behavior.
func setUniqueUserId(jsonString string, value map[int64]int64) (string, int) {
	if value == nil {
		value = make(map[int64]int64, 0)
	}
	for _, userIdStr := range strings.Split(jsonString, ",") {
		if userIdInt, err := strconv.ParseInt(userIdStr, 10, 64); err == nil {
			value[userIdInt] = userIdInt
		}
	}
	// strings.Builder avoids the quadratic cost of += concatenation in a loop
	var joined strings.Builder
	first := true
	for _, tmpValue := range value {
		if !first {
			joined.WriteByte(',')
		}
		joined.WriteString(strconv.FormatInt(tmpValue, 10))
		first = false
	}
	return joined.String(), len(value)
}
-
- func addUserToMap(currentUserActivity map[int64]map[int64]int64, registDate timeutil.TimeStamp, userId int64, currentTimeNow time.Time) {
- registTime := registDate.AsTimeInLocation(currentTimeNow.Location())
- CountDateTime := time.Date(registTime.Year(), registTime.Month(), registTime.Day(), 0, 1, 0, 0, currentTimeNow.Location())
- CountDate := CountDateTime.Unix()
- if _, ok := currentUserActivity[CountDate]; !ok {
- userIdMap := make(map[int64]int64, 0)
- userIdMap[userId] = userId
- currentUserActivity[CountDate] = userIdMap
- } else {
- currentUserActivity[CountDate][userId] = userId
- }
- }
-
- func setUserMetrics(userMetrics map[string]int, user *User, start_time int64, end_time int64, dateRecord UserBusinessAnalysis) {
- //ActivateRegistUser int `xorm:"NOT NULL DEFAULT 0"`
- //NotActivateRegistUser int `xorm:"NOT NULL DEFAULT 0"`
- //HasActivityUser int `xorm:"NOT NULL DEFAULT 0"`
- //TotalActivateRegistUser int `xorm:"NOT NULL DEFAULT 0"`
- //TotalHasActivityUser
- regist_time := int64(user.CreatedUnix)
- if regist_time >= start_time && regist_time <= end_time {
- if user.IsActive {
- userMetrics["ActivateRegistUser"] = getMapKeyStringValue("ActivateRegistUser", userMetrics) + 1
- } else {
- userMetrics["NotActivateRegistUser"] = getMapKeyStringValue("NotActivateRegistUser", userMetrics) + 1
- }
- userMetrics["CurrentDayRegistUser"] = getMapKeyStringValue("CurrentDayRegistUser", userMetrics) + 1
- }
- if user.IsActive {
- userMetrics["TotalActivateRegistUser"] = getMapKeyStringValue("TotalActivateRegistUser", userMetrics) + 1
- }
-
- if getUserActivate(dateRecord) > 0 {
- userMetrics["HasActivityUser"] = getMapKeyStringValue("HasActivityUser", userMetrics) + 1
- }
-
- }
-
- func getParaWeight() map[string]float64 {
- result := make(map[string]float64)
- statictisSess := xStatistic.NewSession()
- defer statictisSess.Close()
- statictisSess.Select("*").Table(new(UserAnalysisPara))
- paraList := make([]*UserAnalysisPara, 0)
- statictisSess.Find(¶List)
- for _, paraRecord := range paraList {
- result[paraRecord.Key] = paraRecord.Value
- }
- return result
- }
-
- func getUserIndexFromAnalysisAll(dateRecord UserBusinessAnalysisAll, ParaWeight map[string]float64) float64 {
- var result float64
- // PR数 0.20
- // commit数 0.20
- // 提出任务数 0.20
- // 评论数 0.20
- // 关注项目数 0.10
- // 点赞项目数 0.10
- // 登录次数 0.10
- result = float64(dateRecord.CodeMergeCount) * getParaWeightValue("CodeMergeCount", ParaWeight, 0.2)
- result += float64(dateRecord.CommitCount) * getParaWeightValue("CommitCount", ParaWeight, 0.2)
- result += float64(dateRecord.IssueCount) * getParaWeightValue("IssueCount", ParaWeight, 0.2)
- result += float64(dateRecord.CommentCount) * getParaWeightValue("CommentCount", ParaWeight, 0.2)
- result += float64(dateRecord.FocusRepoCount) * getParaWeightValue("FocusRepoCount", ParaWeight, 0.1)
- result += float64(dateRecord.StarRepoCount) * getParaWeightValue("StarRepoCount", ParaWeight, 0.1)
- result += float64(dateRecord.LoginCount) * getParaWeightValue("LoginCount", ParaWeight, 0.1)
- result += float64(dateRecord.WatchedCount) * getParaWeightValue("WatchedCount", ParaWeight, 0.3)
- codeLine := float64(dateRecord.CommitCodeSize)
- limitCodeLine := getParaWeightValue("LimitCommitCodeSize", ParaWeight, 1000)
- if codeLine >= limitCodeLine {
- codeLine = limitCodeLine
- }
- result += codeLine * getParaWeightValue("CommitCodeSize", ParaWeight, 0.01)
- result += float64(dateRecord.SolveIssueCount) * getParaWeightValue("SolveIssueCount", ParaWeight, 0.2)
- result += float64(dateRecord.EncyclopediasCount) * getParaWeightValue("EncyclopediasCount", ParaWeight, 0.1)
- result += float64(dateRecord.CreateRepoCount) * getParaWeightValue("CreateRepoCount", ParaWeight, 0.05)
- result += float64(dateRecord.CloudBrainTaskNum) * getParaWeightValue("CloudBrainTaskNum", ParaWeight, 0.3)
- result += float64(dateRecord.CommitModelCount) * getParaWeightValue("CommitModelCount", ParaWeight, 0.2)
- result += dateRecord.OpenIIndex * getParaWeightValue("OpenIIndex", ParaWeight, 0.1)
-
- result += float64(dateRecord.CollectDataset) * getParaWeightValue("CollectDataset", ParaWeight, 0.1)
- result += float64(dateRecord.CollectedDataset) * getParaWeightValue("CollectedDataset", ParaWeight, 0.1)
- result += float64(dateRecord.RecommendDataset) * getParaWeightValue("RecommendDataset", ParaWeight, 0.2)
- result += float64(dateRecord.CollectImage) * getParaWeightValue("CollectImage", ParaWeight, 0.1)
- result += float64(dateRecord.CollectedImage) * getParaWeightValue("CollectedImage", ParaWeight, 0.1)
- result += float64(dateRecord.RecommendImage) * getParaWeightValue("RecommendImage", ParaWeight, 0.2)
-
- return result
- }
-
- func getUserActivateAll(dateRecord UserBusinessAnalysisAll) int {
- var result int
- result += dateRecord.CodeMergeCount
- result += dateRecord.CommitCount
- result += dateRecord.IssueCount
- result += dateRecord.CommentCount
- result += dateRecord.FocusRepoCount
- result += dateRecord.StarRepoCount
- result += dateRecord.SolveIssueCount
- result += dateRecord.EncyclopediasCount
- result += dateRecord.CreateRepoCount
- result += dateRecord.CloudBrainTaskNum
- result += dateRecord.CommitModelCount
- result += dateRecord.CommitDatasetNum
- result += dateRecord.FocusOtherUser
- result += dateRecord.CollectDataset
- result += dateRecord.CollectImage
- result += dateRecord.CommitCodeSize
- return result
- }
-
- func getUserActivate(dateRecord UserBusinessAnalysis) int {
- var result int
- result += dateRecord.CodeMergeCount
- result += dateRecord.CommitCount
- result += dateRecord.IssueCount
- result += dateRecord.CommentCount
- result += dateRecord.FocusRepoCount
- result += dateRecord.StarRepoCount
- result += dateRecord.SolveIssueCount
- result += dateRecord.EncyclopediasCount
- result += dateRecord.CreateRepoCount
- result += dateRecord.CloudBrainTaskNum
- result += dateRecord.CommitModelCount
- result += dateRecord.CommitDatasetNum
- result += dateRecord.FocusOtherUser
- result += dateRecord.CollectDataset
- result += dateRecord.CollectImage
- result += dateRecord.CommitCodeSize
- return result
- }
-
- func getUserIndex(dateRecord UserBusinessAnalysis, ParaWeight map[string]float64) float64 {
- var result float64
- // PR数 0.20
- // commit数 0.20
- // 提出任务数 0.20
- // 评论数 0.20
- // 关注项目数 0.10
- // 点赞项目数 0.10
- // 登录次数 0.10
- result = float64(dateRecord.CodeMergeCount) * getParaWeightValue("CodeMergeCount", ParaWeight, 0.2)
- result += float64(dateRecord.CommitCount) * getParaWeightValue("CommitCount", ParaWeight, 0.2)
- result += float64(dateRecord.IssueCount) * getParaWeightValue("IssueCount", ParaWeight, 0.2)
- result += float64(dateRecord.CommentCount) * getParaWeightValue("CommentCount", ParaWeight, 0.2)
- result += float64(dateRecord.FocusRepoCount) * getParaWeightValue("FocusRepoCount", ParaWeight, 0.1)
- result += float64(dateRecord.StarRepoCount) * getParaWeightValue("StarRepoCount", ParaWeight, 0.1)
- result += float64(dateRecord.LoginCount) * getParaWeightValue("LoginCount", ParaWeight, 0.1)
- result += float64(dateRecord.WatchedCount) * getParaWeightValue("WatchedCount", ParaWeight, 0.3)
- codeLine := float64(dateRecord.CommitCodeSize)
- limitCodeLine := getParaWeightValue("LimitCommitCodeSize", ParaWeight, 1000)
- if codeLine >= limitCodeLine {
- codeLine = limitCodeLine
- }
- result += codeLine * getParaWeightValue("CommitCodeSize", ParaWeight, 0.01)
- result += float64(dateRecord.SolveIssueCount) * getParaWeightValue("SolveIssueCount", ParaWeight, 0.2)
- result += float64(dateRecord.EncyclopediasCount) * getParaWeightValue("EncyclopediasCount", ParaWeight, 0.1)
- result += float64(dateRecord.CreateRepoCount) * getParaWeightValue("CreateRepoCount", ParaWeight, 0.05)
- result += float64(dateRecord.CloudBrainTaskNum) * getParaWeightValue("CloudBrainTaskNum", ParaWeight, 0.3)
- result += float64(dateRecord.CommitModelCount) * getParaWeightValue("CommitModelCount", ParaWeight, 0.2)
- result += dateRecord.OpenIIndex * getParaWeightValue("OpenIIndex", ParaWeight, 0.1)
-
- result += float64(dateRecord.CollectDataset) * getParaWeightValue("CollectDataset", ParaWeight, 0.1)
- result += float64(dateRecord.CollectedDataset) * getParaWeightValue("CollectedDataset", ParaWeight, 0.1)
- result += float64(dateRecord.RecommendDataset) * getParaWeightValue("RecommendDataset", ParaWeight, 0.2)
- result += float64(dateRecord.CollectImage) * getParaWeightValue("CollectImage", ParaWeight, 0.1)
- result += float64(dateRecord.CollectedImage) * getParaWeightValue("CollectedImage", ParaWeight, 0.1)
- result += float64(dateRecord.RecommendImage) * getParaWeightValue("RecommendImage", ParaWeight, 0.2)
-
- return result
- }
-
// getParaWeightValue returns the weight configured for key in valueMap,
// falling back to defaultValue when the key is absent. The comma-ok form
// performs a single map lookup instead of two.
func getParaWeightValue(key string, valueMap map[string]float64, defaultValue float64) float64 {
	if v, ok := valueMap[key]; ok {
		return v
	}
	return defaultValue
}
-
// getMapKeyStringValue returns valueMap[key], or 0 when the key is absent.
// The comma-ok form performs a single map lookup instead of two.
func getMapKeyStringValue(key string, valueMap map[string]int) int {
	if v, ok := valueMap[key]; ok {
		return v
	}
	return 0
}
-
// getMapValue returns valueMap[userId], or 0 when the id is absent.
// The comma-ok form performs a single map lookup instead of two.
func getMapValue(userId int64, valueMap map[int64]int) int {
	if v, ok := valueMap[userId]; ok {
		return v
	}
	return 0
}
-
// getInt parses str as a base-10 signed 32-bit integer, returning 0 on any
// parse or range error.
func getInt(str string) int {
	if parsed, err := strconv.ParseInt(str, 10, 32); err == nil {
		return int(parsed)
	}
	return 0
}
-
// CounDataByDate counts user statistics for [startTime, endTime], bucketing
// results under today (isReCount=false). See CounDataByDateAndReCount.
func CounDataByDate(wikiCountMap map[string]int, startTime time.Time, endTime time.Time) {
	CounDataByDateAndReCount(wikiCountMap, startTime, endTime, false)
}
-
- func querySolveIssue(start_unix int64, end_unix int64) map[int64]int {
- sess := x.NewSession()
- defer sess.Close()
- resultMap := make(map[int64]int)
- cond := "issue.is_closed=true and issue.closed_unix>=" + fmt.Sprint(start_unix) + " and issue.closed_unix<=" + fmt.Sprint(end_unix)
-
- count, err := sess.Table("issue_assignees").Join("inner", "issue", "issue.id=issue_assignees.issue_id").Where(cond).Count(new(IssueAssignees))
- if err != nil {
- log.Info("query issue error. return.")
- return resultMap
- }
- var indexTotal int64
- indexTotal = 0
- for {
- issueAssigneesList := make([]*IssueAssignees, 0)
- sess.Select("issue_assignees.*").Table("issue_assignees").
- Join("inner", "issue", "issue.id=issue_assignees.issue_id").
- Where(cond).OrderBy("issue_assignees.id asc").Limit(PAGE_SIZE, int(indexTotal))
-
- sess.Find(&issueAssigneesList)
-
- log.Info("query IssueAssignees size=" + fmt.Sprint(len(issueAssigneesList)))
- for _, issueAssigneesRecord := range issueAssigneesList {
- if _, ok := resultMap[issueAssigneesRecord.AssigneeID]; !ok {
- resultMap[issueAssigneesRecord.AssigneeID] = 1
- } else {
- resultMap[issueAssigneesRecord.AssigneeID] += 1
- }
- }
- indexTotal += PAGE_SIZE
- if indexTotal >= count {
- break
- }
- }
-
- return resultMap
- }
-
- func queryPullRequest(start_unix int64, end_unix int64) map[int64]int {
- sess := x.NewSession()
- defer sess.Close()
- resultMap := make(map[int64]int)
- cond := "pull_request.merged_unix>=" + fmt.Sprint(start_unix) + " and pull_request.merged_unix<=" + fmt.Sprint(end_unix)
- count, err := sess.Table("issue").Join("inner", "pull_request", "issue.id=pull_request.issue_id").Where(cond).Count(new(Issue))
- if err != nil {
- log.Info("query issue error. return.")
- return resultMap
- }
- var indexTotal int64
- indexTotal = 0
- for {
- issueList := make([]*Issue, 0)
- sess.Select("issue.*").Table("issue").Join("inner", "pull_request", "issue.id=pull_request.issue_id").Where(cond).OrderBy("issue.id asc").Limit(PAGE_SIZE, int(indexTotal))
- sess.Find(&issueList)
- log.Info("query issue(PR) size=" + fmt.Sprint(len(issueList)))
- for _, issueRecord := range issueList {
- if _, ok := resultMap[issueRecord.PosterID]; !ok {
- resultMap[issueRecord.PosterID] = 1
- } else {
- resultMap[issueRecord.PosterID] += 1
- }
- }
- indexTotal += PAGE_SIZE
- if indexTotal >= count {
- break
- }
- }
- return resultMap
- }
-
- func queryCommitAction(start_unix int64, end_unix int64, actionType int64) map[int64]int {
- sess := x.NewSession()
- defer sess.Close()
- resultMap := make(map[int64]int)
-
- cond := "user_id=act_user_id and op_type=" + fmt.Sprint(actionType) + " and created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)
-
- count, err := sess.Where(cond).Count(new(Action))
- if err != nil {
- log.Info("query action error. return.")
- return resultMap
- }
- var indexTotal int64
- indexTotal = 0
- for {
- sess.Select("id,user_id,op_type,act_user_id").Table("action").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
- actionList := make([]*Action, 0)
- sess.Find(&actionList)
-
- log.Info("query action size=" + fmt.Sprint(len(actionList)))
- for _, actionRecord := range actionList {
- if _, ok := resultMap[actionRecord.UserID]; !ok {
- resultMap[actionRecord.UserID] = 1
- } else {
- resultMap[actionRecord.UserID] += 1
- }
- }
-
- indexTotal += PAGE_SIZE
- if indexTotal >= count {
- break
- }
- }
-
- return resultMap
- }
-
- func queryCreateIssue(start_unix int64, end_unix int64) map[int64]int {
-
- sess := x.NewSession()
- defer sess.Close()
- resultMap := make(map[int64]int)
- cond := "is_pull=false and created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)
-
- count, err := sess.Where(cond).Count(new(Issue))
- if err != nil {
- log.Info("query Issue error. return.")
- return resultMap
- }
- var indexTotal int64
- indexTotal = 0
- for {
- sess.Select("id,poster_id").Table("issue").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
- issueList := make([]*Issue, 0)
- sess.Find(&issueList)
- log.Info("query issue size=" + fmt.Sprint(len(issueList)))
- for _, issueRecord := range issueList {
- if _, ok := resultMap[issueRecord.PosterID]; !ok {
- resultMap[issueRecord.PosterID] = 1
- } else {
- resultMap[issueRecord.PosterID] += 1
- }
- }
- indexTotal += PAGE_SIZE
- if indexTotal >= count {
- break
- }
- }
- return resultMap
-
- }
-
- func queryComment(start_unix int64, end_unix int64) map[int64]int {
-
- sess := x.NewSession()
- defer sess.Close()
- cond := "created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)
- resultMap := make(map[int64]int)
- count, err := sess.Where(cond).Count(new(Comment))
- if err != nil {
- log.Info("query Comment error. return.")
- return resultMap
- }
- var indexTotal int64
- indexTotal = 0
- for {
- sess.Select("id,type,poster_id").Table("comment").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
- commentList := make([]*Comment, 0)
- sess.Find(&commentList)
- log.Info("query Comment size=" + fmt.Sprint(len(commentList)))
- for _, commentRecord := range commentList {
- if _, ok := resultMap[commentRecord.PosterID]; !ok {
- resultMap[commentRecord.PosterID] = 1
- } else {
- resultMap[commentRecord.PosterID] += 1
- }
- }
- indexTotal += PAGE_SIZE
- if indexTotal >= count {
- break
- }
- }
- return resultMap
- }
-
- func queryWatch(start_unix int64, end_unix int64) map[int64]int {
-
- sess := x.NewSession()
- defer sess.Close()
-
- cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)
-
- resultMap := make(map[int64]int)
- count, err := sess.Where(cond).Count(new(Watch))
- if err != nil {
- log.Info("query issue error. return.")
- return resultMap
- }
- var indexTotal int64
- indexTotal = 0
- for {
- watchList := make([]*Watch, 0)
- sess.Select("id,user_id,repo_id").Table("watch").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
- sess.Find(&watchList)
-
- log.Info("query Watch size=" + fmt.Sprint(len(watchList)))
- for _, watchRecord := range watchList {
- if _, ok := resultMap[watchRecord.UserID]; !ok {
- resultMap[watchRecord.UserID] = 1
- } else {
- resultMap[watchRecord.UserID] += 1
- }
- }
-
- indexTotal += PAGE_SIZE
- if indexTotal >= count {
- break
- }
- }
-
- return resultMap
-
- }
-
- func queryStar(start_unix int64, end_unix int64) map[int64]int {
-
- sess := x.NewSession()
- defer sess.Close()
-
- cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)
- resultMap := make(map[int64]int)
-
- count, err := sess.Where(cond).Count(new(Star))
- if err != nil {
- log.Info("query star error. return.")
- return resultMap
- }
- var indexTotal int64
- indexTotal = 0
- for {
- sess.Select("id,uid,repo_id").Table("star").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
- starList := make([]*Star, 0)
- sess.Find(&starList)
-
- log.Info("query Star size=" + fmt.Sprint(len(starList)))
- for _, starRecord := range starList {
- if _, ok := resultMap[starRecord.UID]; !ok {
- resultMap[starRecord.UID] = 1
- } else {
- resultMap[starRecord.UID] += 1
- }
- }
-
- indexTotal += PAGE_SIZE
- if indexTotal >= count {
- break
- }
- }
- return resultMap
- }
-
- func queryFollow(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) {
-
- sess := x.NewSession()
- defer sess.Close()
- resultMap := make(map[int64]int)
- resultFocusedByOtherMap := make(map[int64]int)
- cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)
-
- count, err := sess.Where(cond).Count(new(Follow))
- if err != nil {
- log.Info("query follow error. return.")
- return resultMap, resultFocusedByOtherMap
- }
- var indexTotal int64
- indexTotal = 0
- for {
- sess.Select("id,user_id,follow_id").Table("follow").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
- followList := make([]*Follow, 0)
- sess.Find(&followList)
-
- log.Info("query Follow size=" + fmt.Sprint(len(followList)))
- for _, followRecord := range followList {
- if _, ok := resultMap[followRecord.FollowID]; !ok {
- resultMap[followRecord.FollowID] = 1
- } else {
- resultMap[followRecord.FollowID] += 1
- }
- if _, ok := resultFocusedByOtherMap[followRecord.UserID]; !ok {
- resultFocusedByOtherMap[followRecord.UserID] = 1
- } else {
- resultFocusedByOtherMap[followRecord.UserID] += 1
- }
- }
-
- indexTotal += PAGE_SIZE
- if indexTotal >= count {
- break
- }
- }
-
- return resultMap, resultFocusedByOtherMap
- }
-
- func queryRecommedDataSet(start_unix int64, end_unix int64) map[int64]int {
- sess := x.NewSession()
- defer sess.Close()
- userIdDdatasetMap := make(map[int64]int)
- cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) + " and recommend=true"
- count, err := sess.Where(cond).Count(new(Dataset))
- if err != nil {
- log.Info("query recommend dataset error. return.")
- return userIdDdatasetMap
- }
- var indexTotal int64
- indexTotal = 0
- for {
- sess.Select("id,user_id,recommend").Where(cond).Table(new(Dataset)).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
- datasetList := make([]*Dataset, 0)
- sess.Find(&datasetList)
- log.Info("query datasetList size=" + fmt.Sprint(len(datasetList)))
- for _, datasetRecord := range datasetList {
- if _, ok := userIdDdatasetMap[datasetRecord.UserID]; !ok {
- userIdDdatasetMap[datasetRecord.UserID] = 1
- } else {
- userIdDdatasetMap[datasetRecord.UserID] += 1
- }
- }
- indexTotal += PAGE_SIZE
- if indexTotal >= count {
- break
- }
- }
- return userIdDdatasetMap
- }
-
- func queryAllDataSet() (map[int64]int64, map[int64]int64) {
- sess := x.NewSession()
- defer sess.Close()
- datasetUserIdMap := make(map[int64]int64)
- userIdDdatasetMap := make(map[int64]int64)
- count, err := sess.Count(new(Dataset))
- if err != nil {
- log.Info("query dataset error. return.")
- return datasetUserIdMap, userIdDdatasetMap
- }
- var indexTotal int64
- indexTotal = 0
- for {
- sess.Select("id,user_id").Table(new(Dataset)).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
- datasetList := make([]*Dataset, 0)
- sess.Find(&datasetList)
- log.Info("query datasetList size=" + fmt.Sprint(len(datasetList)))
- for _, datasetRecord := range datasetList {
- datasetUserIdMap[datasetRecord.ID] = datasetRecord.UserID
- if _, ok := userIdDdatasetMap[datasetRecord.UserID]; !ok {
- userIdDdatasetMap[datasetRecord.UserID] = 1
- } else {
- userIdDdatasetMap[datasetRecord.UserID] += 1
- }
- }
- indexTotal += PAGE_SIZE
- if indexTotal >= count {
- break
- }
- }
- return datasetUserIdMap, userIdDdatasetMap
- }
-
- func queryRecommedImage(start_unix int64, end_unix int64) map[int64]int {
- sess := x.NewSession()
- defer sess.Close()
- userIdImageMap := make(map[int64]int)
- cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) + " and type=5"
- count, err := sess.Where(cond).Count(new(Image))
- if err != nil {
- log.Info("query recommend image error. return.")
- return userIdImageMap
- }
- var indexTotal int64
- indexTotal = 0
- for {
- sess.Select("id,uid,type").Where(cond).Table(new(Image)).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
- imageList := make([]*Image, 0)
- sess.Find(&imageList)
- log.Info("query imageList size=" + fmt.Sprint(len(imageList)))
- for _, imageRecord := range imageList {
- if _, ok := userIdImageMap[imageRecord.UID]; !ok {
- userIdImageMap[imageRecord.UID] = 1
- } else {
- userIdImageMap[imageRecord.UID] += 1
- }
- }
- indexTotal += PAGE_SIZE
- if indexTotal >= count {
- break
- }
- }
- return userIdImageMap
- }
-
- func queryAllImage() (map[int64]int64, map[int64]int64) {
- sess := x.NewSession()
- defer sess.Close()
- imageUserIdMap := make(map[int64]int64)
- userIdDImageMap := make(map[int64]int64)
- count, err := sess.Count(new(Image))
- if err != nil {
- log.Info("query image error. return.")
- return imageUserIdMap, userIdDImageMap
- }
- var indexTotal int64
- indexTotal = 0
- for {
- sess.Select("id,uid").Table(new(Image)).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
- imageList := make([]*Image, 0)
- sess.Find(&imageList)
- log.Info("query imageList size=" + fmt.Sprint(len(imageList)))
- for _, imageRecord := range imageList {
- imageUserIdMap[imageRecord.ID] = imageRecord.UID
- if _, ok := userIdDImageMap[imageRecord.UID]; !ok {
- userIdDImageMap[imageRecord.UID] = 1
- } else {
- userIdDImageMap[imageRecord.UID] += 1
- }
- }
- indexTotal += PAGE_SIZE
- if indexTotal >= count {
- break
- }
- }
- return imageUserIdMap, userIdDImageMap
- }
-
- func queryDatasetStars(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) {
- sess := x.NewSession()
- defer sess.Close()
- datasetCollect := make(map[int64]int)
- datasetCollected := make(map[int64]int)
- datasetUserIdMap, _ := queryAllDataSet()
- cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)
- count, err := sess.Where(cond).Count(new(DatasetStar))
- if err != nil {
- log.Info("query follow error. return.")
- return datasetCollect, datasetCollected
- }
- var indexTotal int64
- indexTotal = 0
- for {
- sess.Select("id,uid,dataset_id").Table(new(DatasetStar)).Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
- datasetStarList := make([]*DatasetStar, 0)
- sess.Find(&datasetStarList)
- log.Info("query datasetStarList size=" + fmt.Sprint(len(datasetStarList)))
- for _, datasetStarRecord := range datasetStarList {
- if _, ok := datasetCollect[datasetStarRecord.UID]; !ok {
- datasetCollect[datasetStarRecord.UID] = 1
- } else {
- datasetCollect[datasetStarRecord.UID] += 1
- }
- if _, ok := datasetCollected[datasetUserIdMap[datasetStarRecord.DatasetID]]; !ok {
- datasetCollected[datasetUserIdMap[datasetStarRecord.DatasetID]] = 1
- } else {
- datasetCollected[datasetUserIdMap[datasetStarRecord.DatasetID]] += 1
- }
- }
- indexTotal += PAGE_SIZE
- if indexTotal >= count {
- break
- }
- }
- return datasetCollect, datasetCollected
- }
-
- func queryImageStars(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) {
- sess := x.NewSession()
- defer sess.Close()
- imageCollect := make(map[int64]int)
- imageCollected := make(map[int64]int)
- imageUserIdMap, _ := queryAllDataSet()
- cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)
- count, err := sess.Where(cond).Count(new(ImageStar))
- if err != nil {
- log.Info("query follow error. return.")
- return imageCollect, imageCollected
- }
- var indexTotal int64
- indexTotal = 0
- for {
- sess.Select("id,uid,image_id").Table(new(ImageStar)).Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
- imageStarList := make([]*ImageStar, 0)
- sess.Find(&imageStarList)
- log.Info("query imageStarList size=" + fmt.Sprint(len(imageStarList)))
- for _, imageStarRecord := range imageStarList {
- if _, ok := imageCollect[imageStarRecord.UID]; !ok {
- imageCollect[imageStarRecord.UID] = 1
- } else {
- imageCollect[imageStarRecord.UID] += 1
- }
- if _, ok := imageCollected[imageUserIdMap[imageStarRecord.ImageID]]; !ok {
- imageCollected[imageUserIdMap[imageStarRecord.ImageID]] = 1
- } else {
- imageCollected[imageUserIdMap[imageStarRecord.ImageID]] += 1
- }
- }
- indexTotal += PAGE_SIZE
- if indexTotal >= count {
- break
- }
- }
- return imageCollect, imageCollected
- }
-
- func queryDatasetSize(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) {
- sess := x.NewSession()
- defer sess.Close()
- resultSizeMap := make(map[int64]int)
- resultNumMap := make(map[int64]int)
- cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)
-
- count, err := sess.Where(cond).Count(new(Attachment))
- if err != nil {
- log.Info("query attachment error. return.")
- return resultSizeMap, resultNumMap
- }
- var indexTotal int64
- indexTotal = 0
- for {
- sess.Select("id,uploader_id,size").Table("attachment").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
- attachmentList := make([]*Attachment, 0)
- sess.Find(&attachmentList)
-
- log.Info("query Attachment size=" + fmt.Sprint(len(attachmentList)))
- for _, attachRecord := range attachmentList {
- if _, ok := resultSizeMap[attachRecord.UploaderID]; !ok {
- resultSizeMap[attachRecord.UploaderID] = int(attachRecord.Size / (1024 * 1024)) //MB
- resultNumMap[attachRecord.UploaderID] = 1
- } else {
- resultSizeMap[attachRecord.UploaderID] += int(attachRecord.Size / (1024 * 1024)) //MB
- resultNumMap[attachRecord.UploaderID] += 1
- }
- }
-
- indexTotal += PAGE_SIZE
- if indexTotal >= count {
- break
- }
- }
-
- return resultSizeMap, resultNumMap
- }
-
- func queryUserCreateRepo(start_unix int64, end_unix int64) map[int64]int {
- sess := x.NewSession()
- defer sess.Close()
- resultMap := make(map[int64]int)
-
- cond := "is_fork=false and created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)
- count, err := sess.Where(cond).Count(new(Repository))
- if err != nil {
- log.Info("query Repository error. return.")
- return resultMap
- }
- var indexTotal int64
- indexTotal = 0
- for {
- sess.Select("id,owner_id,name").Table("repository").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
- repoList := make([]*Repository, 0)
- sess.Find(&repoList)
- log.Info("query Repository size=" + fmt.Sprint(len(repoList)))
- for _, repoRecord := range repoList {
- if _, ok := resultMap[repoRecord.OwnerID]; !ok {
- resultMap[repoRecord.OwnerID] = 1
- } else {
- resultMap[repoRecord.OwnerID] += 1
- }
- }
- indexTotal += PAGE_SIZE
- if indexTotal >= count {
- break
- }
- }
-
- return resultMap
- }
-
// queryUserRepoOpenIIndex computes a per-user OpenI index from per-repo
// radar_total values recorded in the statistics DB with created_unix in
// [start_unix, end_unix]. Owners and collaborators of a repo receive credit
// for its index (see notes below). Find errors are ignored; on failure a
// partial (possibly empty) map is returned.
func queryUserRepoOpenIIndex(start_unix int64, end_unix int64) map[int64]float64 {
	statictisSess := xStatistic.NewSession()
	defer statictisSess.Close()

	// Latest radar_total per repo: rows come back ordered id desc and only
	// the first row seen for each repo_id is kept below.
	statictisSess.Select("id,repo_id,radar_total").Table("repo_statistic").Where("created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)).OrderBy("id desc")
	repoStatisticList := make([]*RepoStatistic, 0)
	statictisSess.Find(&repoStatisticList)
	repoOpenIIndexMap := make(map[int64]float64)
	log.Info("query repo_statistic size=" + fmt.Sprint(len(repoStatisticList)))
	for _, repoRecord := range repoStatisticList {
		if _, ok := repoOpenIIndexMap[repoRecord.RepoID]; !ok {
			repoOpenIIndexMap[repoRecord.RepoID] = repoRecord.RadarTotal
		}
	}

	sess := x.NewSession()
	defer sess.Close()
	// All non-fork repositories (note: no time filter on this query).
	sess.Select("id,owner_id,name").Table("repository").Where("is_fork=false")
	repoList := make([]*Repository, 0)
	sess.Find(&repoList)

	userMap := make(map[int64]float64)

	log.Info("query Repository size=" + fmt.Sprint(len(repoList)))
	// NOTE(review): an owner only receives the index of the FIRST of their
	// repos encountered here (no summing), while collaborators below
	// accumulate across repos — confirm this asymmetry is intended.
	for _, repoRecord := range repoList {
		if _, ok := userMap[repoRecord.OwnerID]; !ok {
			if _, ok := repoOpenIIndexMap[repoRecord.ID]; ok {
				userMap[repoRecord.OwnerID] = repoOpenIIndexMap[repoRecord.ID]
			}
		}
	}

	//query collaboration
	sess.Select("repo_id,user_id,mode").Table("collaboration")
	collaborationList := make([]*Collaboration, 0)
	sess.Find(&collaborationList)

	log.Info("query collaborationList size=" + fmt.Sprint(len(collaborationList)))

	// Collaborators: first repo with an index sets the user's value, later
	// ones add to it.
	for _, collaborationRecord := range collaborationList {
		if _, ok := userMap[collaborationRecord.UserID]; !ok {
			if _, ok := repoOpenIIndexMap[collaborationRecord.RepoID]; ok {
				userMap[collaborationRecord.UserID] = repoOpenIIndexMap[collaborationRecord.RepoID]
			}
		} else {
			if _, ok := repoOpenIIndexMap[collaborationRecord.RepoID]; ok {
				userMap[collaborationRecord.UserID] += repoOpenIIndexMap[collaborationRecord.RepoID]
			}
		}
	}

	log.Info("user openi index size=" + fmt.Sprint(len(userMap)))

	return userMap
}
-
- func queryLoginCount(start_unix int64, end_unix int64) map[int64]int {
- statictisSess := xStatistic.NewSession()
- defer statictisSess.Close()
-
- resultMap := make(map[int64]int)
- cond := "created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)
- count, err := statictisSess.Where(cond).Count(new(UserLoginLog))
- if err != nil {
- log.Info("query UserLoginLog error. return.")
- return resultMap
- }
- var indexTotal int64
- indexTotal = 0
- for {
- statictisSess.Select("id,u_id").Table("user_login_log").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
- userLoginLogList := make([]*UserLoginLog, 0)
- statictisSess.Find(&userLoginLogList)
- log.Info("query user login size=" + fmt.Sprint(len(userLoginLogList)))
- for _, loginRecord := range userLoginLogList {
- if _, ok := resultMap[loginRecord.UId]; !ok {
- resultMap[loginRecord.UId] = 1
- } else {
- resultMap[loginRecord.UId] += 1
- }
- }
- indexTotal += PAGE_SIZE
- if indexTotal >= count {
- break
- }
- }
- log.Info("user login size=" + fmt.Sprint(len(resultMap)))
- return resultMap
- }
-
- func queryCommitCodeSize(start_unix int64, end_unix int64) map[int64]int {
- statictisSess := xStatistic.NewSession()
- defer statictisSess.Close()
-
- resultMap := make(map[int64]int)
- cond := "count_date>=" + fmt.Sprint(start_unix) + " and count_date<=" + fmt.Sprint(end_unix)
- count, err := statictisSess.Where(cond).Count(new(UserBusinessAnalysis))
- if err != nil {
- log.Info("query commit code size error. return.")
- return resultMap
- }
- var indexTotal int64
- indexTotal = 0
- for {
- statictisSess.Select("id,commit_code_size").Table("user_business_analysis").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
- userBusinessAnalysisList := make([]*UserBusinessAnalysis, 0)
- statictisSess.Find(&userBusinessAnalysisList)
- log.Info("query user login size=" + fmt.Sprint(len(userBusinessAnalysisList)))
- for _, analysisRecord := range userBusinessAnalysisList {
- if _, ok := resultMap[analysisRecord.ID]; !ok {
- resultMap[analysisRecord.ID] = analysisRecord.CommitCodeSize
- } else {
- resultMap[analysisRecord.ID] += analysisRecord.CommitCodeSize
- }
- }
- indexTotal += PAGE_SIZE
- if indexTotal >= count {
- break
- }
- }
- log.Info("user commit code size=" + fmt.Sprint(len(resultMap)))
- return resultMap
- }
-
- func queryUserModel(start_unix int64, end_unix int64) map[int64]int {
- sess := x.NewSession()
- defer sess.Close()
- resultMap := make(map[int64]int)
- cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)
- count, err := sess.Where(cond).Count(new(AiModelManage))
- if err != nil {
- log.Info("query AiModelManage error. return.")
- return resultMap
- }
- var indexTotal int64
- indexTotal = 0
- for {
- sess.Select("id,user_id").Table("ai_model_manage").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
- aiModelList := make([]*AiModelManage, 0)
- sess.Find(&aiModelList)
- log.Info("query AiModelManage size=" + fmt.Sprint(len(aiModelList)))
- for _, aiModelRecord := range aiModelList {
- if _, ok := resultMap[aiModelRecord.UserId]; !ok {
- resultMap[aiModelRecord.UserId] = 1
- } else {
- resultMap[aiModelRecord.UserId] += 1
- }
- }
- indexTotal += PAGE_SIZE
- if indexTotal >= count {
- break
- }
- }
- return resultMap
- }
-
- func queryCloudBrainTask(start_unix int64, end_unix int64) (map[int64]int, map[string]int) {
- sess := x.NewSession()
- defer sess.Close()
- resultMap := make(map[int64]int)
- resultItemMap := make(map[string]int)
-
- cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)
- count, err := sess.Where(cond).Count(new(Cloudbrain))
- if err != nil {
- log.Info("query cloudbrain error. return.")
- return resultMap, resultItemMap
- }
- var indexTotal int64
- indexTotal = 0
- for {
- sess.Select("id,job_type,user_id,duration,train_job_duration,type").Table("cloudbrain").Unscoped().Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
- cloudTaskList := make([]*Cloudbrain, 0)
- sess.Find(&cloudTaskList)
- log.Info("query cloudbrain size=" + fmt.Sprint(len(cloudTaskList)))
- for _, cloudTaskRecord := range cloudTaskList {
- if _, ok := resultMap[cloudTaskRecord.UserID]; !ok {
- resultMap[cloudTaskRecord.UserID] = 1
- } else {
- resultMap[cloudTaskRecord.UserID] += 1
- }
- if cloudTaskRecord.Duration < 100000000 && cloudTaskRecord.Duration > 0 {
- setMapKey("CloudBrainRunTime", cloudTaskRecord.UserID, int(cloudTaskRecord.Duration), resultItemMap)
- }
- if cloudTaskRecord.Type == 1 { //npu
- if cloudTaskRecord.JobType == "TRAIN" {
- setMapKey("NpuTrainJob", cloudTaskRecord.UserID, 1, resultItemMap)
- } else if cloudTaskRecord.JobType == "INFERENCE" {
- setMapKey("NpuInferenceJob", cloudTaskRecord.UserID, 1, resultItemMap)
- } else {
- setMapKey("NpuDebugJob", cloudTaskRecord.UserID, 1, resultItemMap)
- }
- } else { //type=0 gpu
- if cloudTaskRecord.JobType == "TRAIN" {
- setMapKey("GpuTrainJob", cloudTaskRecord.UserID, 1, resultItemMap)
- } else if cloudTaskRecord.JobType == "BENCHMARK" {
- setMapKey("GpuBenchMarkJob", cloudTaskRecord.UserID, 1, resultItemMap)
- } else {
- setMapKey("GpuDebugJob", cloudTaskRecord.UserID, 1, resultItemMap)
- }
- }
- }
- indexTotal += PAGE_SIZE
- if indexTotal >= count {
- break
- }
- }
-
- return resultMap, resultItemMap
- }
// setMapKey accumulates value into resultItemMap under the composite key
// "<userId>_<key>". A missing key reads as 0, so no existence check is
// needed — the original if/else was redundant.
func setMapKey(key string, userId int64, value int, resultItemMap map[string]int) {
	resultItemMap[fmt.Sprint(userId)+"_"+key] += value
}
-
- func subMonth(t1, t2 time.Time) (month int) {
- y1 := t1.Year()
- y2 := t2.Year()
- m1 := int(t1.Month())
- m2 := int(t2.Month())
- d1 := t1.Day()
- d2 := t2.Day()
-
- yearInterval := y1 - y2
- // 如果 d1的 月-日 小于 d2的 月-日 那么 yearInterval-- 这样就得到了相差的年数
- if m1 < m2 || m1 == m2 && d1 < d2 {
- yearInterval--
- }
- // 获取月数差值
- monthInterval := (m1 + 12) - m2
- if d1 < d2 {
- monthInterval--
- }
- monthInterval %= 12
- month = yearInterval*12 + monthInterval
- if month == 0 {
- month = 1
- }
- return month
- }
|