#2002 Add user-analysis related attributes

Merged
ychao_1983 merged 14 commits from zouap_static into V20220428 2 years ago
  1. models/user_business_analysis.go (+350, -46)
  2. models/user_business_struct.go (+122, -66)
  3. options/locale/locale_en-US.ini (+9, -1)
  4. options/locale/locale_zh-CN.ini (+9, -1)
  5. public/home/search.js (+9, -11)
  6. routers/repo/user_data_analysis.go (+130, -99)
  7. web_src/js/components/UserAnalysis.vue (+53, -2)

models/user_business_analysis.go (+350, -46)

@@ -82,17 +82,27 @@ type UserBusinessAnalysisAll struct {
DataDate string `xorm:"NULL"`

//cloudbraintask
CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserLocation string `xorm:"NULL"`
CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"`

UserLocation string `xorm:"NULL"`

FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"`
CollectDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectedDataset int `xorm:"NOT NULL DEFAULT 0"`
RecommendDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectImage int `xorm:"NOT NULL DEFAULT 0"`
CollectedImage int `xorm:"NOT NULL DEFAULT 0"`
RecommendImage int `xorm:"NOT NULL DEFAULT 0"`
}

type UserBusinessAnalysis struct {
@@ -159,17 +169,27 @@ type UserBusinessAnalysis struct {

DataDate string `xorm:"NULL"`

CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserLocation string `xorm:"NULL"`
CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"`

UserLocation string `xorm:"NULL"`

FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"`
CollectDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectedDataset int `xorm:"NOT NULL DEFAULT 0"`
RecommendDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectImage int `xorm:"NOT NULL DEFAULT 0"`
CollectedImage int `xorm:"NOT NULL DEFAULT 0"`
RecommendImage int `xorm:"NOT NULL DEFAULT 0"`
}

type UserBusinessAnalysisQueryOptions struct {
@@ -410,8 +430,10 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
log.Info("truncate all data from table: " + tableName)
statictisSess.Exec("TRUNCATE TABLE " + tableName)

log.Info("pageStartTime:" + pageStartTime.Format("2006-01-02 15:04:05"))
log.Info("pageEndTime time:" + pageEndTime.Format("2006-01-02 15:04:05"))
StartTimeNextDay := pageStartTime.AddDate(0, 0, 1)
EndTimeNextDay := pageEndTime.AddDate(0, 0, 1)
log.Info("pageStartTime:" + pageStartTime.Format("2006-01-02 15:04:05") + " nextDay:" + StartTimeNextDay.Format("2006-01-02 15:04:05"))
log.Info("pageEndTime time:" + pageEndTime.Format("2006-01-02 15:04:05") + " nextDay:" + EndTimeNextDay.Format("2006-01-02 15:04:05"))

start_unix := pageStartTime.Unix()
end_unix := pageEndTime.Unix()
@@ -426,8 +448,8 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
CommentCountMap := queryComment(start_unix, end_unix)
FocusRepoCountMap := queryWatch(start_unix, end_unix)
StarRepoCountMap := queryStar(start_unix, end_unix)
WatchedCountMap := queryFollow(start_unix, end_unix)
CommitCodeSizeMap := queryCommitCodeSize(start_unix, end_unix)
WatchedCountMap, WatchOtherMap := queryFollow(start_unix, end_unix)
CommitCodeSizeMap := queryCommitCodeSize(StartTimeNextDay.Unix(), EndTimeNextDay.Unix())
CommitDatasetSizeMap, CommitDatasetNumMap := queryDatasetSize(start_unix, end_unix)
SolveIssueCountMap := querySolveIssue(start_unix, end_unix)
CreateRepoCountMap := queryUserCreateRepo(start_unix, end_unix)
@@ -436,6 +458,12 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
OpenIIndexMap := queryUserRepoOpenIIndex(startTime.Unix(), end_unix)
CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix)
AiModelManageMap := queryUserModel(start_unix, end_unix)

CollectDataset, CollectedDataset := queryDatasetStars(start_unix, end_unix)
RecommendDataset := queryRecommedDataSet(start_unix, end_unix)
CollectImage, CollectedImage := queryImageStars(start_unix, end_unix)
RecommendImage := queryRecommedImage(start_unix, end_unix)

DataDate := currentTimeNow.Format("2006-01-02") + " 00:01"

cond := "type != 1 and is_active=true"
@@ -472,6 +500,7 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
dateRecordAll.IssueCount = getMapValue(dateRecordAll.ID, IssueCountMap)
dateRecordAll.CommentCount = getMapValue(dateRecordAll.ID, CommentCountMap)
dateRecordAll.FocusRepoCount = getMapValue(dateRecordAll.ID, FocusRepoCountMap)
dateRecordAll.FocusOtherUser = getMapValue(dateRecordAll.ID, WatchOtherMap)
dateRecordAll.StarRepoCount = getMapValue(dateRecordAll.ID, StarRepoCountMap)
dateRecordAll.WatchedCount = getMapValue(dateRecordAll.ID, WatchedCountMap)
dateRecordAll.CommitCodeSize = getMapValue(dateRecordAll.ID, CommitCodeSizeMap)
@@ -496,13 +525,20 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
dateRecordAll.NpuInferenceJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_NpuInferenceJob", CloudBrainTaskItemMap)
dateRecordAll.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap)
dateRecordAll.CommitModelCount = getMapValue(dateRecordAll.ID, AiModelManageMap)
dateRecordAll.UserIndex = getUserIndexFromAnalysisAll(dateRecordAll, ParaWeight)
userIndexMap[dateRecordAll.ID] = dateRecordAll.UserIndex
if maxUserIndex < dateRecordAll.UserIndex {
maxUserIndex = dateRecordAll.UserIndex
dateRecordAll.CollectDataset = getMapValue(dateRecordAll.ID, CollectDataset)
dateRecordAll.CollectedDataset = getMapValue(dateRecordAll.ID, CollectedDataset)
dateRecordAll.RecommendDataset = getMapValue(dateRecordAll.ID, RecommendDataset)
dateRecordAll.CollectImage = getMapValue(dateRecordAll.ID, CollectImage)
dateRecordAll.CollectedImage = getMapValue(dateRecordAll.ID, CollectedImage)
dateRecordAll.RecommendImage = getMapValue(dateRecordAll.ID, RecommendImage)

dateRecordAll.UserIndexPrimitive = getUserIndexFromAnalysisAll(dateRecordAll, ParaWeight)
userIndexMap[dateRecordAll.ID] = dateRecordAll.UserIndexPrimitive
if maxUserIndex < dateRecordAll.UserIndexPrimitive {
maxUserIndex = dateRecordAll.UserIndexPrimitive
}
if minUserIndex > dateRecordAll.UserIndex {
minUserIndex = dateRecordAll.UserIndex
if minUserIndex > dateRecordAll.UserIndexPrimitive {
minUserIndex = dateRecordAll.UserIndexPrimitive
}
dateRecordBatch = append(dateRecordBatch, dateRecordAll)
if len(dateRecordBatch) >= BATCH_INSERT_SIZE {
@@ -552,7 +588,7 @@ func insertTable(dateRecords []UserBusinessAnalysisAll, tableName string, static

insertBatchSql := "INSERT INTO public." + tableName +
"(id, count_date, code_merge_count, commit_count, issue_count, comment_count, focus_repo_count, star_repo_count, watched_count, gitea_age_month, commit_code_size, commit_dataset_size, " +
"commit_model_count, solve_issue_count, encyclopedias_count, regist_date, create_repo_count, login_count, open_i_index, email, name, data_date,cloud_brain_task_num,gpu_debug_job,npu_debug_job,gpu_train_job,npu_train_job,npu_inference_job,gpu_bench_mark_job,cloud_brain_run_time,commit_dataset_num,user_index,user_location) " +
"commit_model_count, solve_issue_count, encyclopedias_count, regist_date, create_repo_count, login_count, open_i_index, email, name, data_date,cloud_brain_task_num,gpu_debug_job,npu_debug_job,gpu_train_job,npu_train_job,npu_inference_job,gpu_bench_mark_job,cloud_brain_run_time,commit_dataset_num,user_index,user_location,focus_other_user,collect_dataset,collected_dataset,recommend_dataset,collect_image,collected_image,recommend_image,user_index_primitive) " +
"VALUES"

for i, record := range dateRecords {
@@ -560,7 +596,8 @@ func insertTable(dateRecords []UserBusinessAnalysisAll, tableName string, static
", " + fmt.Sprint(record.IssueCount) + ", " + fmt.Sprint(record.CommentCount) + ", " + fmt.Sprint(record.FocusRepoCount) + ", " + fmt.Sprint(record.StarRepoCount) +
", " + fmt.Sprint(record.WatchedCount) + ", " + fmt.Sprint(record.GiteaAgeMonth) + ", " + fmt.Sprint(record.CommitCodeSize) + ", " + fmt.Sprint(record.CommitDatasetSize) +
", " + fmt.Sprint(record.CommitModelCount) + ", " + fmt.Sprint(record.SolveIssueCount) + ", " + fmt.Sprint(record.EncyclopediasCount) + ", " + fmt.Sprint(record.RegistDate) +
", " + fmt.Sprint(record.CreateRepoCount) + ", " + fmt.Sprint(record.LoginCount) + ", " + fmt.Sprint(record.OpenIIndex) + ", '" + record.Email + "', '" + record.Name + "', '" + record.DataDate + "'," + fmt.Sprint(record.CloudBrainTaskNum) + "," + fmt.Sprint(record.GpuDebugJob) + "," + fmt.Sprint(record.NpuDebugJob) + "," + fmt.Sprint(record.GpuTrainJob) + "," + fmt.Sprint(record.NpuTrainJob) + "," + fmt.Sprint(record.NpuInferenceJob) + "," + fmt.Sprint(record.GpuBenchMarkJob) + "," + fmt.Sprint(record.CloudBrainRunTime) + "," + fmt.Sprint(record.CommitDatasetNum) + "," + fmt.Sprint(record.UserIndex) + ",'" + record.UserLocation + "')"
", " + fmt.Sprint(record.CreateRepoCount) + ", " + fmt.Sprint(record.LoginCount) + ", " + fmt.Sprint(record.OpenIIndex) + ", '" + record.Email + "', '" + record.Name + "', '" + record.DataDate + "'," + fmt.Sprint(record.CloudBrainTaskNum) + "," + fmt.Sprint(record.GpuDebugJob) + "," + fmt.Sprint(record.NpuDebugJob) + "," + fmt.Sprint(record.GpuTrainJob) + "," + fmt.Sprint(record.NpuTrainJob) + "," + fmt.Sprint(record.NpuInferenceJob) + "," + fmt.Sprint(record.GpuBenchMarkJob) + "," + fmt.Sprint(record.CloudBrainRunTime) + "," + fmt.Sprint(record.CommitDatasetNum) + "," + fmt.Sprint(record.UserIndex) + ",'" + record.UserLocation + "'," +
fmt.Sprint(record.FocusOtherUser) + "," + fmt.Sprint(record.CollectDataset) + "," + fmt.Sprint(record.CollectedDataset) + "," + fmt.Sprint(record.RecommendDataset) + "," + fmt.Sprint(record.CollectImage) + "," + fmt.Sprint(record.CollectedImage) + "," + fmt.Sprint(record.RecommendImage) + "," + fmt.Sprint(record.UserIndexPrimitive) + ")"
if i < (len(dateRecords) - 1) {
insertBatchSql += ","
}
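
The batch INSERT above now also stores user_index_primitive next to user_index, while the refresh loop earlier tracks minUserIndex/maxUserIndex over UserIndexPrimitive. A minimal sketch of the min-max normalization this implies, with a hypothetical helper name (the actual normalization step is not shown in this diff):

// normalizeUserIndex rescales the raw index into [0, 1] using the min/max
// collected while building dateRecordBatch. Hypothetical helper, for illustration only.
func normalizeUserIndex(raw, minIndex, maxIndex float64) float64 {
	if maxIndex <= minIndex {
		return 0
	}
	return (raw - minIndex) / (maxIndex - minIndex)
}

// Presumed use once minUserIndex and maxUserIndex are known:
// dateRecordAll.UserIndex = normalizeUserIndex(dateRecordAll.UserIndexPrimitive, minUserIndex, maxUserIndex)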
@@ -628,7 +665,7 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,
CommentCountMap := queryComment(start_unix, end_unix)
FocusRepoCountMap := queryWatch(start_unix, end_unix)
StarRepoCountMap := queryStar(start_unix, end_unix)
WatchedCountMap := queryFollow(start_unix, end_unix)
WatchedCountMap, WatchOtherMap := queryFollow(start_unix, end_unix)

CommitCodeSizeMap, err := GetAllUserKPIStats()
if err != nil {
@@ -643,6 +680,12 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,
OpenIIndexMap := queryUserRepoOpenIIndex(start_unix, end_unix)
CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix)
AiModelManageMap := queryUserModel(start_unix, end_unix)

CollectDataset, CollectedDataset := queryDatasetStars(start_unix, end_unix)
RecommendDataset := queryRecommedDataSet(start_unix, end_unix)
CollectImage, CollectedImage := queryImageStars(start_unix, end_unix)
RecommendImage := queryRecommedImage(start_unix, end_unix)

statictisSess := xStatistic.NewSession()
defer statictisSess.Close()

@@ -683,13 +726,12 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,
dateRecord.FocusRepoCount = getMapValue(dateRecord.ID, FocusRepoCountMap)
dateRecord.StarRepoCount = getMapValue(dateRecord.ID, StarRepoCountMap)
dateRecord.WatchedCount = getMapValue(dateRecord.ID, WatchedCountMap)
dateRecord.FocusOtherUser = getMapValue(dateRecord.ID, WatchOtherMap)
if _, ok := CommitCodeSizeMap[dateRecord.Email]; !ok {
dateRecord.CommitCodeSize = 0
} else {
dateRecord.CommitCodeSize = int(CommitCodeSizeMap[dateRecord.Email].CommitLines)
}

dateRecord.CommitDatasetSize = getMapValue(dateRecord.ID, CommitDatasetSizeMap)
dateRecord.CommitDatasetNum = getMapValue(dateRecord.ID, CommitDatasetNumMap)
dateRecord.SolveIssueCount = getMapValue(dateRecord.ID, SolveIssueCountMap)
@@ -715,7 +757,15 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,
dateRecord.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap)
dateRecord.CloudBrainRunTime = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_CloudBrainRunTime", CloudBrainTaskItemMap)
dateRecord.CommitModelCount = getMapValue(dateRecord.ID, AiModelManageMap)
dateRecord.UserIndex = getUserIndex(dateRecord, ParaWeight)

dateRecord.CollectDataset = getMapValue(dateRecord.ID, CollectDataset)
dateRecord.CollectedDataset = getMapValue(dateRecord.ID, CollectedDataset)
dateRecord.RecommendDataset = getMapValue(dateRecord.ID, RecommendDataset)
dateRecord.CollectImage = getMapValue(dateRecord.ID, CollectImage)
dateRecord.CollectedImage = getMapValue(dateRecord.ID, CollectedImage)
dateRecord.RecommendImage = getMapValue(dateRecord.ID, RecommendImage)

dateRecord.UserIndexPrimitive = getUserIndex(dateRecord, ParaWeight)
setUserMetrics(userMetrics, userRecord, start_unix, end_unix, dateRecord)
_, err = statictisSess.Insert(&dateRecord)
if err != nil {
@@ -765,7 +815,7 @@ func setUserMetrics(userMetrics map[string]int, user *User, start_time int64, en
userMetrics["TotalActivateRegistUser"] = getMapKeyStringValue("TotalActivateRegistUser", userMetrics) + 1
}

if dateRecord.UserIndex > 0 || dateRecord.LoginCount > 0 {
if getUserActivate(dateRecord) > 0 {
userMetrics["HasActivityUser"] = getMapKeyStringValue("HasActivityUser", userMetrics) + 1
}

@@ -802,7 +852,12 @@ func getUserIndexFromAnalysisAll(dateRecord UserBusinessAnalysisAll, ParaWeight
result += float64(dateRecord.StarRepoCount) * getParaWeightValue("StarRepoCount", ParaWeight, 0.1)
result += float64(dateRecord.LoginCount) * getParaWeightValue("LoginCount", ParaWeight, 0.1)
result += float64(dateRecord.WatchedCount) * getParaWeightValue("WatchedCount", ParaWeight, 0.3)
result += float64(dateRecord.CommitCodeSize) * getParaWeightValue("CommitCodeSize", ParaWeight, 0.1)
codeLine := float64(dateRecord.CommitCodeSize)
limitCodeLine := getParaWeightValue("LimitCommitCodeSize", ParaWeight, 1000)
if codeLine >= limitCodeLine {
codeLine = limitCodeLine
}
result += codeLine * getParaWeightValue("CommitCodeSize", ParaWeight, 0.01)
result += float64(dateRecord.SolveIssueCount) * getParaWeightValue("SolveIssueCount", ParaWeight, 0.2)
result += float64(dateRecord.EncyclopediasCount) * getParaWeightValue("EncyclopediasCount", ParaWeight, 0.1)
result += float64(dateRecord.CreateRepoCount) * getParaWeightValue("CreateRepoCount", ParaWeight, 0.05)
@@ -810,6 +865,34 @@ func getUserIndexFromAnalysisAll(dateRecord UserBusinessAnalysisAll, ParaWeight
result += float64(dateRecord.CommitModelCount) * getParaWeightValue("CommitModelCount", ParaWeight, 0.2)
result += dateRecord.OpenIIndex * getParaWeightValue("OpenIIndex", ParaWeight, 0.1)

result += float64(dateRecord.CollectDataset) * getParaWeightValue("CollectDataset", ParaWeight, 0.1)
result += float64(dateRecord.CollectedDataset) * getParaWeightValue("CollectedDataset", ParaWeight, 0.1)
result += float64(dateRecord.RecommendDataset) * getParaWeightValue("RecommendDataset", ParaWeight, 0.2)
result += float64(dateRecord.CollectImage) * getParaWeightValue("CollectImage", ParaWeight, 0.1)
result += float64(dateRecord.CollectedImage) * getParaWeightValue("CollectedImage", ParaWeight, 0.1)
result += float64(dateRecord.RecommendImage) * getParaWeightValue("RecommendImage", ParaWeight, 0.2)

return result
}

func getUserActivate(dateRecord UserBusinessAnalysis) int {
var result int
result += dateRecord.CodeMergeCount
result += dateRecord.CommitCount
result += dateRecord.IssueCount
result += dateRecord.CommentCount
result += dateRecord.FocusRepoCount
result += dateRecord.StarRepoCount
result += dateRecord.SolveIssueCount
result += dateRecord.EncyclopediasCount
result += dateRecord.CreateRepoCount
result += dateRecord.CloudBrainTaskNum
result += dateRecord.CommitModelCount
result += dateRecord.CommitDatasetNum
result += dateRecord.FocusOtherUser
result += dateRecord.CollectDataset
result += dateRecord.CollectImage
result += dateRecord.CommitCodeSize
return result
}

@@ -831,12 +914,12 @@ func getUserIndex(dateRecord UserBusinessAnalysis, ParaWeight map[string]float64
result += float64(dateRecord.StarRepoCount) * getParaWeightValue("StarRepoCount", ParaWeight, 0.1)
result += float64(dateRecord.LoginCount) * getParaWeightValue("LoginCount", ParaWeight, 0.1)
result += float64(dateRecord.WatchedCount) * getParaWeightValue("WatchedCount", ParaWeight, 0.3)
codeLine := float64(dateRecord.CommitCodeSize) / 1000
limitCodeLine := getParaWeightValue("LimitCommitCodeSize", ParaWeight, 100)
codeLine := float64(dateRecord.CommitCodeSize)
limitCodeLine := getParaWeightValue("LimitCommitCodeSize", ParaWeight, 1000)
if codeLine >= limitCodeLine {
codeLine = limitCodeLine
}
result += codeLine * getParaWeightValue("CommitCodeSize", ParaWeight, 0.1)
result += codeLine * getParaWeightValue("CommitCodeSize", ParaWeight, 0.01)
result += float64(dateRecord.SolveIssueCount) * getParaWeightValue("SolveIssueCount", ParaWeight, 0.2)
result += float64(dateRecord.EncyclopediasCount) * getParaWeightValue("EncyclopediasCount", ParaWeight, 0.1)
result += float64(dateRecord.CreateRepoCount) * getParaWeightValue("CreateRepoCount", ParaWeight, 0.05)
@@ -844,6 +927,13 @@ func getUserIndex(dateRecord UserBusinessAnalysis, ParaWeight map[string]float64
result += float64(dateRecord.CommitModelCount) * getParaWeightValue("CommitModelCount", ParaWeight, 0.2)
result += dateRecord.OpenIIndex * getParaWeightValue("OpenIIndex", ParaWeight, 0.1)

result += float64(dateRecord.CollectDataset) * getParaWeightValue("CollectDataset", ParaWeight, 0.1)
result += float64(dateRecord.CollectedDataset) * getParaWeightValue("CollectedDataset", ParaWeight, 0.1)
result += float64(dateRecord.RecommendDataset) * getParaWeightValue("RecommendDataset", ParaWeight, 0.2)
result += float64(dateRecord.CollectImage) * getParaWeightValue("CollectImage", ParaWeight, 0.1)
result += float64(dateRecord.CollectedImage) * getParaWeightValue("CollectedImage", ParaWeight, 0.1)
result += float64(dateRecord.RecommendImage) * getParaWeightValue("RecommendImage", ParaWeight, 0.2)

return result
}
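
A worked example of the changed code-line term, using the defaults in this hunk, for a user with 5000 committed lines (illustrative arithmetic only):

// Old: codeLine = 5000 / 1000 = 5 (cap 100)    -> contribution = 5 * 0.1     = 0.5
// New: codeLine = min(5000, 1000) = 1000 (cap) -> contribution = 1000 * 0.01 = 10 (the maximum)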

@@ -1134,17 +1224,18 @@ func queryStar(start_unix int64, end_unix int64) map[int64]int {
return resultMap
}

func queryFollow(start_unix int64, end_unix int64) map[int64]int {
func queryFollow(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) {

sess := x.NewSession()
defer sess.Close()
resultMap := make(map[int64]int)
resultFocusedByOtherMap := make(map[int64]int)
cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)

count, err := sess.Where(cond).Count(new(Follow))
if err != nil {
log.Info("query follow error. return.")
return resultMap
return resultMap, resultFocusedByOtherMap
}
var indexTotal int64
indexTotal = 0
@@ -1160,6 +1251,11 @@ func queryFollow(start_unix int64, end_unix int64) map[int64]int {
} else {
resultMap[followRecord.FollowID] += 1
}
if _, ok := resultFocusedByOtherMap[followRecord.UserID]; !ok {
resultFocusedByOtherMap[followRecord.UserID] = 1
} else {
resultFocusedByOtherMap[followRecord.UserID] += 1
}
}

indexTotal += PAGE_SIZE
@@ -1168,7 +1264,215 @@ func queryFollow(start_unix int64, end_unix int64) map[int64]int {
}
}

return resultMap
return resultMap, resultFocusedByOtherMap
}
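
To make the two maps returned by queryFollow explicit: the first is keyed by the followed user and feeds WatchedCount, the second is keyed by the follower and feeds FocusOtherUser. A small self-contained sketch of the same counting, for illustration only:

// countFollows mirrors the counting done in queryFollow for (follower, followed) pairs.
func countFollows(pairs [][2]int64) (watched map[int64]int, focusOther map[int64]int) {
	watched = make(map[int64]int)    // keyed by FollowID: how many followers a user gained
	focusOther = make(map[int64]int) // keyed by UserID: how many other users someone followed
	for _, p := range pairs {
		follower, followed := p[0], p[1]
		watched[followed]++
		focusOther[follower]++
	}
	return
}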

func queryRecommedDataSet(start_unix int64, end_unix int64) map[int64]int {
sess := x.NewSession()
defer sess.Close()
userIdDdatasetMap := make(map[int64]int)
cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) + " and recommend=true"
count, err := sess.Where(cond).Count(new(Dataset))
if err != nil {
log.Info("query recommend dataset error. return.")
return userIdDdatasetMap
}
var indexTotal int64
indexTotal = 0
for {
sess.Select("id,user_id,recommend").Where(cond).Table(new(Dataset)).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
datasetList := make([]*Dataset, 0)
sess.Find(&datasetList)
log.Info("query datasetList size=" + fmt.Sprint(len(datasetList)))
for _, datasetRecord := range datasetList {
if _, ok := userIdDdatasetMap[datasetRecord.UserID]; !ok {
userIdDdatasetMap[datasetRecord.UserID] = 1
} else {
userIdDdatasetMap[datasetRecord.UserID] += 1
}
}
indexTotal += PAGE_SIZE
if indexTotal >= count {
break
}
}
return userIdDdatasetMap
}

func queryAllDataSet() (map[int64]int64, map[int64]int64) {
sess := x.NewSession()
defer sess.Close()
datasetUserIdMap := make(map[int64]int64)
userIdDdatasetMap := make(map[int64]int64)
count, err := sess.Count(new(Dataset))
if err != nil {
log.Info("query dataset error. return.")
return datasetUserIdMap, userIdDdatasetMap
}
var indexTotal int64
indexTotal = 0
for {
sess.Select("id,user_id").Table(new(Dataset)).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
datasetList := make([]*Dataset, 0)
sess.Find(&datasetList)
log.Info("query datasetList size=" + fmt.Sprint(len(datasetList)))
for _, datasetRecord := range datasetList {
datasetUserIdMap[datasetRecord.ID] = datasetRecord.UserID
if _, ok := userIdDdatasetMap[datasetRecord.UserID]; !ok {
userIdDdatasetMap[datasetRecord.UserID] = 1
} else {
userIdDdatasetMap[datasetRecord.UserID] += 1
}
}
indexTotal += PAGE_SIZE
if indexTotal >= count {
break
}
}
return datasetUserIdMap, userIdDdatasetMap
}

func queryRecommedImage(start_unix int64, end_unix int64) map[int64]int {
sess := x.NewSession()
defer sess.Close()
userIdImageMap := make(map[int64]int)
cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) + " and type=5"
count, err := sess.Where(cond).Count(new(Image))
if err != nil {
log.Info("query recommend image error. return.")
return userIdImageMap
}
var indexTotal int64
indexTotal = 0
for {
sess.Select("id,uid,type").Where(cond).Table(new(Image)).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
imageList := make([]*Image, 0)
sess.Find(&imageList)
log.Info("query imageList size=" + fmt.Sprint(len(imageList)))
for _, imageRecord := range imageList {
if _, ok := userIdImageMap[imageRecord.UID]; !ok {
userIdImageMap[imageRecord.UID] = 1
} else {
userIdImageMap[imageRecord.UID] += 1
}
}
indexTotal += PAGE_SIZE
if indexTotal >= count {
break
}
}
return userIdImageMap
}

func queryAllImage() (map[int64]int64, map[int64]int64) {
sess := x.NewSession()
defer sess.Close()
imageUserIdMap := make(map[int64]int64)
userIdDImageMap := make(map[int64]int64)
count, err := sess.Count(new(Image))
if err != nil {
log.Info("query image error. return.")
return imageUserIdMap, userIdDImageMap
}
var indexTotal int64
indexTotal = 0
for {
sess.Select("id,uid").Table(new(Image)).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
imageList := make([]*Image, 0)
sess.Find(&imageList)
log.Info("query imageList size=" + fmt.Sprint(len(imageList)))
for _, imageRecord := range imageList {
imageUserIdMap[imageRecord.ID] = imageRecord.UID
if _, ok := userIdDImageMap[imageRecord.UID]; !ok {
userIdDImageMap[imageRecord.UID] = 1
} else {
userIdDImageMap[imageRecord.UID] += 1
}
}
indexTotal += PAGE_SIZE
if indexTotal >= count {
break
}
}
return imageUserIdMap, userIdDImageMap
}

func queryDatasetStars(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) {
sess := x.NewSession()
defer sess.Close()
datasetCollect := make(map[int64]int)
datasetCollected := make(map[int64]int)
datasetUserIdMap, _ := queryAllDataSet()
cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)
count, err := sess.Where(cond).Count(new(DatasetStar))
if err != nil {
log.Info("query follow error. return.")
return datasetCollect, datasetCollected
}
var indexTotal int64
indexTotal = 0
for {
sess.Select("id,uid,dataset_id").Table(new(DatasetStar)).Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
datasetStarList := make([]*DatasetStar, 0)
sess.Find(&datasetStarList)
log.Info("query datasetStarList size=" + fmt.Sprint(len(datasetStarList)))
for _, datasetStarRecord := range datasetStarList {
if _, ok := datasetCollect[datasetStarRecord.UID]; !ok {
datasetCollect[datasetStarRecord.UID] = 1
} else {
datasetCollect[datasetStarRecord.UID] += 1
}
if _, ok := datasetCollected[datasetUserIdMap[datasetStarRecord.DatasetID]]; !ok {
datasetCollected[datasetUserIdMap[datasetStarRecord.DatasetID]] = 1
} else {
datasetCollected[datasetUserIdMap[datasetStarRecord.DatasetID]] += 1
}
}
indexTotal += PAGE_SIZE
if indexTotal >= count {
break
}
}
return datasetCollect, datasetCollected
}

func queryImageStars(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) {
sess := x.NewSession()
defer sess.Close()
imageCollect := make(map[int64]int)
imageCollected := make(map[int64]int)
imageUserIdMap, _ := queryAllImage()
cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)
count, err := sess.Where(cond).Count(new(ImageStar))
if err != nil {
log.Info("query follow error. return.")
return imageCollect, imageCollected
}
var indexTotal int64
indexTotal = 0
for {
sess.Select("id,uid,dataset_id").Table(new(ImageStar)).Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
imageStarList := make([]*ImageStar, 0)
sess.Find(&imageStarList)
log.Info("query imageStarList size=" + fmt.Sprint(len(imageStarList)))
for _, imageStarRecord := range imageStarList {
if _, ok := imageCollect[imageStarRecord.UID]; !ok {
imageCollect[imageStarRecord.UID] = 1
} else {
imageCollect[imageStarRecord.UID] += 1
}
if _, ok := imageCollected[imageUserIdMap[imageStarRecord.ImageID]]; !ok {
imageCollected[imageUserIdMap[imageStarRecord.ImageID]] = 1
} else {
imageCollected[imageUserIdMap[imageStarRecord.ImageID]] += 1
}
}
indexTotal += PAGE_SIZE
if indexTotal >= count {
break
}
}
return imageCollect, imageCollected
}

func queryDatasetSize(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) {


models/user_business_struct.go (+122, -66)

@@ -45,17 +45,26 @@ type UserBusinessAnalysisCurrentYear struct {
Name string `xorm:"NOT NULL"`
DataDate string `xorm:"NULL"`

CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserLocation string `xorm:"NULL"`
CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"`
UserLocation string `xorm:"NULL"`

FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"`
CollectDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectedDataset int `xorm:"NOT NULL DEFAULT 0"`
RecommendDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectImage int `xorm:"NOT NULL DEFAULT 0"`
CollectedImage int `xorm:"NOT NULL DEFAULT 0"`
RecommendImage int `xorm:"NOT NULL DEFAULT 0"`
}

type UserBusinessAnalysisLast30Day struct {
@@ -101,17 +110,26 @@ type UserBusinessAnalysisLast30Day struct {
Name string `xorm:"NOT NULL"`
DataDate string `xorm:"NULL"`

CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserLocation string `xorm:"NULL"`
CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"`
UserLocation string `xorm:"NULL"`

FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"`
CollectDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectedDataset int `xorm:"NOT NULL DEFAULT 0"`
RecommendDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectImage int `xorm:"NOT NULL DEFAULT 0"`
CollectedImage int `xorm:"NOT NULL DEFAULT 0"`
RecommendImage int `xorm:"NOT NULL DEFAULT 0"`
}

type UserBusinessAnalysisLastMonth struct {
@@ -157,17 +175,26 @@ type UserBusinessAnalysisLastMonth struct {
Name string `xorm:"NOT NULL"`
DataDate string `xorm:"NULL"`

CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserLocation string `xorm:"NULL"`
CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"`
UserLocation string `xorm:"NULL"`

FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"`
CollectDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectedDataset int `xorm:"NOT NULL DEFAULT 0"`
RecommendDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectImage int `xorm:"NOT NULL DEFAULT 0"`
CollectedImage int `xorm:"NOT NULL DEFAULT 0"`
RecommendImage int `xorm:"NOT NULL DEFAULT 0"`
}

type UserBusinessAnalysisCurrentMonth struct {
@@ -213,17 +240,26 @@ type UserBusinessAnalysisCurrentMonth struct {
Name string `xorm:"NOT NULL"`
DataDate string `xorm:"NULL"`

CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserLocation string `xorm:"NULL"`
CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"`
UserLocation string `xorm:"NULL"`

FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"`
CollectDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectedDataset int `xorm:"NOT NULL DEFAULT 0"`
RecommendDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectImage int `xorm:"NOT NULL DEFAULT 0"`
CollectedImage int `xorm:"NOT NULL DEFAULT 0"`
RecommendImage int `xorm:"NOT NULL DEFAULT 0"`
}

type UserBusinessAnalysisCurrentWeek struct {
@@ -269,17 +305,27 @@ type UserBusinessAnalysisCurrentWeek struct {
Name string `xorm:"NOT NULL"`
DataDate string `xorm:"NULL"`

CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserLocation string `xorm:"NULL"`
CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"`

UserLocation string `xorm:"NULL"`

FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"`
CollectDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectedDataset int `xorm:"NOT NULL DEFAULT 0"`
RecommendDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectImage int `xorm:"NOT NULL DEFAULT 0"`
CollectedImage int `xorm:"NOT NULL DEFAULT 0"`
RecommendImage int `xorm:"NOT NULL DEFAULT 0"`
}

type UserBusinessAnalysisYesterday struct {
@@ -325,17 +371,27 @@ type UserBusinessAnalysisYesterday struct {
Name string `xorm:"NOT NULL"`
DataDate string `xorm:"NULL"`

CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserLocation string `xorm:"NULL"`
CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"`

UserLocation string `xorm:"NULL"`

FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"`
CollectDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectedDataset int `xorm:"NOT NULL DEFAULT 0"`
RecommendDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectImage int `xorm:"NOT NULL DEFAULT 0"`
CollectedImage int `xorm:"NOT NULL DEFAULT 0"`
RecommendImage int `xorm:"NOT NULL DEFAULT 0"`
}

type UserAnalysisPara struct {


options/locale/locale_en-US.ini (+9, -1)

@@ -507,8 +507,16 @@ static.CloudBrainTaskNum=CloudBrain Task Count
static.CloudBrainRunTime=CloudBrain Run Time
static.CommitDatasetNum=Commit Dataset Count
static.CommitModelCount=Commit Model Count
static.UserIndex=User Index
static.UserIndex=Normalized user index
static.UserIndexPrimitive=User Index
static.countdate=Count Date
static.FocusOtherUser=Focus Other User Count
static.CollectDataset=Collect Dataset Count
static.CollectedDataset=Collected Dataset Count
static.RecommendDataset=Recommended Dataset Count
static.CollectImage=Collect Image Count
static.CollectedImage=Collected Image Count
static.RecommendImage=Recommended Image Count
static.all=All
static.public.user_business_analysis_current_month=Current_Month
static.public.user_business_analysis_current_week=Current_Week


options/locale/locale_zh-CN.ini (+9, -1)

@@ -512,8 +512,16 @@ static.CloudBrainTaskNum=云脑任务数
static.CloudBrainRunTime=云脑运行时间(小时)
static.CommitDatasetNum=上传(提交)数据集文件数
static.CommitModelCount=提交模型数
static.UserIndex=用户指数
static.UserIndex=归一化用户指数
static.UserIndexPrimitive=用户指数
static.countdate=系统统计时间
static.FocusOtherUser=关注他人数
static.CollectDataset=收藏数据集
static.CollectedDataset=被收藏数据集
static.RecommendDataset=被推荐数据集数
static.CollectImage=收藏镜像数
static.CollectedImage=被收藏镜像数
static.RecommendImage=被推荐镜像数
static.all=所有
static.public.user_business_analysis_current_month=本月
static.public.user_business_analysis_current_week=本周


public/home/search.js (+9, -11)

@@ -810,14 +810,7 @@ var repoAndOrgEN={
function page(current){
currentPage=current;
startIndex = currentPage -1;
if(startIndex < 1){
startIndex = 1;
}
endIndex = currentPage + 2;
if(endIndex >= totalPage){
endIndex = totalPage;
}
doSearch(currentSearchTableName,currentSearchKeyword,current,pageSize,false,currentSearchSortBy,OnlySearchLabel);
}
@@ -888,9 +881,14 @@ function getYPosition(e){
var html ="";
console.log("currentPage=" + currentPage);
console.log("privateTotal=" + privateTotal);
// if(totalPage==0){
// return;
// }
startIndex = currentPage -1;
if(startIndex < 1){
startIndex = 1;
}
endIndex = currentPage + 2;
if(endIndex >= totalPage){
endIndex = totalPage;
}
html += "<span class=\"item\">" + getLabel(isZh,"search_input_total") + " " + totalNum + " " + getLabel(isZh,"search_srtip") + "</span>"
if(currentPage > 1){
html += "<a class=\"item navigation\" href=\"javascript:page(1)\"><span class=\"navigation_label\">" + getLabel(isZh,"search_home_page") + "</span></a>";


routers/repo/user_data_analysis.go (+130, -99)

@@ -19,6 +19,130 @@ const (
PAGE_SIZE = 2000
)

func getExcelHeader(ctx *context.Context) map[string]string {
excelHeader := make([]string, 0)
excelHeader = append(excelHeader, ctx.Tr("user.static.id"))
excelHeader = append(excelHeader, ctx.Tr("user.static.name"))
excelHeader = append(excelHeader, ctx.Tr("user.static.UserIndex"))
excelHeader = append(excelHeader, ctx.Tr("user.static.UserIndexPrimitive"))
excelHeader = append(excelHeader, ctx.Tr("user.static.codemergecount"))
excelHeader = append(excelHeader, ctx.Tr("user.static.commitcount"))
excelHeader = append(excelHeader, ctx.Tr("user.static.issuecount"))
excelHeader = append(excelHeader, ctx.Tr("user.static.commentcount"))
excelHeader = append(excelHeader, ctx.Tr("user.static.focusrepocount"))
excelHeader = append(excelHeader, ctx.Tr("user.static.starrepocount"))
excelHeader = append(excelHeader, ctx.Tr("user.static.logincount"))
excelHeader = append(excelHeader, ctx.Tr("user.static.watchedcount"))
excelHeader = append(excelHeader, ctx.Tr("user.static.commitcodesize"))
excelHeader = append(excelHeader, ctx.Tr("user.static.solveissuecount"))
excelHeader = append(excelHeader, ctx.Tr("user.static.encyclopediascount"))
excelHeader = append(excelHeader, ctx.Tr("user.static.createrepocount"))
excelHeader = append(excelHeader, ctx.Tr("user.static.openiindex"))
excelHeader = append(excelHeader, ctx.Tr("user.static.CloudBrainTaskNum"))
excelHeader = append(excelHeader, ctx.Tr("user.static.CloudBrainRunTime"))
excelHeader = append(excelHeader, ctx.Tr("user.static.CommitDatasetNum"))
excelHeader = append(excelHeader, ctx.Tr("user.static.CommitModelCount"))

excelHeader = append(excelHeader, ctx.Tr("user.static.FocusOtherUser"))
excelHeader = append(excelHeader, ctx.Tr("user.static.CollectDataset"))
excelHeader = append(excelHeader, ctx.Tr("user.static.CollectedDataset"))
excelHeader = append(excelHeader, ctx.Tr("user.static.RecommendDataset"))
excelHeader = append(excelHeader, ctx.Tr("user.static.CollectImage"))
excelHeader = append(excelHeader, ctx.Tr("user.static.CollectedImage"))
excelHeader = append(excelHeader, ctx.Tr("user.static.RecommendImage"))

excelHeader = append(excelHeader, ctx.Tr("user.static.registdate"))
excelHeader = append(excelHeader, ctx.Tr("user.static.countdate"))

excelHeaderMap := make(map[string]string, 0)
var i byte
i = 0
for _, value := range excelHeader {
excelColumn := getColumn(i) + fmt.Sprint(1)
log.Info("excelColumn=" + excelColumn)
excelHeaderMap[excelColumn] = value
i++
}
return excelHeaderMap
}
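
Because the header keys are built as getColumn(i) + "1", the append order in getExcelHeader has to stay in sync with the column order written by writeExcel below. For example, the first few cells come out as (illustrative):

// header := getExcelHeader(ctx)
// header["A1"] == ctx.Tr("user.static.id")
// header["B1"] == ctx.Tr("user.static.name")
// header["C1"] == ctx.Tr("user.static.UserIndex")          // normalized index
// header["D1"] == ctx.Tr("user.static.UserIndexPrimitive") // raw index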

func writeExcel(row int, xlsx *excelize.File, sheetName string, userRecord *models.UserBusinessAnalysisAll) {
rows := fmt.Sprint(row)
var tmp byte
tmp = 0
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.ID)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.Name)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", userRecord.UserIndex))
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", userRecord.UserIndexPrimitive))
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CodeMergeCount)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitCount)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.IssueCount)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommentCount)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.FocusRepoCount)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.StarRepoCount)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.LoginCount)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.WatchedCount)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitCodeSize)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.SolveIssueCount)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.EncyclopediasCount)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CreateRepoCount)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex))
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CloudBrainTaskNum)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600))
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitDatasetNum)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitModelCount)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.FocusOtherUser)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectDataset)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectedDataset)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.RecommendDataset)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectImage)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectedImage)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.RecommendImage)
tmp = tmp + 1
formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05")
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime[0:len(formatTime)-3])
tmp = tmp + 1

formatTime = userRecord.DataDate
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime)
}
func getColumn(tmp byte) string {
var tmpA byte
tmpA = 'A'
if tmp < 26 {
return string(tmpA + tmp)
} else {
return "A" + string(tmpA+(tmp-26))
}
}
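
getColumn maps a zero-based column index to an Excel column name and, as written, covers 52 columns (A–Z, then AA–AZ), which is enough for the roughly 30 headers above. For example:

// getColumn(0)  == "A"
// getColumn(25) == "Z"
// getColumn(26) == "AA"
// getColumn(51) == "AZ"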

func queryUserDataPage(ctx *context.Context, tableName string, queryObj interface{}) {
page := ctx.QueryInt("page")
if page <= 0 {
@@ -37,30 +161,7 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac
sheetName := ctx.Tr("user.static.sheetname")
index := xlsx.NewSheet(sheetName)
xlsx.DeleteSheet("Sheet1")
dataHeader := map[string]string{
"A1": ctx.Tr("user.static.id"),
"B1": ctx.Tr("user.static.name"),
"C1": ctx.Tr("user.static.UserIndex"),
"D1": ctx.Tr("user.static.codemergecount"),
"E1": ctx.Tr("user.static.commitcount"),
"F1": ctx.Tr("user.static.issuecount"),
"G1": ctx.Tr("user.static.commentcount"),
"H1": ctx.Tr("user.static.focusrepocount"),
"I1": ctx.Tr("user.static.starrepocount"),
"J1": ctx.Tr("user.static.logincount"),
"K1": ctx.Tr("user.static.watchedcount"),
"L1": ctx.Tr("user.static.commitcodesize"),
"M1": ctx.Tr("user.static.solveissuecount"),
"N1": ctx.Tr("user.static.encyclopediascount"),
"O1": ctx.Tr("user.static.createrepocount"),
"P1": ctx.Tr("user.static.openiindex"),
"Q1": ctx.Tr("user.static.CloudBrainTaskNum"),
"R1": ctx.Tr("user.static.CloudBrainRunTime"),
"S1": ctx.Tr("user.static.CommitDatasetNum"),
"T1": ctx.Tr("user.static.CommitModelCount"),
"U1": ctx.Tr("user.static.registdate"),
"V1": ctx.Tr("user.static.countdate"),
}
dataHeader := getExcelHeader(ctx)
for k, v := range dataHeader {
// Set the cell value
xlsx.SetCellValue(sheetName, k, v)
@@ -74,31 +175,7 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac
log.Info("return count=" + fmt.Sprint(count))
for _, userRecord := range re {
row++
rows := fmt.Sprint(row)
xlsx.SetCellValue(sheetName, "A"+rows, userRecord.ID)
xlsx.SetCellValue(sheetName, "B"+rows, userRecord.Name)
xlsx.SetCellValue(sheetName, "C"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex))
xlsx.SetCellValue(sheetName, "D"+rows, userRecord.CodeMergeCount)
xlsx.SetCellValue(sheetName, "E"+rows, userRecord.CommitCount)
xlsx.SetCellValue(sheetName, "F"+rows, userRecord.IssueCount)
xlsx.SetCellValue(sheetName, "G"+rows, userRecord.CommentCount)
xlsx.SetCellValue(sheetName, "H"+rows, userRecord.FocusRepoCount)
xlsx.SetCellValue(sheetName, "I"+rows, userRecord.StarRepoCount)
xlsx.SetCellValue(sheetName, "J"+rows, userRecord.LoginCount)
xlsx.SetCellValue(sheetName, "K"+rows, userRecord.WatchedCount)
xlsx.SetCellValue(sheetName, "L"+rows, userRecord.CommitCodeSize)
xlsx.SetCellValue(sheetName, "M"+rows, userRecord.SolveIssueCount)
xlsx.SetCellValue(sheetName, "N"+rows, userRecord.EncyclopediasCount)
xlsx.SetCellValue(sheetName, "O"+rows, userRecord.CreateRepoCount)
xlsx.SetCellValue(sheetName, "P"+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex))
xlsx.SetCellValue(sheetName, "Q"+rows, userRecord.CloudBrainTaskNum)
xlsx.SetCellValue(sheetName, "R"+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600))
xlsx.SetCellValue(sheetName, "S"+rows, userRecord.CommitDatasetNum)
xlsx.SetCellValue(sheetName, "T"+rows, userRecord.CommitModelCount)
formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05")
xlsx.SetCellValue(sheetName, "U"+rows, formatTime[0:len(formatTime)-3])
formatTime = userRecord.DataDate
xlsx.SetCellValue(sheetName, "V"+rows, formatTime)
writeExcel(row, xlsx, sheetName, userRecord)
}

indexTotal += PAGE_SIZE
@@ -236,62 +313,16 @@ func QueryUserStaticDataPage(ctx *context.Context) {
sheetName := ctx.Tr("user.static.sheetname")
index := xlsx.NewSheet(sheetName)
xlsx.DeleteSheet("Sheet1")
dataHeader := map[string]string{
"A1": ctx.Tr("user.static.id"),
"B1": ctx.Tr("user.static.name"),
"C1": ctx.Tr("user.static.UserIndex"),
"D1": ctx.Tr("user.static.codemergecount"),
"E1": ctx.Tr("user.static.commitcount"),
"F1": ctx.Tr("user.static.issuecount"),
"G1": ctx.Tr("user.static.commentcount"),
"H1": ctx.Tr("user.static.focusrepocount"),
"I1": ctx.Tr("user.static.starrepocount"),
"J1": ctx.Tr("user.static.logincount"),
"K1": ctx.Tr("user.static.watchedcount"),
"L1": ctx.Tr("user.static.commitcodesize"),
"M1": ctx.Tr("user.static.solveissuecount"),
"N1": ctx.Tr("user.static.encyclopediascount"),
"O1": ctx.Tr("user.static.createrepocount"),
"P1": ctx.Tr("user.static.openiindex"),
"Q1": ctx.Tr("user.static.CloudBrainTaskNum"),
"R1": ctx.Tr("user.static.CloudBrainRunTime"),
"S1": ctx.Tr("user.static.CommitDatasetNum"),
"T1": ctx.Tr("user.static.CommitModelCount"),
"U1": ctx.Tr("user.static.registdate"),
"V1": ctx.Tr("user.static.countdate"),
}

dataHeader := getExcelHeader(ctx)
for k, v := range dataHeader {
// Set the cell value
xlsx.SetCellValue(sheetName, k, v)
}

for i, userRecord := range re {
rows := fmt.Sprint(i + 2)

xlsx.SetCellValue(sheetName, "A"+rows, userRecord.ID)
xlsx.SetCellValue(sheetName, "B"+rows, userRecord.Name)
xlsx.SetCellValue(sheetName, "C"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex))
xlsx.SetCellValue(sheetName, "D"+rows, userRecord.CodeMergeCount)
xlsx.SetCellValue(sheetName, "E"+rows, userRecord.CommitCount)
xlsx.SetCellValue(sheetName, "F"+rows, userRecord.IssueCount)
xlsx.SetCellValue(sheetName, "G"+rows, userRecord.CommentCount)
xlsx.SetCellValue(sheetName, "H"+rows, userRecord.FocusRepoCount)
xlsx.SetCellValue(sheetName, "I"+rows, userRecord.StarRepoCount)
xlsx.SetCellValue(sheetName, "J"+rows, userRecord.LoginCount)
xlsx.SetCellValue(sheetName, "K"+rows, userRecord.WatchedCount)
xlsx.SetCellValue(sheetName, "L"+rows, userRecord.CommitCodeSize)
xlsx.SetCellValue(sheetName, "M"+rows, userRecord.SolveIssueCount)
xlsx.SetCellValue(sheetName, "N"+rows, userRecord.EncyclopediasCount)
xlsx.SetCellValue(sheetName, "O"+rows, userRecord.CreateRepoCount)
xlsx.SetCellValue(sheetName, "P"+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex))
xlsx.SetCellValue(sheetName, "Q"+rows, userRecord.CloudBrainTaskNum)
xlsx.SetCellValue(sheetName, "R"+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600))
xlsx.SetCellValue(sheetName, "S"+rows, userRecord.CommitDatasetNum)
xlsx.SetCellValue(sheetName, "T"+rows, userRecord.CommitModelCount)
formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05")
xlsx.SetCellValue(sheetName, "U"+rows, formatTime[0:len(formatTime)-3])
formatTime = userRecord.DataDate
xlsx.SetCellValue(sheetName, "V"+rows, formatTime)
row := i + 2
writeExcel(row, xlsx, sheetName, userRecord)
}

// Set the sheet opened by default


web_src/js/components/UserAnalysis.vue (+53, -2)

@@ -64,13 +64,22 @@
</el-table-column>
<el-table-column
prop="UserIndex"
label="用户指数"
label="归一化用户指数"
width="120px"
align="center">
<template slot-scope="scope">
{{scope.row.UserIndex | rounding}}
</template>
</el-table-column>
<el-table-column
prop="UserIndexPrimitive"
label="用户指数"
width="120px"
align="center">
<template slot-scope="scope">
{{scope.row.UserIndexPrimitive | rounding}}
</template>
</el-table-column>
<el-table-column
prop="CodeMergeCount"
label="PR数"
@@ -160,6 +169,48 @@
label="提交模型数"
width="120px"
align="center">
</el-table-column>
<el-table-column
prop="FocusOtherUser"
label="关注他人数"
width="120px"
align="center">
</el-table-column>
<el-table-column
prop="CollectDataset"
label="收藏数据集"
width="120px"
align="center">
</el-table-column>
<el-table-column
prop="CollectedDataset"
label="被收藏数据集"
width="120px"
align="center">
</el-table-column>
<el-table-column
prop="RecommendDataset"
label="被推荐数据集数"
width="120px"
align="center">
</el-table-column>
<el-table-column
prop="CollectImage"
label="收藏镜像数"
width="120px"
align="center">
</el-table-column>
<el-table-column
prop="CollectedImage"
label="被收藏镜像数"
width="120px"
align="center">
</el-table-column>
<el-table-column
prop="RecommendImage"
label="被推荐镜像数"
width="120px"
align="center">
</el-table-column>
<el-table-column
prop="RegistDate"
@@ -214,7 +265,7 @@
value_time: '',
search:'',
data:'',
columns: [{title: 'ID',key: 'ID'},{title: '用户名',key: 'Name'},{title: 'PR数',key: 'CodeMergeCount'},{title: 'commit数',key:'CommitCount'},{title: '提出任务数',key: 'IssueCount'},{title: '评论数',key: 'CommentCount'},{title: '关注项目数',key: 'FocusRepoCount'},{title: '点赞项目数',key: 'StarRepoCount'},{title: '登录次数',key: 'LoginCount'},{title:'关注者数',key:'WatchedCount'},{title:'commit代码行数',key:'CommitCodeSize'},{title:'已解决任务数',key:'SolveIssueCount'},{title:'百科页面贡献次数',key:'EncyclopediasCount'},{title:'创建项目',key:'CreateRepoCount'},{title:'用户注册时间',key:'RegistDate'},{title:'云脑任务数',key:'CloudBrainTaskNum'},{title:'云脑运行时间(小时)',key:'CloudBrainRunTime'},{title:'上传(提交)数据集文件数',key:'CommitDatasetNum'},{title:'提交模型数',key:'CommitModelCount'},{title:'用户指数',key:'UserIndex'},{title:'系统统计时间',key:'CountDate'}],
columns: [{title: 'ID',key: 'ID'},{title: '用户名',key: 'Name'},{title: 'PR数',key: 'CodeMergeCount'},{title: 'commit数',key:'CommitCount'},{title: '提出任务数',key: 'IssueCount'},{title: '评论数',key: 'CommentCount'},{title: '关注项目数',key: 'FocusRepoCount'},{title: '点赞项目数',key: 'StarRepoCount'},{title: '登录次数',key: 'LoginCount'},{title:'关注者数',key:'WatchedCount'},{title:'commit代码行数',key:'CommitCodeSize'},{title:'已解决任务数',key:'SolveIssueCount'},{title:'百科页面贡献次数',key:'EncyclopediasCount'},{title:'创建项目',key:'CreateRepoCount'},{title:'用户注册时间',key:'RegistDate'},{title:'云脑任务数',key:'CloudBrainTaskNum'},{title:'云脑运行时间(小时)',key:'CloudBrainRunTime'},{title:'上传(提交)数据集文件数',key:'CommitDatasetNum'},{title:'提交模型数',key:'CommitModelCount'},{title:'归一化用户指数',key:'UserIndex'},{title:'用户指数',key:'UserIndexPrimitive'},{title:'关注他人数',key:'FocusOtherUser'},{title:'收藏数据集',key:'CollectDataset'},{title:'被收藏数据集',key:'CollectedDataset'},{title:'被推荐数据集数',key:'RecommendDataset'},{title:'收藏镜像数',key:'CollectImage'},{title:'被收藏镜像数',key:'CollectedImage'},{title:'被推荐镜像数',key:'RecommendImage'},{title:'系统统计时间',key:'CountDate'}],
blob:'',
fileName:'',
dynamic:7,

