// Copyright 2019 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.

package cmd

import (
	"bufio"
	"bytes"
	"context"
	"fmt"
	"io/ioutil"
	golog "log"
	"os"
	"os/exec"
	"path/filepath"
	"strings"
	"text/tabwriter"

	"code.gitea.io/gitea/models"
	"code.gitea.io/gitea/models/migrations"
	"code.gitea.io/gitea/modules/git"
	"code.gitea.io/gitea/modules/log"
	"code.gitea.io/gitea/modules/options"
	"code.gitea.io/gitea/modules/repository"
	"code.gitea.io/gitea/modules/setting"
	"code.gitea.io/gitea/modules/util"

	"github.com/urfave/cli"
	"xorm.io/builder"
)

// CmdDoctor represents the available doctor sub-command.
var CmdDoctor = cli.Command{
	Name:        "doctor",
	Usage:       "Diagnose problems",
	Description: "A command to diagnose problems with the current Gitea instance according to the given configuration.",
	Action:      runDoctor,
	Flags: []cli.Flag{
		cli.BoolFlag{
			Name:  "list",
			Usage: "List the available checks",
		},
		cli.BoolFlag{
			Name:  "default",
			Usage: "Run the default checks (if neither --run nor --all is set, this is the default behaviour)",
		},
		cli.StringSliceFlag{
			Name:  "run",
			Usage: "Run the provided checks (if --default is also set, the default checks will run as well)",
		},
		cli.BoolFlag{
			Name:  "all",
			Usage: "Run all the available checks",
		},
		cli.BoolFlag{
			Name:  "fix",
			Usage: "Automatically fix what we can",
		},
		cli.StringFlag{
			Name:  "log-file",
			Usage: `Name of the log file (default: "doctor.log"). Set to "-" to output to stdout, set to "" to disable`,
		},
	},
}
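
// check describes a single doctor check: its human-readable title, the name
// used with --run, whether it runs by default, the function that performs it,
// whether a failure aborts the remaining checks, and whether the check can run
// without initialising the database.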
type check struct {
	title            string
	name             string
	isDefault        bool
	f                func(ctx *cli.Context) ([]string, error)
	abortIfFailed    bool
	skipDatabaseInit bool
}

// checklist is the list of all available doctor checks
var checklist = []check{
	{
		// NOTE: this check should be the first in the list
		title:            "Check paths and basic configuration",
		name:             "paths",
		isDefault:        true,
		f:                runDoctorPathInfo,
		abortIfFailed:    true,
		skipDatabaseInit: true,
	},
	{
		title:         "Check Database Version",
		name:          "check-db-version",
		isDefault:     true,
		f:             runDoctorCheckDBVersion,
		abortIfFailed: false,
	},
	{
		title:     "Check consistency of database",
		name:      "check-db-consistency",
		isDefault: false,
		f:         runDoctorCheckDBConsistency,
	},
	{
		title:     "Check if OpenSSH authorized_keys file is up-to-date",
		name:      "authorized_keys",
		isDefault: true,
		f:         runDoctorAuthorizedKeys,
	},
	{
		title:     "Check if SCRIPT_TYPE is available",
		name:      "script-type",
		isDefault: false,
		f:         runDoctorScriptType,
	},
	{
		title:     "Check if hook files are up-to-date and executable",
		name:      "hooks",
		isDefault: false,
		f:         runDoctorHooks,
	},
	{
		title:     "Recalculate merge bases",
		name:      "recalculate_merge_bases",
		isDefault: false,
		f:         runDoctorPRMergeBase,
	},
	{
		title:     "Recalculate stars number for all users",
		name:      "recalculate_stars_number",
		isDefault: false,
		f:         runDoctorUserStarNum,
	},
	// more checks should be appended here
}
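
// runDoctor silences the default loggers, sets up doctor logging according to
// --log-file, then either lists the available checks or runs the selected
// checks in order, initialising the database lazily before the first check
// that needs it.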
func runDoctor(ctx *cli.Context) error {
	// Silence the default loggers
	log.DelNamedLogger("console")
	log.DelNamedLogger(log.DEFAULT)

	// Now setup our own
	logFile := ctx.String("log-file")
	if !ctx.IsSet("log-file") {
		logFile = "doctor.log"
	}

	if len(logFile) == 0 {
		log.NewLogger(1000, "doctor", "console", `{"level":"NONE","stacktracelevel":"NONE","colorize":"%t"}`)
	} else if logFile == "-" {
		log.NewLogger(1000, "doctor", "console", `{"level":"trace","stacktracelevel":"NONE"}`)
	} else {
		log.NewLogger(1000, "doctor", "file", fmt.Sprintf(`{"filename":%q,"level":"trace","stacktracelevel":"NONE"}`, logFile))
	}

	// Finally redirect the default golog to here
	golog.SetFlags(0)
	golog.SetPrefix("")
	golog.SetOutput(log.NewLoggerAsWriter("INFO", log.GetLogger(log.DEFAULT)))

	if ctx.IsSet("list") {
		w := tabwriter.NewWriter(os.Stdout, 0, 8, 0, '\t', 0)
		_, _ = w.Write([]byte("Default\tName\tTitle\n"))
		for _, check := range checklist {
			if check.isDefault {
				_, _ = w.Write([]byte{'*'})
			}
			_, _ = w.Write([]byte{'\t'})
			_, _ = w.Write([]byte(check.name))
			_, _ = w.Write([]byte{'\t'})
			_, _ = w.Write([]byte(check.title))
			_, _ = w.Write([]byte{'\n'})
		}
		return w.Flush()
	}

	var checks []check
	if ctx.Bool("all") {
		checks = checklist
	} else if ctx.IsSet("run") {
		addDefault := ctx.Bool("default")
		names := ctx.StringSlice("run")
		for i, name := range names {
			names[i] = strings.ToLower(strings.TrimSpace(name))
		}

		for _, check := range checklist {
			if addDefault && check.isDefault {
				checks = append(checks, check)
				continue
			}
			for _, name := range names {
				if name == check.name {
					checks = append(checks, check)
					break
				}
			}
		}
	} else {
		for _, check := range checklist {
			if check.isDefault {
				checks = append(checks, check)
			}
		}
	}

	dbIsInit := false

	for i, check := range checks {
		if !dbIsInit && !check.skipDatabaseInit {
			// Only open database after the most basic configuration check
			setting.EnableXORMLog = false
			if err := initDBDisableConsole(true); err != nil {
				fmt.Println(err)
				fmt.Println("Check if you are using the right config file. You can use a --config directive to specify one.")
				return nil
			}
			dbIsInit = true
		}
		fmt.Println("[", i+1, "]", check.title)
		messages, err := check.f(ctx)
		for _, message := range messages {
			fmt.Println("-", message)
		}
		if err != nil {
			fmt.Println("Error:", err)
			if check.abortIfFailed {
				return nil
			}
		} else {
			fmt.Println("OK.")
		}
		fmt.Println()
	}
	return nil
}
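
// runDoctorPathInfo verifies that the configuration file exists and that the
// main Gitea paths (repository root, data, custom, work, log and static root)
// are present, of the right type and writable where required; with --fix it
// creates missing directories.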
func runDoctorPathInfo(ctx *cli.Context) ([]string, error) {
	res := make([]string, 0, 10)

	if fi, err := os.Stat(setting.CustomConf); err != nil || !fi.Mode().IsRegular() {
		res = append(res, fmt.Sprintf("Failed to find configuration file at '%s'.", setting.CustomConf))
		res = append(res, fmt.Sprintf("If you've never run Gitea, this is normal and '%s' will be created for you on first run.", setting.CustomConf))
		res = append(res, "Otherwise check that you are running this command from the correct path and/or provide a `--config` parameter.")
		return res, fmt.Errorf("can't proceed without a configuration file")
	}

	setting.NewContext()

	fail := false
	check := func(name, path string, is_dir, required, is_write bool) {
		res = append(res, fmt.Sprintf("%-25s '%s'", name+":", path))
		fi, err := os.Stat(path)
		if err != nil {
			if os.IsNotExist(err) && ctx.Bool("fix") && is_dir {
				if err := os.MkdirAll(path, 0777); err != nil {
					res = append(res, fmt.Sprintf(" ERROR: %v", err))
					fail = true
					return
				}
				fi, err = os.Stat(path)
			}
		}
		if err != nil {
			if required {
				res = append(res, fmt.Sprintf(" ERROR: %v", err))
				fail = true
				return
			}
			res = append(res, fmt.Sprintf(" NOTICE: not accessible (%v)", err))
			return
		}

		if is_dir && !fi.IsDir() {
			res = append(res, " ERROR: not a directory")
			fail = true
			return
		} else if !is_dir && !fi.Mode().IsRegular() {
			res = append(res, " ERROR: not a regular file")
			fail = true
		} else if is_write {
			if err := runDoctorWritableDir(path); err != nil {
				res = append(res, fmt.Sprintf(" ERROR: not writable: %v", err))
				fail = true
			}
		}
	}

	// Note: print paths inside quotes to make any leading/trailing spaces evident
	check("Configuration File Path", setting.CustomConf, false, true, false)
	check("Repository Root Path", setting.RepoRootPath, true, true, true)
	check("Data Root Path", setting.AppDataPath, true, true, true)
	check("Custom File Root Path", setting.CustomPath, true, false, false)
	check("Work directory", setting.AppWorkPath, true, true, false)
	check("Log Root Path", setting.LogRootPath, true, true, true)

	if options.IsDynamic() {
		// Do not check/report on StaticRootPath if data is embedded in Gitea (-tags bindata)
		check("Static File Root Path", setting.StaticRootPath, true, true, false)
	}

	if fail {
		return res, fmt.Errorf("please check your configuration file and try again")
	}

	return res, nil
}
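
// runDoctorWritableDir checks that the given directory is writable by
// creating and then removing a temporary file inside it.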
func runDoctorWritableDir(path string) error {
	// There's no platform-independent way of checking if a directory is writable
	// https://stackoverflow.com/questions/20026320/how-to-tell-if-folder-exists-and-is-writable
	tmpFile, err := ioutil.TempFile(path, "doctors-order")
	if err != nil {
		return err
	}
	if err := util.Remove(tmpFile.Name()); err != nil {
		fmt.Printf("Warning: can't remove temporary file: '%s'\n", tmpFile.Name())
	}
	tmpFile.Close()
	return nil
}
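
// tplCommentPrefix is the prefix of the comment lines Gitea writes into
// authorized_keys; such lines are skipped when comparing the file with
// regenerated content.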
const tplCommentPrefix = `# gitea public key`
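
// runDoctorAuthorizedKeys compares the OpenSSH authorized_keys file managed by
// Gitea with freshly regenerated content and reports (or, with --fix, rewrites)
// the file if any line is missing.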
func runDoctorAuthorizedKeys(ctx *cli.Context) ([]string, error) {
	if setting.SSH.StartBuiltinServer || !setting.SSH.CreateAuthorizedKeysFile {
		return nil, nil
	}

	fPath := filepath.Join(setting.SSH.RootPath, "authorized_keys")
	f, err := os.Open(fPath)
	if err != nil {
		if ctx.Bool("fix") {
			return []string{fmt.Sprintf("Error whilst opening authorized_keys: %v. Attempting regeneration", err)}, models.RewriteAllPublicKeys()
		}
		return nil, err
	}
	defer f.Close()

	linesInAuthorizedKeys := map[string]bool{}

	scanner := bufio.NewScanner(f)
	for scanner.Scan() {
		line := scanner.Text()
		if strings.HasPrefix(line, tplCommentPrefix) {
			continue
		}
		linesInAuthorizedKeys[line] = true
	}
	f.Close()

	// now we regenerate and check if there are any lines missing
	regenerated := &bytes.Buffer{}
	if err := models.RegeneratePublicKeys(regenerated); err != nil {
		return nil, err
	}
	scanner = bufio.NewScanner(regenerated)
	for scanner.Scan() {
		line := scanner.Text()
		if strings.HasPrefix(line, tplCommentPrefix) {
			continue
		}
		if ok := linesInAuthorizedKeys[line]; ok {
			continue
		}
		if ctx.Bool("fix") {
			return []string{"authorized_keys is out of date, attempting regeneration"}, models.RewriteAllPublicKeys()
		}
		return nil, fmt.Errorf(`authorized_keys is out of date and should be regenerated with "gitea admin regenerate keys" or "gitea doctor --run authorized_keys --fix"`)
	}
	return nil, nil
}
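
// runDoctorCheckDBVersion ensures the database schema is up to date with the
// current Gitea version; with --fix it runs the migrations.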
func runDoctorCheckDBVersion(ctx *cli.Context) ([]string, error) {
	if err := models.NewEngine(context.Background(), migrations.EnsureUpToDate); err != nil {
		if ctx.Bool("fix") {
			return []string{fmt.Sprintf("WARN: Got Error %v during ensure up to date", err), "Attempting to migrate to the latest DB version to fix this."}, models.NewEngine(context.Background(), migrations.Migrate)
		}
		return nil, err
	}
	return nil, nil
}
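
// iterateRepositories runs the given function against every repository in the
// database and collects the returned messages.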
func iterateRepositories(each func(*models.Repository) ([]string, error)) ([]string, error) {
	results := []string{}
	err := models.Iterate(
		models.DefaultDBContext(),
		new(models.Repository),
		builder.Gt{"id": 0},
		func(idx int, bean interface{}) error {
			res, err := each(bean.(*models.Repository))
			results = append(results, res...)
			return err
		},
	)
	return results, err
}
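
// iteratePRs runs the given function against every pull request whose base is
// the given repository and collects the returned messages.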
func iteratePRs(repo *models.Repository, each func(*models.Repository, *models.PullRequest) ([]string, error)) ([]string, error) {
	results := []string{}
	err := models.Iterate(
		models.DefaultDBContext(),
		new(models.PullRequest),
		builder.Eq{"base_repo_id": repo.ID},
		func(idx int, bean interface{}) error {
			res, err := each(repo, bean.(*models.PullRequest))
			results = append(results, res...)
			return err
		},
	)
	return results, err
}
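
// runDoctorHooks checks that the delegate hooks of every repository are
// up-to-date and executable; with --fix it regenerates them.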
func runDoctorHooks(ctx *cli.Context) ([]string, error) {
	// Need to iterate across all of the repositories
	return iterateRepositories(func(repo *models.Repository) ([]string, error) {
		results, err := repository.CheckDelegateHooks(repo.RepoPath())
		if err != nil {
			return nil, err
		}
		if len(results) > 0 && ctx.Bool("fix") {
			return []string{fmt.Sprintf("regenerated hooks for %s", repo.FullName())}, repository.CreateDelegateHooks(repo.RepoPath())
		}
		return results, nil
	})
}
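
// runDoctorPRMergeBase recalculates the merge base of every pull request and
// reports any that differ from the stored value; with --fix it updates the
// merge_base column.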
func runDoctorPRMergeBase(ctx *cli.Context) ([]string, error) {
	numRepos := 0
	numPRs := 0
	numPRsUpdated := 0
	results, err := iterateRepositories(func(repo *models.Repository) ([]string, error) {
		numRepos++
		return iteratePRs(repo, func(repo *models.Repository, pr *models.PullRequest) ([]string, error) {
			numPRs++
			results := []string{}
			pr.BaseRepo = repo
			repoPath := repo.RepoPath()

			oldMergeBase := pr.MergeBase

			if !pr.HasMerged {
				var err error
				pr.MergeBase, err = git.NewCommand("merge-base", "--", pr.BaseBranch, pr.GetGitRefName()).RunInDir(repoPath)
				if err != nil {
					var err2 error
					pr.MergeBase, err2 = git.NewCommand("rev-parse", git.BranchPrefix+pr.BaseBranch).RunInDir(repoPath)
					if err2 != nil {
						results = append(results, fmt.Sprintf("WARN: Unable to get merge base for PR ID %d, #%d onto %s in %s/%s", pr.ID, pr.Index, pr.BaseBranch, pr.BaseRepo.OwnerName, pr.BaseRepo.Name))
						log.Error("Unable to get merge base for PR ID %d, Index %d in %s/%s. Error: %v & %v", pr.ID, pr.Index, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, err, err2)
						return results, nil
					}
				}
			} else {
				parentsString, err := git.NewCommand("rev-list", "--parents", "-n", "1", pr.MergedCommitID).RunInDir(repoPath)
				if err != nil {
					results = append(results, fmt.Sprintf("WARN: Unable to get parents for merged PR ID %d, #%d onto %s in %s/%s", pr.ID, pr.Index, pr.BaseBranch, pr.BaseRepo.OwnerName, pr.BaseRepo.Name))
					log.Error("Unable to get parents for merged PR ID %d, Index %d in %s/%s. Error: %v", pr.ID, pr.Index, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, err)
					return results, nil
				}
				parents := strings.Split(strings.TrimSpace(parentsString), " ")
				if len(parents) < 2 {
					return results, nil
				}

				args := append([]string{"merge-base", "--"}, parents[1:]...)
				args = append(args, pr.GetGitRefName())

				pr.MergeBase, err = git.NewCommand(args...).RunInDir(repoPath)
				if err != nil {
					results = append(results, fmt.Sprintf("WARN: Unable to get merge base for merged PR ID %d, #%d onto %s in %s/%s", pr.ID, pr.Index, pr.BaseBranch, pr.BaseRepo.OwnerName, pr.BaseRepo.Name))
					log.Error("Unable to get merge base for merged PR ID %d, Index %d in %s/%s. Error: %v", pr.ID, pr.Index, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, err)
					return results, nil
				}
			}
			pr.MergeBase = strings.TrimSpace(pr.MergeBase)
			if pr.MergeBase != oldMergeBase {
				if ctx.Bool("fix") {
					if err := pr.UpdateCols("merge_base"); err != nil {
						return results, err
					}
				} else {
					results = append(results, fmt.Sprintf("#%d onto %s in %s/%s: MergeBase should be %s but is %s", pr.Index, pr.BaseBranch, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, oldMergeBase, pr.MergeBase))
				}
				numPRsUpdated++
			}
			return results, nil
		})
	})

	if ctx.Bool("fix") {
		results = append(results, fmt.Sprintf("%d PR mergebases updated of %d PRs total in %d repos", numPRsUpdated, numPRs, numRepos))
	} else {
		if numPRsUpdated > 0 && err == nil {
			return results, fmt.Errorf("%d PRs with incorrect mergebases of %d PRs total in %d repos", numPRsUpdated, numPRs, numRepos)
		}
		results = append(results, fmt.Sprintf("%d PRs with incorrect mergebases of %d PRs total in %d repos", numPRsUpdated, numPRs, numRepos))
	}

	return results, err
}
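
// runDoctorUserStarNum recalculates the stars number for all users via
// models.DoctorUserStarNum.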
func runDoctorUserStarNum(ctx *cli.Context) ([]string, error) {
	return nil, models.DoctorUserStarNum()
}
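
// runDoctorScriptType checks that the configured SCRIPT_TYPE is available on
// the current PATH.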
func runDoctorScriptType(ctx *cli.Context) ([]string, error) {
	path, err := exec.LookPath(setting.ScriptType)
	if err != nil {
		return []string{fmt.Sprintf("ScriptType %s is not on the current PATH", setting.ScriptType)}, err
	}
	return []string{fmt.Sprintf("ScriptType %s is on the current PATH at %s", setting.ScriptType, path)}, nil
}
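
// runDoctorCheckDBConsistency looks for orphaned database rows (labels, issues,
// pull requests, tracked times) and repositories with a NULL is_archived flag,
// deleting or fixing them when --fix is given.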
func runDoctorCheckDBConsistency(ctx *cli.Context) ([]string, error) {
	var results []string

	// make sure the DB version is up to date
	if err := models.NewEngine(context.Background(), migrations.EnsureUpToDate); err != nil {
		return nil, fmt.Errorf("model version on the database does not match the current Gitea version. Model consistency will not be checked until the database is upgraded")
	}

	// find labels without existing repo or org
	count, err := models.CountOrphanedLabels()
	if err != nil {
		return nil, err
	}
	if count > 0 {
		if ctx.Bool("fix") {
			if err = models.DeleteOrphanedLabels(); err != nil {
				return nil, err
			}
			results = append(results, fmt.Sprintf("%d labels without existing repository/organisation deleted", count))
		} else {
			results = append(results, fmt.Sprintf("%d labels without existing repository/organisation", count))
		}
	}

	// find issues without existing repository
	count, err = models.CountOrphanedIssues()
	if err != nil {
		return nil, err
	}
	if count > 0 {
		if ctx.Bool("fix") {
			if err = models.DeleteOrphanedIssues(); err != nil {
				return nil, err
			}
			results = append(results, fmt.Sprintf("%d issues without existing repository deleted", count))
		} else {
			results = append(results, fmt.Sprintf("%d issues without existing repository", count))
		}
	}

	// find pulls without existing issues
	count, err = models.CountOrphanedObjects("pull_request", "issue", "pull_request.issue_id=issue.id")
	if err != nil {
		return nil, err
	}
	if count > 0 {
		if ctx.Bool("fix") {
			if err = models.DeleteOrphanedObjects("pull_request", "issue", "pull_request.issue_id=issue.id"); err != nil {
				return nil, err
			}
			results = append(results, fmt.Sprintf("%d pull requests without existing issue deleted", count))
		} else {
			results = append(results, fmt.Sprintf("%d pull requests without existing issue", count))
		}
	}

	// find tracked times without existing issues/pulls
	count, err = models.CountOrphanedObjects("tracked_time", "issue", "tracked_time.issue_id=issue.id")
	if err != nil {
		return nil, err
	}
	if count > 0 {
		if ctx.Bool("fix") {
			if err = models.DeleteOrphanedObjects("tracked_time", "issue", "tracked_time.issue_id=issue.id"); err != nil {
				return nil, err
			}
			results = append(results, fmt.Sprintf("%d tracked times without existing issue deleted", count))
		} else {
			results = append(results, fmt.Sprintf("%d tracked times without existing issue", count))
		}
	}

	// find repositories with a NULL is_archived value
	count, err = models.CountNullArchivedRepository()
	if err != nil {
		return nil, err
	}
	if count > 0 {
		if ctx.Bool("fix") {
			updatedCount, err := models.FixNullArchivedRepository()
			if err != nil {
				return nil, err
			}
			results = append(results, fmt.Sprintf("%d repositories with null is_archived updated", updatedCount))
		} else {
			results = append(results, fmt.Sprintf("%d repositories with null is_archived", count))
		}
	}

	// TODO: function to recalculate all counters
	return results, nil
}