diff --git a/docs/content/usage/actions/act-runner.en-us.md b/docs/content/usage/actions/act-runner.en-us.md
index 1be81c5c78d15..e2915be365149 100644
--- a/docs/content/usage/actions/act-runner.en-us.md
+++ b/docs/content/usage/actions/act-runner.en-us.md
@@ -81,7 +81,7 @@ docker run --entrypoint="" --rm -it gitea/act_runner:latest act_runner generate-
 When you are using the docker image, you can specify the configuration file by using the `CONFIG_FILE` environment variable. Make sure that the file is mounted into the container as a volume:
 
 ```bash
-docker run -v $(pwd)/config.yaml:/config.yaml -e CONFIG_FILE=/config.yaml ...
+docker run -v $PWD/config.yaml:/config.yaml -e CONFIG_FILE=/config.yaml ...
 ```
 
 You may notice the commands above are both incomplete, because it is not the time to run the act runner yet.
@@ -157,8 +157,8 @@ If you are using the docker image, behaviour will be slightly different. Registr
 
 ```bash
 docker run \
-    -v $(pwd)/config.yaml:/config.yaml \
-    -v $(pwd)/data:/data \
+    -v $PWD/config.yaml:/config.yaml \
+    -v $PWD/data:/data \
     -v /var/run/docker.sock:/var/run/docker.sock \
     -e CONFIG_FILE=/config.yaml \
     -e GITEA_INSTANCE_URL= \
diff --git a/docs/content/usage/multi-factor-authentication.en-us.md b/docs/content/usage/multi-factor-authentication.en-us.md
new file mode 100644
index 0000000000000..16b57b7bdca74
--- /dev/null
+++ b/docs/content/usage/multi-factor-authentication.en-us.md
@@ -0,0 +1,35 @@
+---
+date: "2023-08-22T14:21:00+08:00"
+title: "Usage: Multi-factor Authentication (MFA)"
+slug: "multi-factor-authentication"
+weight: 15
+toc: false
+draft: false
+menu:
+  sidebar:
+    parent: "usage"
+    name: "Multi-factor Authentication (MFA)"
+    weight: 15
+    identifier: "multi-factor-authentication"
+---
+
+# Multi-factor Authentication (MFA)
+
+Multi-factor Authentication (also referred to as MFA or 2FA) enhances security by requiring a time-sensitive set of credentials in addition to a password.
+If a password were later to be compromised, logging into Gitea would not be possible without the additional credentials, and the account would remain secure.
+Gitea supports both TOTP (Time-based One-Time Password) tokens and FIDO-based hardware keys using the WebAuthn API.
+
+MFA can be configured within the "Security" tab of the user settings page.
+
+## MFA Considerations
+
+Enabling MFA for a user affects how the Git HTTP protocol can be used with the Git CLI.
+This interface does not support MFA, so a password alone will no longer work for authentication while MFA is enabled.
+If SSH is not an option for Git operations, an access token can be generated within the "Applications" tab of the user settings page.
+This access token can be used in place of a password to allow the Git CLI to function over HTTP.
+
+> **Warning** - By its very nature, an access token sidesteps the security benefits of MFA.
+> It must be kept secure and should only be used as a last resort.
+
+The Gitea API supports providing the relevant TOTP password in the `X-Gitea-OTP` header, as described in [API Usage](development/api-usage.md).
+This should be used instead of an access token where possible.
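For the `X-Gitea-OTP` header mentioned above, a minimal sketch of an API request in Go; the instance URL, credentials, and one-time code are placeholders, and an access token sent as `Authorization: token ...` would work the same way without the OTP header:

```go
package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Placeholder instance and account; /api/v1/user returns the authenticated user.
	req, err := http.NewRequest(http.MethodGet, "https://gitea.example.com/api/v1/user", nil)
	if err != nil {
		panic(err)
	}
	req.SetBasicAuth("some-user", "some-password")
	// The current TOTP code from the authenticator app, required once MFA is enabled.
	req.Header.Set("X-Gitea-OTP", "123456")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(body))
}
```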
diff --git a/go.mod b/go.mod index 968c0663e00d2..1996d29a84bd1 100644 --- a/go.mod +++ b/go.mod @@ -90,6 +90,7 @@ require ( github.com/prometheus/client_golang v1.16.0 github.com/quasoft/websspi v1.1.2 github.com/redis/go-redis/v9 v9.0.5 + github.com/robfig/cron/v3 v3.0.1 github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 github.com/sassoftware/go-rpmutils v0.2.0 github.com/sergi/go-diff v1.3.1 @@ -254,7 +255,6 @@ require ( github.com/rhysd/actionlint v1.6.25 // indirect github.com/rivo/uniseg v0.4.4 // indirect github.com/robfig/cron v1.2.0 // indirect - github.com/robfig/cron/v3 v3.0.1 // indirect github.com/rogpeppe/go-internal v1.11.0 // indirect github.com/rs/xid v1.5.0 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect diff --git a/models/actions/schedule.go b/models/actions/schedule.go new file mode 100644 index 0000000000000..b0bc40dadc7dd --- /dev/null +++ b/models/actions/schedule.go @@ -0,0 +1,120 @@ +// Copyright 2023 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package actions + +import ( + "context" + "time" + + "code.gitea.io/gitea/models/db" + repo_model "code.gitea.io/gitea/models/repo" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/timeutil" + webhook_module "code.gitea.io/gitea/modules/webhook" + + "github.com/robfig/cron/v3" +) + +// ActionSchedule represents a schedule of a workflow file +type ActionSchedule struct { + ID int64 + Title string + Specs []string + RepoID int64 `xorm:"index"` + Repo *repo_model.Repository `xorm:"-"` + OwnerID int64 `xorm:"index"` + WorkflowID string + TriggerUserID int64 + TriggerUser *user_model.User `xorm:"-"` + Ref string + CommitSHA string + Event webhook_module.HookEventType + EventPayload string `xorm:"LONGTEXT"` + Content []byte + Created timeutil.TimeStamp `xorm:"created"` + Updated timeutil.TimeStamp `xorm:"updated"` +} + +func init() { + db.RegisterModel(new(ActionSchedule)) +} + +// GetSchedulesMapByIDs returns the schedules by given id slice. +func GetSchedulesMapByIDs(ids []int64) (map[int64]*ActionSchedule, error) { + schedules := make(map[int64]*ActionSchedule, len(ids)) + return schedules, db.GetEngine(db.DefaultContext).In("id", ids).Find(&schedules) +} + +// GetReposMapByIDs returns the repos by given id slice. +func GetReposMapByIDs(ids []int64) (map[int64]*repo_model.Repository, error) { + repos := make(map[int64]*repo_model.Repository, len(ids)) + return repos, db.GetEngine(db.DefaultContext).In("id", ids).Find(&repos) +} + +var cronParser = cron.NewParser(cron.Minute | cron.Hour | cron.Dom | cron.Month | cron.Dow | cron.Descriptor) + +// CreateScheduleTask creates new schedule task. 
+func CreateScheduleTask(ctx context.Context, rows []*ActionSchedule) error { + // Return early if there are no rows to insert + if len(rows) == 0 { + return nil + } + + // Begin transaction + ctx, committer, err := db.TxContext(ctx) + if err != nil { + return err + } + defer committer.Close() + + // Loop through each schedule row + for _, row := range rows { + // Create new schedule row + if err = db.Insert(ctx, row); err != nil { + return err + } + + // Loop through each schedule spec and create a new spec row + now := time.Now() + + for _, spec := range row.Specs { + // Parse the spec and check for errors + schedule, err := cronParser.Parse(spec) + if err != nil { + continue // skip to the next spec if there's an error + } + + // Insert the new schedule spec row + if err = db.Insert(ctx, &ActionScheduleSpec{ + RepoID: row.RepoID, + ScheduleID: row.ID, + Spec: spec, + Next: timeutil.TimeStamp(schedule.Next(now).Unix()), + }); err != nil { + return err + } + } + } + + // Commit transaction + return committer.Commit() +} + +func DeleteScheduleTaskByRepo(ctx context.Context, id int64) error { + ctx, committer, err := db.TxContext(ctx) + if err != nil { + return err + } + defer committer.Close() + + if _, err := db.GetEngine(ctx).Delete(&ActionSchedule{RepoID: id}); err != nil { + return err + } + + if _, err := db.GetEngine(ctx).Delete(&ActionScheduleSpec{RepoID: id}); err != nil { + return err + } + + return committer.Commit() +} diff --git a/models/actions/schedule_list.go b/models/actions/schedule_list.go new file mode 100644 index 0000000000000..e873c05ec3179 --- /dev/null +++ b/models/actions/schedule_list.go @@ -0,0 +1,94 @@ +// Copyright 2023 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package actions + +import ( + "context" + + "code.gitea.io/gitea/models/db" + repo_model "code.gitea.io/gitea/models/repo" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/container" + + "xorm.io/builder" +) + +type ScheduleList []*ActionSchedule + +// GetUserIDs returns a slice of user's id +func (schedules ScheduleList) GetUserIDs() []int64 { + ids := make(container.Set[int64], len(schedules)) + for _, schedule := range schedules { + ids.Add(schedule.TriggerUserID) + } + return ids.Values() +} + +func (schedules ScheduleList) GetRepoIDs() []int64 { + ids := make(container.Set[int64], len(schedules)) + for _, schedule := range schedules { + ids.Add(schedule.RepoID) + } + return ids.Values() +} + +func (schedules ScheduleList) LoadTriggerUser(ctx context.Context) error { + userIDs := schedules.GetUserIDs() + users := make(map[int64]*user_model.User, len(userIDs)) + if err := db.GetEngine(ctx).In("id", userIDs).Find(&users); err != nil { + return err + } + for _, schedule := range schedules { + if schedule.TriggerUserID == user_model.ActionsUserID { + schedule.TriggerUser = user_model.NewActionsUser() + } else { + schedule.TriggerUser = users[schedule.TriggerUserID] + } + } + return nil +} + +func (schedules ScheduleList) LoadRepos() error { + repoIDs := schedules.GetRepoIDs() + repos, err := repo_model.GetRepositoriesMapByIDs(repoIDs) + if err != nil { + return err + } + for _, schedule := range schedules { + schedule.Repo = repos[schedule.RepoID] + } + return nil +} + +type FindScheduleOptions struct { + db.ListOptions + RepoID int64 + OwnerID int64 +} + +func (opts FindScheduleOptions) toConds() builder.Cond { + cond := builder.NewCond() + if opts.RepoID > 0 { + cond = cond.And(builder.Eq{"repo_id": opts.RepoID}) + } + if opts.OwnerID > 0 { 
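A standalone sketch of the spec handling in `CreateScheduleTask` above: the shared `cronParser` accepts five-field crontab entries plus descriptors such as `@daily`, and an unparsable spec is skipped rather than aborting the insert. The spec strings below are arbitrary examples.

```go
package main

import (
	"fmt"
	"time"

	"github.com/robfig/cron/v3"
)

func main() {
	// Same parser options as the package-level cronParser above.
	parser := cron.NewParser(cron.Minute | cron.Hour | cron.Dom | cron.Month | cron.Dow | cron.Descriptor)

	now := time.Now()
	for _, spec := range []string{"30 5 * * 1", "@daily", "not-a-spec"} {
		schedule, err := parser.Parse(spec)
		if err != nil {
			fmt.Printf("skip %q: %v\n", spec, err) // mirrors the continue in CreateScheduleTask
			continue
		}
		// This next-run time is what CreateScheduleTask persists on the ActionScheduleSpec row.
		fmt.Printf("%q next fires at %s\n", spec, schedule.Next(now).Format(time.RFC3339))
	}
}
```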
+ cond = cond.And(builder.Eq{"owner_id": opts.OwnerID}) + } + + return cond +} + +func FindSchedules(ctx context.Context, opts FindScheduleOptions) (ScheduleList, int64, error) { + e := db.GetEngine(ctx).Where(opts.toConds()) + if !opts.ListAll && opts.PageSize > 0 && opts.Page >= 1 { + e.Limit(opts.PageSize, (opts.Page-1)*opts.PageSize) + } + var schedules ScheduleList + total, err := e.Desc("id").FindAndCount(&schedules) + return schedules, total, err +} + +func CountSchedules(ctx context.Context, opts FindScheduleOptions) (int64, error) { + return db.GetEngine(ctx).Where(opts.toConds()).Count(new(ActionSchedule)) +} diff --git a/models/actions/schedule_spec.go b/models/actions/schedule_spec.go new file mode 100644 index 0000000000000..91240459a044f --- /dev/null +++ b/models/actions/schedule_spec.go @@ -0,0 +1,50 @@ +// Copyright 2023 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package actions + +import ( + "context" + + "code.gitea.io/gitea/models/db" + repo_model "code.gitea.io/gitea/models/repo" + "code.gitea.io/gitea/modules/timeutil" + + "github.com/robfig/cron/v3" +) + +// ActionScheduleSpec represents a schedule spec of a workflow file +type ActionScheduleSpec struct { + ID int64 + RepoID int64 `xorm:"index"` + Repo *repo_model.Repository `xorm:"-"` + ScheduleID int64 `xorm:"index"` + Schedule *ActionSchedule `xorm:"-"` + + // Next time the job will run, or the zero time if Cron has not been + // started or this entry's schedule is unsatisfiable + Next timeutil.TimeStamp `xorm:"index"` + // Prev is the last time this job was run, or the zero time if never. + Prev timeutil.TimeStamp + Spec string + + Created timeutil.TimeStamp `xorm:"created"` + Updated timeutil.TimeStamp `xorm:"updated"` +} + +func (s *ActionScheduleSpec) Parse() (cron.Schedule, error) { + return cronParser.Parse(s.Spec) +} + +func init() { + db.RegisterModel(new(ActionScheduleSpec)) +} + +func UpdateScheduleSpec(ctx context.Context, spec *ActionScheduleSpec, cols ...string) error { + sess := db.GetEngine(ctx).ID(spec.ID) + if len(cols) > 0 { + sess.Cols(cols...) + } + _, err := sess.Update(spec) + return err +} diff --git a/models/actions/schedule_spec_list.go b/models/actions/schedule_spec_list.go new file mode 100644 index 0000000000000..d379490b4e91c --- /dev/null +++ b/models/actions/schedule_spec_list.go @@ -0,0 +1,106 @@ +// Copyright 2023 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package actions + +import ( + "context" + + "code.gitea.io/gitea/models/db" + repo_model "code.gitea.io/gitea/models/repo" + "code.gitea.io/gitea/modules/container" + + "xorm.io/builder" +) + +type SpecList []*ActionScheduleSpec + +func (specs SpecList) GetScheduleIDs() []int64 { + ids := make(container.Set[int64], len(specs)) + for _, spec := range specs { + ids.Add(spec.ScheduleID) + } + return ids.Values() +} + +func (specs SpecList) LoadSchedules() error { + scheduleIDs := specs.GetScheduleIDs() + schedules, err := GetSchedulesMapByIDs(scheduleIDs) + if err != nil { + return err + } + for _, spec := range specs { + spec.Schedule = schedules[spec.ScheduleID] + } + + repoIDs := specs.GetRepoIDs() + repos, err := GetReposMapByIDs(repoIDs) + if err != nil { + return err + } + for _, spec := range specs { + spec.Repo = repos[spec.RepoID] + } + + return nil +} + +func (specs SpecList) GetRepoIDs() []int64 { + ids := make(container.Set[int64], len(specs)) + for _, spec := range specs { + ids.Add(spec.RepoID) + } + return ids.Values() +} + +func (specs SpecList) LoadRepos() error { + repoIDs := specs.GetRepoIDs() + repos, err := repo_model.GetRepositoriesMapByIDs(repoIDs) + if err != nil { + return err + } + for _, spec := range specs { + spec.Repo = repos[spec.RepoID] + } + return nil +} + +type FindSpecOptions struct { + db.ListOptions + RepoID int64 + Next int64 +} + +func (opts FindSpecOptions) toConds() builder.Cond { + cond := builder.NewCond() + if opts.RepoID > 0 { + cond = cond.And(builder.Eq{"repo_id": opts.RepoID}) + } + + if opts.Next > 0 { + cond = cond.And(builder.Lte{"next": opts.Next}) + } + + return cond +} + +func FindSpecs(ctx context.Context, opts FindSpecOptions) (SpecList, int64, error) { + e := db.GetEngine(ctx).Where(opts.toConds()) + if opts.PageSize > 0 && opts.Page >= 1 { + e.Limit(opts.PageSize, (opts.Page-1)*opts.PageSize) + } + var specs SpecList + total, err := e.Desc("id").FindAndCount(&specs) + if err != nil { + return nil, 0, err + } + + if err := specs.LoadSchedules(); err != nil { + return nil, 0, err + } + return specs, total, nil +} + +func CountSpecs(ctx context.Context, opts FindSpecOptions) (int64, error) { + return db.GetEngine(ctx).Where(opts.toConds()).Count(new(ActionScheduleSpec)) +} diff --git a/models/issues/comment.go b/models/issues/comment.go index c2825208c4482..1601109687bad 100644 --- a/models/issues/comment.go +++ b/models/issues/comment.go @@ -23,6 +23,7 @@ import ( "code.gitea.io/gitea/modules/references" "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/timeutil" + "code.gitea.io/gitea/modules/translation" "code.gitea.io/gitea/modules/util" "xorm.io/builder" @@ -181,40 +182,32 @@ func (t CommentType) HasAttachmentSupport() bool { return false } -// RoleDescriptor defines comment tag type -type RoleDescriptor int +// RoleInRepo presents the user's participation in the repo +type RoleInRepo string + +// RoleDescriptor defines comment "role" tags +type RoleDescriptor struct { + IsPoster bool + RoleInRepo RoleInRepo +} // Enumerate all the role tags. const ( - RoleDescriptorNone RoleDescriptor = iota - RoleDescriptorPoster - RoleDescriptorWriter - RoleDescriptorOwner + RoleRepoOwner RoleInRepo = "owner" + RoleRepoMember RoleInRepo = "member" + RoleRepoCollaborator RoleInRepo = "collaborator" + RoleRepoFirstTimeContributor RoleInRepo = "first_time_contributor" + RoleRepoContributor RoleInRepo = "contributor" ) -// WithRole enable a specific tag on the RoleDescriptor. 
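The `Next`/`Prev` columns and the `Next` filter in `FindSpecOptions` exist so that a periodic task can pick up due specs and advance them. The trigger itself is wired up elsewhere and is not part of this excerpt, so the following is only a rough sketch of how the model functions above compose:

```go
package sketch

import (
	"context"
	"time"

	actions_model "code.gitea.io/gitea/models/actions"
	"code.gitea.io/gitea/modules/timeutil"
)

// fireDueSchedules is a hypothetical poller, not code from this change.
func fireDueSchedules(ctx context.Context) error {
	now := time.Now()
	// FindSpecs also loads the owning schedules via LoadSchedules.
	specs, _, err := actions_model.FindSpecs(ctx, actions_model.FindSpecOptions{
		Next: now.Unix(), // only specs whose next run is already due
	})
	if err != nil {
		return err
	}
	for _, spec := range specs {
		// ... enqueue a workflow run for spec.Schedule here ...

		schedule, err := spec.Parse()
		if err != nil {
			continue // skip unparsable specs, mirroring CreateScheduleTask
		}
		spec.Prev = spec.Next
		spec.Next = timeutil.TimeStamp(schedule.Next(now).Unix())
		if err := actions_model.UpdateScheduleSpec(ctx, spec, "prev", "next"); err != nil {
			return err
		}
	}
	return nil
}
```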
-func (rd RoleDescriptor) WithRole(role RoleDescriptor) RoleDescriptor { - return rd | (1 << role) -} - -func stringToRoleDescriptor(role string) RoleDescriptor { - switch role { - case "Poster": - return RoleDescriptorPoster - case "Writer": - return RoleDescriptorWriter - case "Owner": - return RoleDescriptorOwner - default: - return RoleDescriptorNone - } +// LocaleString returns the locale string name of the role +func (r RoleInRepo) LocaleString(lang translation.Locale) string { + return lang.Tr("repo.issues.role." + string(r)) } -// HasRole returns if a certain role is enabled on the RoleDescriptor. -func (rd RoleDescriptor) HasRole(role string) bool { - roleDescriptor := stringToRoleDescriptor(role) - bitValue := rd & (1 << roleDescriptor) - return (bitValue > 0) +// LocaleHelper returns the locale tooltip of the role +func (r RoleInRepo) LocaleHelper(lang translation.Locale) string { + return lang.Tr("repo.issues.role." + string(r) + "_helper") } // Comment represents a comment in commit and issue page. diff --git a/models/issues/label.go b/models/issues/label.go index d7a116337e379..98fdd7b8146e9 100644 --- a/models/issues/label.go +++ b/models/issues/label.go @@ -115,10 +115,11 @@ func (l *Label) CalOpenIssues() { // SetArchived set the label as archived func (l *Label) SetArchived(isArchived bool) { - if isArchived && l.ArchivedUnix.IsZero() { - l.ArchivedUnix = timeutil.TimeStampNow() - } else { + if !isArchived { l.ArchivedUnix = timeutil.TimeStamp(0) + } else if isArchived && l.ArchivedUnix.IsZero() { + // Only change the date when it is newly archived. + l.ArchivedUnix = timeutil.TimeStampNow() } } diff --git a/models/issues/pull_list.go b/models/issues/pull_list.go index 3b2416900b5de..c4506ef15085e 100644 --- a/models/issues/pull_list.go +++ b/models/issues/pull_list.go @@ -199,3 +199,16 @@ func (prs PullRequestList) GetIssueIDs() []int64 { } return issueIDs } + +// HasMergedPullRequestInRepo returns whether the user(poster) has merged pull-request in the repo +func HasMergedPullRequestInRepo(ctx context.Context, repoID, posterID int64) (bool, error) { + return db.GetEngine(ctx). + Join("INNER", "pull_request", "pull_request.issue_id = issue.id"). + Where("repo_id=?", repoID). + And("poster_id=?", posterID). + And("is_pull=?", true). + And("pull_request.has_merged=?", true). + Select("issue.id"). + Limit(1). + Get(new(Issue)) +} diff --git a/models/migrations/migrations.go b/models/migrations/migrations.go index 87c597b573a0d..9f4acda23699a 100644 --- a/models/migrations/migrations.go +++ b/models/migrations/migrations.go @@ -526,6 +526,8 @@ var migrations = []Migration{ NewMigration("Allow archiving labels", v1_21.AddArchivedUnixColumInLabelTable), // v272 -> v273 NewMigration("Add Version to ActionRun table", v1_21.AddVersionToActionRunTable), + // v273 -> v274 + NewMigration("Add Action Schedule Table", v1_21.AddActionScheduleTable), } // GetCurrentDBVersion returns the current db version diff --git a/models/migrations/v1_21/v273.go b/models/migrations/v1_21/v273.go new file mode 100644 index 0000000000000..61c79f4a763d3 --- /dev/null +++ b/models/migrations/v1_21/v273.go @@ -0,0 +1,45 @@ +// Copyright 2023 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package v1_21 //nolint +import ( + "code.gitea.io/gitea/modules/timeutil" + + "xorm.io/xorm" +) + +func AddActionScheduleTable(x *xorm.Engine) error { + type ActionSchedule struct { + ID int64 + Title string + Specs []string + RepoID int64 `xorm:"index"` + OwnerID int64 `xorm:"index"` + WorkflowID string + TriggerUserID int64 + Ref string + CommitSHA string + Event string + EventPayload string `xorm:"LONGTEXT"` + Content []byte + Created timeutil.TimeStamp `xorm:"created"` + Updated timeutil.TimeStamp `xorm:"updated"` + } + + type ActionScheduleSpec struct { + ID int64 + RepoID int64 `xorm:"index"` + ScheduleID int64 `xorm:"index"` + Spec string + Next timeutil.TimeStamp `xorm:"index"` + Prev timeutil.TimeStamp + + Created timeutil.TimeStamp `xorm:"created"` + Updated timeutil.TimeStamp `xorm:"updated"` + } + + return x.Sync( + new(ActionSchedule), + new(ActionScheduleSpec), + ) +} diff --git a/models/repo.go b/models/repo.go index 7579d2ad7348b..74a88d4c48b13 100644 --- a/models/repo.go +++ b/models/repo.go @@ -170,6 +170,8 @@ func DeleteRepository(doer *user_model.User, uid, repoID int64) error { &actions_model.ActionRunJob{RepoID: repoID}, &actions_model.ActionRun{RepoID: repoID}, &actions_model.ActionRunner{RepoID: repoID}, + &actions_model.ActionScheduleSpec{RepoID: repoID}, + &actions_model.ActionSchedule{RepoID: repoID}, &actions_model.ActionArtifact{RepoID: repoID}, ); err != nil { return fmt.Errorf("deleteBeans: %w", err) diff --git a/models/repo/release.go b/models/repo/release.go index a00585111e126..191475d541b94 100644 --- a/models/repo/release.go +++ b/models/repo/release.go @@ -133,6 +133,11 @@ func (r *Release) HTMLURL() string { return r.Repo.HTMLURL() + "/releases/tag/" + util.PathEscapeSegments(r.TagName) } +// APIUploadURL the api url to upload assets to a release. release must have attributes loaded +func (r *Release) APIUploadURL() string { + return r.APIURL() + "/assets" +} + // Link the relative url for a release on the web UI. release must have attributes loaded func (r *Release) Link() string { return r.Repo.Link() + "/releases/tag/" + util.PathEscapeSegments(r.TagName) diff --git a/models/repo/repo_list.go b/models/repo/repo_list.go index b8427bec4eb45..a0485ed8d417d 100644 --- a/models/repo/repo_list.go +++ b/models/repo/repo_list.go @@ -130,6 +130,10 @@ type SearchRepoOptions struct { // True -> include just collaborative // False -> include just non-collaborative Collaborate util.OptionalBool + // What type of unit the user can be collaborative in, + // it is ignored if Collaborate is False. + // TypeInvalid means any unit type. + UnitType unit.Type // None -> include forks AND non-forks // True -> include just forks // False -> include just non-forks @@ -382,19 +386,25 @@ func SearchRepositoryCondition(opts *SearchRepoOptions) builder.Cond { if opts.Collaborate != util.OptionalBoolFalse { // A Collaboration is: - collaborateCond := builder.And( - // 1. Repository we don't own - builder.Neq{"owner_id": opts.OwnerID}, - // 2. But we can see because of: - builder.Or( - // A. We have unit independent access - UserAccessRepoCond("`repository`.id", opts.OwnerID), - // B. We are in a team for - UserOrgTeamRepoCond("`repository`.id", opts.OwnerID), - // C. Public repositories in organizations that we are member of - userOrgPublicRepoCondPrivate(opts.OwnerID), - ), - ) + + collaborateCond := builder.NewCond() + // 1. Repository we don't own + collaborateCond = collaborateCond.And(builder.Neq{"owner_id": opts.OwnerID}) + // 2. 
But we can see because of: + { + userAccessCond := builder.NewCond() + // A. We have unit independent access + userAccessCond = userAccessCond.Or(UserAccessRepoCond("`repository`.id", opts.OwnerID)) + // B. We are in a team for + if opts.UnitType == unit.TypeInvalid { + userAccessCond = userAccessCond.Or(UserOrgTeamRepoCond("`repository`.id", opts.OwnerID)) + } else { + userAccessCond = userAccessCond.Or(userOrgTeamUnitRepoCond("`repository`.id", opts.OwnerID, opts.UnitType)) + } + // C. Public repositories in organizations that we are member of + userAccessCond = userAccessCond.Or(userOrgPublicRepoCondPrivate(opts.OwnerID)) + collaborateCond = collaborateCond.And(userAccessCond) + } if !opts.Private { collaborateCond = collaborateCond.And(builder.Expr("owner_id NOT IN (SELECT org_id FROM org_user WHERE org_user.uid = ? AND org_user.is_public = ?)", opts.OwnerID, false)) } diff --git a/models/secret/secret.go b/models/secret/secret.go index c9c95e82d3024..410cb3770ebdf 100644 --- a/models/secret/secret.go +++ b/models/secret/secret.go @@ -6,12 +6,14 @@ package secret import ( "context" "errors" + "fmt" "strings" "code.gitea.io/gitea/models/db" secret_module "code.gitea.io/gitea/modules/secret" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" + "code.gitea.io/gitea/modules/util" "xorm.io/builder" ) @@ -26,6 +28,25 @@ type Secret struct { CreatedUnix timeutil.TimeStamp `xorm:"created NOT NULL"` } +// ErrSecretNotFound represents a "secret not found" error. +type ErrSecretNotFound struct { + Name string +} + +// IsErrSecretNotFound checks if an error is a ErrSecretNotFound. +func IsErrSecretNotFound(err error) bool { + _, ok := err.(ErrSecretNotFound) + return ok +} + +func (err ErrSecretNotFound) Error() string { + return fmt.Sprintf("secret was not found [name: %s]", err.Name) +} + +func (err ErrSecretNotFound) Unwrap() error { + return util.ErrNotExist +} + // newSecret Creates a new already encrypted secret func newSecret(ownerID, repoID int64, name, data string) *Secret { return &Secret{ @@ -93,3 +114,49 @@ func FindSecrets(ctx context.Context, opts FindSecretsOptions) ([]*Secret, error func CountSecrets(ctx context.Context, opts *FindSecretsOptions) (int64, error) { return db.GetEngine(ctx).Where(opts.toConds()).Count(new(Secret)) } + +// UpdateSecret changes org or user reop secret. +func UpdateSecret(ctx context.Context, orgID, repoID int64, name, data string) error { + sc := new(Secret) + name = strings.ToUpper(name) + has, err := db.GetEngine(ctx). + Where("owner_id=?", orgID). + And("repo_id=?", repoID). + And("name=?", name). + Get(sc) + if err != nil { + return err + } else if !has { + return ErrSecretNotFound{Name: name} + } + + encrypted, err := secret_module.EncryptSecret(setting.SecretKey, data) + if err != nil { + return err + } + + sc.Data = encrypted + _, err = db.GetEngine(ctx).ID(sc.ID).Cols("data").Update(sc) + return err +} + +// DeleteSecret deletes secret from an organization. +func DeleteSecret(ctx context.Context, orgID, repoID int64, name string) error { + sc := new(Secret) + has, err := db.GetEngine(ctx). + Where("owner_id=?", orgID). + And("repo_id=?", repoID). + And("name=?", strings.ToUpper(name)). 
+ Get(sc) + if err != nil { + return err + } else if !has { + return ErrSecretNotFound{Name: name} + } + + if _, err := db.GetEngine(ctx).ID(sc.ID).Delete(new(Secret)); err != nil { + return fmt.Errorf("Delete: %w", err) + } + + return nil +} diff --git a/modules/actions/workflows.go b/modules/actions/workflows.go index de340a74ec9ca..408fdb8f8ef22 100644 --- a/modules/actions/workflows.go +++ b/modules/actions/workflows.go @@ -95,18 +95,25 @@ func GetEventsFromContent(content []byte) ([]*jobparser.Event, error) { return events, nil } -func DetectWorkflows(gitRepo *git.Repository, commit *git.Commit, triggedEvent webhook_module.HookEventType, payload api.Payloader) ([]*DetectedWorkflow, error) { +func DetectWorkflows( + gitRepo *git.Repository, + commit *git.Commit, + triggedEvent webhook_module.HookEventType, + payload api.Payloader, +) ([]*DetectedWorkflow, []*DetectedWorkflow, error) { entries, err := ListWorkflows(commit) if err != nil { - return nil, err + return nil, nil, err } workflows := make([]*DetectedWorkflow, 0, len(entries)) + schedules := make([]*DetectedWorkflow, 0, len(entries)) for _, entry := range entries { content, err := GetContentFromEntry(entry) if err != nil { - return nil, err + return nil, nil, err } + events, err := GetEventsFromContent(content) if err != nil { log.Warn("ignore invalid workflow %q: %v", entry.Name(), err) @@ -114,6 +121,14 @@ func DetectWorkflows(gitRepo *git.Repository, commit *git.Commit, triggedEvent w } for _, evt := range events { log.Trace("detect workflow %q for event %#v matching %q", entry.Name(), evt, triggedEvent) + if evt.IsSchedule() { + dwf := &DetectedWorkflow{ + EntryName: entry.Name(), + TriggerEvent: evt.Name, + Content: content, + } + schedules = append(schedules, dwf) + } if detectMatched(gitRepo, commit, triggedEvent, payload, evt) { dwf := &DetectedWorkflow{ EntryName: entry.Name(), @@ -125,7 +140,7 @@ func DetectWorkflows(gitRepo *git.Repository, commit *git.Commit, triggedEvent w } } - return workflows, nil + return workflows, schedules, nil } func detectMatched(gitRepo *git.Repository, commit *git.Commit, triggedEvent webhook_module.HookEventType, payload api.Payloader, evt *jobparser.Event) bool { diff --git a/modules/git/commit.go b/modules/git/commit.go index c44882d886171..b09be25ba0997 100644 --- a/modules/git/commit.go +++ b/modules/git/commit.go @@ -495,7 +495,7 @@ func GetCommitFileStatus(ctx context.Context, repoPath, commitID string) (*Commi }() stderr := new(bytes.Buffer) - err := NewCommand(ctx, "log", "--name-status", "-c", "--pretty=format:", "--parents", "--no-renames", "-z", "-1").AddDynamicArguments(commitID).Run(&RunOpts{ + err := NewCommand(ctx, "log", "--name-status", "-m", "--pretty=format:", "--first-parent", "--no-renames", "-z", "-1").AddDynamicArguments(commitID).Run(&RunOpts{ Dir: repoPath, Stdout: w, Stderr: stderr, diff --git a/modules/git/commit_test.go b/modules/git/commit_test.go index acf4beb029fc0..ac586fdf09d3e 100644 --- a/modules/git/commit_test.go +++ b/modules/git/commit_test.go @@ -255,3 +255,26 @@ func TestParseCommitFileStatus(t *testing.T) { assert.Equal(t, kase.modified, fileStatus.Modified) } } + +func TestGetCommitFileStatusMerges(t *testing.T) { + bareRepo1Path := filepath.Join(testReposDir, "repo6_merge") + + commitFileStatus, err := GetCommitFileStatus(DefaultContext, bareRepo1Path, "022f4ce6214973e018f02bf363bf8a2e3691f699") + assert.NoError(t, err) + + expected := CommitFileStatus{ + []string{ + "add_file.txt", + }, + []string{ + "to_remove.txt", + }, + []string{ + 
"to_modify.txt", + }, + } + + assert.Equal(t, commitFileStatus.Added, expected.Added) + assert.Equal(t, commitFileStatus.Removed, expected.Removed) + assert.Equal(t, commitFileStatus.Modified, expected.Modified) +} diff --git a/modules/git/tests/repos/repo6_merge/HEAD b/modules/git/tests/repos/repo6_merge/HEAD new file mode 100644 index 0000000000000..b870d82622c1a --- /dev/null +++ b/modules/git/tests/repos/repo6_merge/HEAD @@ -0,0 +1 @@ +ref: refs/heads/main diff --git a/modules/git/tests/repos/repo6_merge/objects/02/2f4ce6214973e018f02bf363bf8a2e3691f699 b/modules/git/tests/repos/repo6_merge/objects/02/2f4ce6214973e018f02bf363bf8a2e3691f699 new file mode 100644 index 0000000000000..0778a1c88c74c Binary files /dev/null and b/modules/git/tests/repos/repo6_merge/objects/02/2f4ce6214973e018f02bf363bf8a2e3691f699 differ diff --git a/modules/git/tests/repos/repo6_merge/objects/05/45879290cc368a8becebc4aa34002c52d5fecc b/modules/git/tests/repos/repo6_merge/objects/05/45879290cc368a8becebc4aa34002c52d5fecc new file mode 100644 index 0000000000000..c71794f3291ea Binary files /dev/null and b/modules/git/tests/repos/repo6_merge/objects/05/45879290cc368a8becebc4aa34002c52d5fecc differ diff --git a/modules/git/tests/repos/repo6_merge/objects/1e/5d0a65fe099ef12d24b28f783896e4b8172576 b/modules/git/tests/repos/repo6_merge/objects/1e/5d0a65fe099ef12d24b28f783896e4b8172576 new file mode 100644 index 0000000000000..365f368d35bc6 Binary files /dev/null and b/modules/git/tests/repos/repo6_merge/objects/1e/5d0a65fe099ef12d24b28f783896e4b8172576 differ diff --git a/modules/git/tests/repos/repo6_merge/objects/37/d35c7ed39e4e16d0b579a5b995b7e30b0e9411 b/modules/git/tests/repos/repo6_merge/objects/37/d35c7ed39e4e16d0b579a5b995b7e30b0e9411 new file mode 100644 index 0000000000000..83890b5d0b1e8 --- /dev/null +++ b/modules/git/tests/repos/repo6_merge/objects/37/d35c7ed39e4e16d0b579a5b995b7e30b0e9411 @@ -0,0 +1,2 @@ +xM +1 a=E$LAī'iRg~_u1N@mZ)g2Dj*_fs 4amnp>>!#1;p]xuyIwN4 \ No newline at end of file diff --git a/modules/git/tests/repos/repo6_merge/objects/38/ec3e0cdc88bde01014bda4a5dd9fc835f41439 b/modules/git/tests/repos/repo6_merge/objects/38/ec3e0cdc88bde01014bda4a5dd9fc835f41439 new file mode 100644 index 0000000000000..582d98ce306f3 --- /dev/null +++ b/modules/git/tests/repos/repo6_merge/objects/38/ec3e0cdc88bde01014bda4a5dd9fc835f41439 @@ -0,0 +1,2 @@ +xM +0a9M2 U =yv/ 0:@$UѬ.[> lIsx 8'TRĤS,$x. 
֚=n[נus!+9BGvfm ur> \ No newline at end of file diff --git a/modules/git/tests/repos/repo6_merge/objects/66/7e0fbc6bc02c2285d17f542e89b23c0fa5482b b/modules/git/tests/repos/repo6_merge/objects/66/7e0fbc6bc02c2285d17f542e89b23c0fa5482b new file mode 100644 index 0000000000000..d7faff6bf2494 Binary files /dev/null and b/modules/git/tests/repos/repo6_merge/objects/66/7e0fbc6bc02c2285d17f542e89b23c0fa5482b differ diff --git a/modules/git/tests/repos/repo6_merge/objects/9f/d90b1d524c0fea776ed5e6476da02ea1740597 b/modules/git/tests/repos/repo6_merge/objects/9f/d90b1d524c0fea776ed5e6476da02ea1740597 new file mode 100644 index 0000000000000..8ac2814c0fca2 Binary files /dev/null and b/modules/git/tests/repos/repo6_merge/objects/9f/d90b1d524c0fea776ed5e6476da02ea1740597 differ diff --git a/modules/git/tests/repos/repo6_merge/objects/ae/4b035e7c4afbc000576cee3f713ea0c2f1e1e2 b/modules/git/tests/repos/repo6_merge/objects/ae/4b035e7c4afbc000576cee3f713ea0c2f1e1e2 new file mode 100644 index 0000000000000..c0f6b1334ff0a --- /dev/null +++ b/modules/git/tests/repos/repo6_merge/objects/ae/4b035e7c4afbc000576cee3f713ea0c2f1e1e2 @@ -0,0 +1,5 @@ +xM +1 @a=E?i*mǑxO=x.lORZ80\[*%b +&q IȚ灝 +7ԋF쨜wcuzx?0 +1 :m6LS>& \ No newline at end of file diff --git a/modules/git/tests/repos/repo6_merge/objects/ba/2906d0666cf726c7eaadd2cd3db615dedfdf3a b/modules/git/tests/repos/repo6_merge/objects/ba/2906d0666cf726c7eaadd2cd3db615dedfdf3a new file mode 100644 index 0000000000000..edef2a7f0cb64 Binary files /dev/null and b/modules/git/tests/repos/repo6_merge/objects/ba/2906d0666cf726c7eaadd2cd3db615dedfdf3a differ diff --git a/modules/git/tests/repos/repo6_merge/objects/c1/a95c2eff8151c6d1437a0d5d3322a73ff38fb8 b/modules/git/tests/repos/repo6_merge/objects/c1/a95c2eff8151c6d1437a0d5d3322a73ff38fb8 new file mode 100644 index 0000000000000..353d86ccdb76e Binary files /dev/null and b/modules/git/tests/repos/repo6_merge/objects/c1/a95c2eff8151c6d1437a0d5d3322a73ff38fb8 differ diff --git a/modules/git/tests/repos/repo6_merge/objects/cc/d1d4d594029e68c388ecef5aa3063fa1055831 b/modules/git/tests/repos/repo6_merge/objects/cc/d1d4d594029e68c388ecef5aa3063fa1055831 new file mode 100644 index 0000000000000..5449d726cd885 Binary files /dev/null and b/modules/git/tests/repos/repo6_merge/objects/cc/d1d4d594029e68c388ecef5aa3063fa1055831 differ diff --git a/modules/git/tests/repos/repo6_merge/objects/cd/fc1aaf7a149151cb7bff639fafe05668d4bbd2 b/modules/git/tests/repos/repo6_merge/objects/cd/fc1aaf7a149151cb7bff639fafe05668d4bbd2 new file mode 100644 index 0000000000000..030eb980f6efa Binary files /dev/null and b/modules/git/tests/repos/repo6_merge/objects/cd/fc1aaf7a149151cb7bff639fafe05668d4bbd2 differ diff --git a/modules/git/tests/repos/repo6_merge/objects/d1/792641396ff7630d35fbb0b74b86b0c71bca77 b/modules/git/tests/repos/repo6_merge/objects/d1/792641396ff7630d35fbb0b74b86b0c71bca77 new file mode 100644 index 0000000000000..867e4c0b866fd Binary files /dev/null and b/modules/git/tests/repos/repo6_merge/objects/d1/792641396ff7630d35fbb0b74b86b0c71bca77 differ diff --git a/modules/git/tests/repos/repo6_merge/objects/ec/d11d8da0f25eaa99f64a37a82da98685f381e2 b/modules/git/tests/repos/repo6_merge/objects/ec/d11d8da0f25eaa99f64a37a82da98685f381e2 new file mode 100644 index 0000000000000..52d300cdf184d Binary files /dev/null and b/modules/git/tests/repos/repo6_merge/objects/ec/d11d8da0f25eaa99f64a37a82da98685f381e2 differ diff --git a/modules/git/tests/repos/repo6_merge/objects/fa/49b077972391ad58037050f2a75f74e3671e92 
b/modules/git/tests/repos/repo6_merge/objects/fa/49b077972391ad58037050f2a75f74e3671e92 new file mode 100644 index 0000000000000..112998d425717 Binary files /dev/null and b/modules/git/tests/repos/repo6_merge/objects/fa/49b077972391ad58037050f2a75f74e3671e92 differ diff --git a/modules/git/tests/repos/repo6_merge/refs/heads/main b/modules/git/tests/repos/repo6_merge/refs/heads/main new file mode 100644 index 0000000000000..adf9e86717a6b --- /dev/null +++ b/modules/git/tests/repos/repo6_merge/refs/heads/main @@ -0,0 +1 @@ +022f4ce6214973e018f02bf363bf8a2e3691f699 diff --git a/modules/git/tests/repos/repo6_merge/refs/heads/merge/add_file b/modules/git/tests/repos/repo6_merge/refs/heads/merge/add_file new file mode 100644 index 0000000000000..035ad11863aa0 --- /dev/null +++ b/modules/git/tests/repos/repo6_merge/refs/heads/merge/add_file @@ -0,0 +1 @@ +ae4b035e7c4afbc000576cee3f713ea0c2f1e1e2 diff --git a/modules/git/tests/repos/repo6_merge/refs/heads/merge/modify_file b/modules/git/tests/repos/repo6_merge/refs/heads/merge/modify_file new file mode 100644 index 0000000000000..f0d5105f54faf --- /dev/null +++ b/modules/git/tests/repos/repo6_merge/refs/heads/merge/modify_file @@ -0,0 +1 @@ +d1792641396ff7630d35fbb0b74b86b0c71bca77 diff --git a/modules/git/tests/repos/repo6_merge/refs/heads/merge/remove_file b/modules/git/tests/repos/repo6_merge/refs/heads/merge/remove_file new file mode 100644 index 0000000000000..ada3c8b3f4e60 --- /dev/null +++ b/modules/git/tests/repos/repo6_merge/refs/heads/merge/remove_file @@ -0,0 +1 @@ +38ec3e0cdc88bde01014bda4a5dd9fc835f41439 diff --git a/modules/indexer/issues/indexer.go b/modules/indexer/issues/indexer.go index 6619949104f49..020659c82b541 100644 --- a/modules/indexer/issues/indexer.go +++ b/modules/indexer/issues/indexer.go @@ -13,6 +13,7 @@ import ( db_model "code.gitea.io/gitea/models/db" repo_model "code.gitea.io/gitea/models/repo" + "code.gitea.io/gitea/modules/container" "code.gitea.io/gitea/modules/graceful" "code.gitea.io/gitea/modules/indexer/issues/bleve" "code.gitea.io/gitea/modules/indexer/issues/db" @@ -277,7 +278,7 @@ func IsAvailable(ctx context.Context) bool { } // SearchOptions indicates the options for searching issues -type SearchOptions internal.SearchOptions +type SearchOptions = internal.SearchOptions const ( SortByCreatedDesc = internal.SortByCreatedDesc @@ -291,7 +292,6 @@ const ( ) // SearchIssues search issues by options. -// It returns issue ids and a bool value indicates if the result is imprecise. func SearchIssues(ctx context.Context, opts *SearchOptions) ([]int64, int64, error) { indexer := *globalIndexer.Load() @@ -305,7 +305,7 @@ func SearchIssues(ctx context.Context, opts *SearchOptions) ([]int64, int64, err indexer = db.NewIndexer() } - result, err := indexer.Search(ctx, (*internal.SearchOptions)(opts)) + result, err := indexer.Search(ctx, opts) if err != nil { return nil, 0, err } @@ -317,3 +317,38 @@ func SearchIssues(ctx context.Context, opts *SearchOptions) ([]int64, int64, err return ret, result.Total, nil } + +// CountIssues counts issues by options. It is a shortcut of SearchIssues(ctx, opts) but only returns the total count. +func CountIssues(ctx context.Context, opts *SearchOptions) (int64, error) { + opts = opts.Copy(func(options *SearchOptions) { opts.Paginator = &db_model.ListOptions{PageSize: 0} }) + + _, total, err := SearchIssues(ctx, opts) + return total, err +} + +// CountIssuesByRepo counts issues by options and group by repo id. 
+// It's not a complete implementation, since it requires the caller should provide the repo ids. +// That means opts.RepoIDs must be specified, and opts.AllPublic must be false. +// It's good enough for the current usage, and it can be improved if needed. +// TODO: use "group by" of the indexer engines to implement it. +func CountIssuesByRepo(ctx context.Context, opts *SearchOptions) (map[int64]int64, error) { + if len(opts.RepoIDs) == 0 { + return nil, fmt.Errorf("opts.RepoIDs must be specified") + } + if opts.AllPublic { + return nil, fmt.Errorf("opts.AllPublic must be false") + } + + repoIDs := container.SetOf(opts.RepoIDs...).Values() + ret := make(map[int64]int64, len(repoIDs)) + // TODO: it could be faster if do it in parallel for some indexer engines. Improve it if users report it's slow. + for _, repoID := range repoIDs { + count, err := CountIssues(ctx, opts.Copy(func(o *internal.SearchOptions) { o.RepoIDs = []int64{repoID} })) + if err != nil { + return nil, err + } + ret[repoID] = count + } + + return ret, nil +} diff --git a/modules/indexer/issues/internal/model.go b/modules/indexer/issues/internal/model.go index 2de1e0e2bf20b..031745dd2fcc1 100644 --- a/modules/indexer/issues/internal/model.go +++ b/modules/indexer/issues/internal/model.go @@ -109,6 +109,19 @@ type SearchOptions struct { SortBy SortBy // sort by field } +// Copy returns a copy of the options. +// Be careful, it's not a deep copy, so `SearchOptions.RepoIDs = {...}` is OK while `SearchOptions.RepoIDs[0] = ...` is not. +func (o *SearchOptions) Copy(edit ...func(options *SearchOptions)) *SearchOptions { + if o == nil { + return nil + } + v := *o + for _, e := range edit { + e(&v) + } + return &v +} + type SortBy string const ( diff --git a/modules/structs/release.go b/modules/structs/release.go index 3fe40389b1f4a..c7378645c28d2 100644 --- a/modules/structs/release.go +++ b/modules/structs/release.go @@ -18,6 +18,7 @@ type Release struct { HTMLURL string `json:"html_url"` TarURL string `json:"tarball_url"` ZipURL string `json:"zipball_url"` + UploadURL string `json:"upload_url"` IsDraft bool `json:"draft"` IsPrerelease bool `json:"prerelease"` // swagger:strfmt date-time diff --git a/modules/structs/secret.go b/modules/structs/secret.go index c707eb2278b2b..52221b51f0a38 100644 --- a/modules/structs/secret.go +++ b/modules/structs/secret.go @@ -25,3 +25,12 @@ type CreateSecretOption struct { // Data of the secret to create Data string `json:"data" binding:"Required"` } + +// UpdateSecretOption options when updating secret +// swagger:model +type UpdateSecretOption struct { + // Data of the secret to update + // + // required: true + Data string `json:"data" binding:"Required"` +} diff --git a/options/locale/locale_en-US.ini b/options/locale/locale_en-US.ini index 587a2b14bcf21..e32399dd89c7b 100644 --- a/options/locale/locale_en-US.ini +++ b/options/locale/locale_en-US.ini @@ -1480,9 +1480,18 @@ issues.ref_reopening_from = `referenced a pull request %[4]s tha issues.ref_closed_from = `closed this issue %[4]s %[2]s` issues.ref_reopened_from = `reopened this issue %[4]s %[2]s` issues.ref_from = `from %[1]s` -issues.poster = Poster -issues.collaborator = Collaborator -issues.owner = Owner +issues.author = Author +issues.author_helper = This user is the author. +issues.role.owner = Owner +issues.role.owner_helper = This user is the owner of this repository. +issues.role.member = Member +issues.role.member_helper = This user is a member of the organization owning this repository. 
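The `Copy` helper above is what `CountIssues` and the reworked `getUserIssueStats` later in this diff rely on. A small usage sketch (names and values are illustrative): the edit callback must mutate the copy it receives, not the captured original, and the copy is shallow, so slice fields still share their backing arrays.

```go
package sketch

import (
	"context"

	issue_indexer "code.gitea.io/gitea/modules/indexer/issues"
	"code.gitea.io/gitea/modules/util"
)

// countAssignedOpen counts open issues assigned to doerID without disturbing
// the caller's base options.
func countAssignedOpen(ctx context.Context, base *issue_indexer.SearchOptions, doerID int64) (int64, error) {
	opts := base.Copy(func(o *issue_indexer.SearchOptions) {
		// Mutate the copy ("o"), never the captured "base".
		o.AssigneeID = &doerID
		o.IsClosed = util.OptionalBoolFalse
	})
	// CountIssues only returns the total, so no Paginator is needed here.
	return issue_indexer.CountIssues(ctx, opts)
}
```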
+issues.role.collaborator = Collaborator +issues.role.collaborator_helper = This user has been invited to collaborate on the repository. +issues.role.first_time_contributor = First-time contributor +issues.role.first_time_contributor_helper = This is the first contribution of this user to the repository. +issues.role.contributor = Contributor +issues.role.contributor_helper = This user has previously committed to the repository. issues.re_request_review=Re-request review issues.is_stale = There have been changes to this PR since this review issues.remove_request_review=Remove review request @@ -2756,6 +2765,7 @@ dashboard.gc_lfs = Garbage collect LFS meta objects dashboard.stop_zombie_tasks = Stop zombie tasks dashboard.stop_endless_tasks = Stop endless tasks dashboard.cancel_abandoned_jobs = Cancel abandoned jobs +dashboard.start_schedule_tasks = Start schedule tasks dashboard.sync_branch.started = Branches Sync started dashboard.rebuild_issue_indexer = Rebuild issue indexer diff --git a/routers/api/v1/api.go b/routers/api/v1/api.go index 9613bd610dc58..2d644507d5f71 100644 --- a/routers/api/v1/api.go +++ b/routers/api/v1/api.go @@ -1301,6 +1301,9 @@ func Routes() *web.Route { m.Group("/actions/secrets", func() { m.Get("", reqToken(), reqOrgOwnership(), org.ListActionsSecrets) m.Post("", reqToken(), reqOrgOwnership(), bind(api.CreateSecretOption{}), org.CreateOrgSecret) + m.Combo("/{secretname}"). + Put(reqToken(), reqOrgOwnership(), bind(api.UpdateSecretOption{}), org.UpdateOrgSecret). + Delete(reqToken(), reqOrgOwnership(), org.DeleteOrgSecret) }) m.Group("/public_members", func() { m.Get("", org.ListPublicMembers) diff --git a/routers/api/v1/org/action.go b/routers/api/v1/org/action.go index 7659191946f32..9697a11363dec 100644 --- a/routers/api/v1/org/action.go +++ b/routers/api/v1/org/action.go @@ -103,6 +103,10 @@ func CreateOrgSecret(ctx *context.APIContext) { // "403": // "$ref": "#/responses/forbidden" opt := web.GetForm(ctx).(*api.CreateSecretOption) + if err := actions.NameRegexMatch(opt.Name); err != nil { + ctx.Error(http.StatusBadRequest, "CreateOrgSecret", err) + return + } s, err := secret_model.InsertEncryptedSecret( ctx, ctx.Org.Organization.ID, 0, opt.Name, actions.ReserveLineBreakForTextarea(opt.Data), ) @@ -113,3 +117,90 @@ func CreateOrgSecret(ctx *context.APIContext) { ctx.JSON(http.StatusCreated, convert.ToSecret(s)) } + +// UpdateOrgSecret update one secret of the organization +func UpdateOrgSecret(ctx *context.APIContext) { + // swagger:operation PUT /orgs/{org}/actions/secrets/{secretname} organization updateOrgSecret + // --- + // summary: Update a secret value in an organization + // consumes: + // - application/json + // produces: + // - application/json + // parameters: + // - name: org + // in: path + // description: name of organization + // type: string + // required: true + // - name: secretname + // in: path + // description: name of the secret + // type: string + // required: true + // - name: body + // in: body + // schema: + // "$ref": "#/definitions/UpdateSecretOption" + // responses: + // "204": + // description: update one secret of the organization + // "403": + // "$ref": "#/responses/forbidden" + secretName := ctx.Params(":secretname") + opt := web.GetForm(ctx).(*api.UpdateSecretOption) + err := secret_model.UpdateSecret( + ctx, ctx.Org.Organization.ID, 0, secretName, opt.Data, + ) + if secret_model.IsErrSecretNotFound(err) { + ctx.NotFound(err) + return + } + if err != nil { + ctx.Error(http.StatusInternalServerError, "UpdateSecret", err) + 
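The `PUT`/`DELETE` routes registered in `routers/api/v1/api.go` above can be exercised with any HTTP client. A sketch in Go, where the instance URL, organization, secret name, and token are placeholders; both endpoints answer `204 No Content` on success:

```go
package main

import (
	"fmt"
	"net/http"
	"strings"
)

func main() {
	base := "https://gitea.example.com/api/v1" // placeholder instance
	org, secret, token := "my-org", "DEPLOY_TOKEN", "personal-access-token"
	url := fmt.Sprintf("%s/orgs/%s/actions/secrets/%s", base, org, secret)

	// Update the secret's value.
	req, err := http.NewRequest(http.MethodPut, url, strings.NewReader(`{"data":"new-secret-value"}`))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "token "+token)
	req.Header.Set("Content-Type", "application/json")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	resp.Body.Close()
	fmt.Println("update:", resp.Status)

	// Delete the secret again.
	req, err = http.NewRequest(http.MethodDelete, url, nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "token "+token)
	resp, err = http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	resp.Body.Close()
	fmt.Println("delete:", resp.Status)
}
```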
return + } + + ctx.Status(http.StatusNoContent) +} + +// DeleteOrgSecret delete one secret of the organization +func DeleteOrgSecret(ctx *context.APIContext) { + // swagger:operation DELETE /orgs/{org}/actions/secrets/{secretname} organization deleteOrgSecret + // --- + // summary: Delete a secret in an organization + // consumes: + // - application/json + // produces: + // - application/json + // parameters: + // - name: org + // in: path + // description: name of organization + // type: string + // required: true + // - name: secretname + // in: path + // description: name of the secret + // type: string + // required: true + // responses: + // "204": + // description: delete one secret of the organization + // "403": + // "$ref": "#/responses/forbidden" + secretName := ctx.Params(":secretname") + err := secret_model.DeleteSecret( + ctx, ctx.Org.Organization.ID, 0, secretName, + ) + if secret_model.IsErrSecretNotFound(err) { + ctx.NotFound(err) + return + } + if err != nil { + ctx.Error(http.StatusInternalServerError, "DeleteSecret", err) + return + } + + ctx.Status(http.StatusNoContent) +} diff --git a/routers/api/v1/repo/branch.go b/routers/api/v1/repo/branch.go index 577776dadd36d..cdc176b8e4573 100644 --- a/routers/api/v1/repo/branch.go +++ b/routers/api/v1/repo/branch.go @@ -768,7 +768,8 @@ func EditBranchProtection(ctx *context.APIContext) { if form.EnableStatusCheck != nil { protectBranch.EnableStatusCheck = *form.EnableStatusCheck } - if protectBranch.EnableStatusCheck { + + if form.StatusCheckContexts != nil { protectBranch.StatusCheckContexts = form.StatusCheckContexts } diff --git a/routers/api/v1/swagger/options.go b/routers/api/v1/swagger/options.go index 8e7e6ec3dfbb6..e41ee667760ab 100644 --- a/routers/api/v1/swagger/options.go +++ b/routers/api/v1/swagger/options.go @@ -190,4 +190,7 @@ type swaggerParameterBodies struct { // in:body CreateSecretOption api.CreateSecretOption + + // in:body + UpdateSecretOption api.UpdateSecretOption } diff --git a/routers/web/admin/users.go b/routers/web/admin/users.go index 03c89bdab1935..e560a88b4cffd 100644 --- a/routers/web/admin/users.go +++ b/routers/web/admin/users.go @@ -480,5 +480,5 @@ func DeleteAvatar(ctx *context.Context) { ctx.Flash.Error(err.Error()) } - ctx.Redirect(setting.AppSubURL + "/admin/users/" + strconv.FormatInt(u.ID, 10)) + ctx.JSONRedirect(setting.AppSubURL + "/admin/users/" + strconv.FormatInt(u.ID, 10)) } diff --git a/routers/web/org/setting.go b/routers/web/org/setting.go index 5ae61c79befe2..957daab646630 100644 --- a/routers/web/org/setting.go +++ b/routers/web/org/setting.go @@ -156,7 +156,7 @@ func SettingsDeleteAvatar(ctx *context.Context) { ctx.Flash.Error(err.Error()) } - ctx.Redirect(ctx.Org.OrgLink + "/settings") + ctx.JSONRedirect(ctx.Org.OrgLink + "/settings") } // SettingsDelete response for deleting an organization diff --git a/routers/web/repo/commit.go b/routers/web/repo/commit.go index 7513f9360b1d9..9a620f6d37226 100644 --- a/routers/web/repo/commit.go +++ b/routers/web/repo/commit.go @@ -81,7 +81,6 @@ func Commits(ctx *context.Context) { ctx.Data["Username"] = ctx.Repo.Owner.Name ctx.Data["Reponame"] = ctx.Repo.Repository.Name ctx.Data["CommitCount"] = commitsCount - ctx.Data["RefName"] = ctx.Repo.RefName pager := context.NewPagination(int(commitsCount), pageSize, page, 5) pager.SetDefaultParams(ctx) @@ -157,7 +156,7 @@ func Graph(ctx *context.Context) { ctx.Data["Username"] = ctx.Repo.Owner.Name ctx.Data["Reponame"] = ctx.Repo.Repository.Name ctx.Data["CommitCount"] = commitsCount - 
ctx.Data["RefName"] = ctx.Repo.RefName + paginator := context.NewPagination(int(graphCommitsCount), setting.UI.GraphMaxCommitNum, page, 5) paginator.AddParam(ctx, "mode", "Mode") paginator.AddParam(ctx, "hide-pr-refs", "HidePRRefs") @@ -203,7 +202,6 @@ func SearchCommits(ctx *context.Context) { } ctx.Data["Username"] = ctx.Repo.Owner.Name ctx.Data["Reponame"] = ctx.Repo.Repository.Name - ctx.Data["RefName"] = ctx.Repo.RefName ctx.HTML(http.StatusOK, tplCommits) } @@ -247,7 +245,6 @@ func FileHistory(ctx *context.Context) { ctx.Data["Reponame"] = ctx.Repo.Repository.Name ctx.Data["FileName"] = fileName ctx.Data["CommitCount"] = commitsCount - ctx.Data["RefName"] = ctx.Repo.RefName pager := context.NewPagination(int(commitsCount), setting.Git.CommitsRangeSize, page, 5) pager.SetDefaultParams(ctx) diff --git a/routers/web/repo/issue.go b/routers/web/repo/issue.go index b04802e4520df..9a2add14524c1 100644 --- a/routers/web/repo/issue.go +++ b/routers/web/repo/issue.go @@ -1228,47 +1228,70 @@ func NewIssuePost(ctx *context.Context) { } } -// roleDescriptor returns the Role Descriptor for a comment in/with the given repo, poster and issue +// roleDescriptor returns the role descriptor for a comment in/with the given repo, poster and issue func roleDescriptor(ctx stdCtx.Context, repo *repo_model.Repository, poster *user_model.User, issue *issues_model.Issue, hasOriginalAuthor bool) (issues_model.RoleDescriptor, error) { + roleDescriptor := issues_model.RoleDescriptor{} + if hasOriginalAuthor { - return issues_model.RoleDescriptorNone, nil + return roleDescriptor, nil } perm, err := access_model.GetUserRepoPermission(ctx, repo, poster) if err != nil { - return issues_model.RoleDescriptorNone, err + return roleDescriptor, err } - // By default the poster has no roles on the comment. - roleDescriptor := issues_model.RoleDescriptorNone + // If the poster is the actual poster of the issue, enable Poster role. + roleDescriptor.IsPoster = issue.IsPoster(poster.ID) // Check if the poster is owner of the repo. if perm.IsOwner() { - // If the poster isn't a admin, enable the owner role. + // If the poster isn't an admin, enable the owner role. if !poster.IsAdmin { - roleDescriptor = roleDescriptor.WithRole(issues_model.RoleDescriptorOwner) - } else { + roleDescriptor.RoleInRepo = issues_model.RoleRepoOwner + return roleDescriptor, nil + } - // Otherwise check if poster is the real repo admin. - ok, err := access_model.IsUserRealRepoAdmin(repo, poster) - if err != nil { - return issues_model.RoleDescriptorNone, err - } - if ok { - roleDescriptor = roleDescriptor.WithRole(issues_model.RoleDescriptorOwner) - } + // Otherwise check if poster is the real repo admin. + ok, err := access_model.IsUserRealRepoAdmin(repo, poster) + if err != nil { + return roleDescriptor, err + } + if ok { + roleDescriptor.RoleInRepo = issues_model.RoleRepoOwner + return roleDescriptor, nil } } - // Is the poster can write issues or pulls to the repo, enable the Writer role. - // Only enable this if the poster doesn't have the owner role already. 
- if !roleDescriptor.HasRole("Owner") && perm.CanWriteIssuesOrPulls(issue.IsPull) { - roleDescriptor = roleDescriptor.WithRole(issues_model.RoleDescriptorWriter) + // If repo is organization, check Member role + if err := repo.LoadOwner(ctx); err != nil { + return roleDescriptor, err + } + if repo.Owner.IsOrganization() { + if isMember, err := organization.IsOrganizationMember(ctx, repo.Owner.ID, poster.ID); err != nil { + return roleDescriptor, err + } else if isMember { + roleDescriptor.RoleInRepo = issues_model.RoleRepoMember + return roleDescriptor, nil + } } - // If the poster is the actual poster of the issue, enable Poster role. - if issue.IsPoster(poster.ID) { - roleDescriptor = roleDescriptor.WithRole(issues_model.RoleDescriptorPoster) + // If the poster is the collaborator of the repo + if isCollaborator, err := repo_model.IsCollaborator(ctx, repo.ID, poster.ID); err != nil { + return roleDescriptor, err + } else if isCollaborator { + roleDescriptor.RoleInRepo = issues_model.RoleRepoCollaborator + return roleDescriptor, nil + } + + hasMergedPR, err := issues_model.HasMergedPullRequestInRepo(ctx, repo.ID, poster.ID) + if err != nil { + return roleDescriptor, err + } else if hasMergedPR { + roleDescriptor.RoleInRepo = issues_model.RoleRepoContributor + } else { + // only display first time contributor in the first opening pull request + roleDescriptor.RoleInRepo = issues_model.RoleRepoFirstTimeContributor } return roleDescriptor, nil diff --git a/routers/web/repo/setting/avatar.go b/routers/web/repo/setting/avatar.go index ec673ca28868b..ae80f1db01a30 100644 --- a/routers/web/repo/setting/avatar.go +++ b/routers/web/repo/setting/avatar.go @@ -72,5 +72,5 @@ func SettingsDeleteAvatar(ctx *context.Context) { if err := repo_service.DeleteAvatar(ctx, ctx.Repo.Repository); err != nil { ctx.Flash.Error(fmt.Sprintf("DeleteAvatar: %v", err)) } - ctx.Redirect(ctx.Repo.RepoLink + "/settings") + ctx.JSONRedirect(ctx.Repo.RepoLink + "/settings") } diff --git a/routers/web/user/home.go b/routers/web/user/home.go index 8c1447f707863..d1a4877e6d4b6 100644 --- a/routers/web/user/home.go +++ b/routers/web/user/home.go @@ -448,21 +448,26 @@ func buildIssueOverview(ctx *context.Context, unitType unit.Type) { // - Team org's owns the repository. // - Team has read permission to repository. repoOpts := &repo_model.SearchRepoOptions{ - Actor: ctx.Doer, - OwnerID: ctx.Doer.ID, - Private: true, - AllPublic: false, - AllLimited: false, + Actor: ctx.Doer, + OwnerID: ctx.Doer.ID, + Private: true, + AllPublic: false, + AllLimited: false, + Collaborate: util.OptionalBoolNone, + UnitType: unitType, + Archived: util.OptionalBoolFalse, } if team != nil { repoOpts.TeamID = team.ID } + accessibleRepos := container.Set[int64]{} { ids, _, err := repo_model.SearchRepositoryIDs(repoOpts) if err != nil { ctx.ServerError("SearchRepositoryIDs", err) return } + accessibleRepos.AddMultiple(ids...) opts.RepoIDs = ids if len(opts.RepoIDs) == 0 { // no repos found, don't let the indexer return all repos @@ -489,40 +494,16 @@ func buildIssueOverview(ctx *context.Context, unitType unit.Type) { keyword := strings.Trim(ctx.FormString("q"), " ") ctx.Data["Keyword"] = keyword - accessibleRepos := container.Set[int64]{} - { - ids, err := issues_model.GetRepoIDsForIssuesOptions(opts, ctxUser) - if err != nil { - ctx.ServerError("GetRepoIDsForIssuesOptions", err) - return - } - for _, id := range ids { - accessibleRepos.Add(id) - } - } - // Educated guess: Do or don't show closed issues. 
isShowClosed := ctx.FormString("state") == "closed" opts.IsClosed = util.OptionalBoolOf(isShowClosed) // Filter repos and count issues in them. Count will be used later. // USING NON-FINAL STATE OF opts FOR A QUERY. - var issueCountByRepo map[int64]int64 - { - issueIDs, err := issueIDsFromSearch(ctx, keyword, opts) - if err != nil { - ctx.ServerError("issueIDsFromSearch", err) - return - } - if len(issueIDs) > 0 { // else, no issues found, just leave issueCountByRepo empty - opts.IssueIDs = issueIDs - issueCountByRepo, err = issues_model.CountIssuesByRepo(ctx, opts) - if err != nil { - ctx.ServerError("CountIssuesByRepo", err) - return - } - opts.IssueIDs = nil // reset, the opts will be used later - } + issueCountByRepo, err := issue_indexer.CountIssuesByRepo(ctx, issue_indexer.ToSearchOptions(keyword, opts)) + if err != nil { + ctx.ServerError("CountIssuesByRepo", err) + return } // Make sure page number is at least 1. Will be posted to ctx.Data. @@ -551,13 +532,13 @@ func buildIssueOverview(ctx *context.Context, unitType unit.Type) { // Parse ctx.FormString("repos") and remember matched repo IDs for later. // Gets set when clicking filters on the issues overview page. - repoIDs := getRepoIDs(ctx.FormString("repos")) - if len(repoIDs) > 0 { - // Remove repo IDs that are not accessible to the user. - repoIDs = util.SliceRemoveAllFunc(repoIDs, func(v int64) bool { - return !accessibleRepos.Contains(v) - }) - opts.RepoIDs = repoIDs + selectedRepoIDs := getRepoIDs(ctx.FormString("repos")) + // Remove repo IDs that are not accessible to the user. + selectedRepoIDs = util.SliceRemoveAllFunc(selectedRepoIDs, func(v int64) bool { + return !accessibleRepos.Contains(v) + }) + if len(selectedRepoIDs) > 0 { + opts.RepoIDs = selectedRepoIDs } // ------------------------------ @@ -568,7 +549,7 @@ func buildIssueOverview(ctx *context.Context, unitType unit.Type) { // USING FINAL STATE OF opts FOR A QUERY. var issues issues_model.IssueList { - issueIDs, err := issueIDsFromSearch(ctx, keyword, opts) + issueIDs, _, err := issue_indexer.SearchIssues(ctx, issue_indexer.ToSearchOptions(keyword, opts)) if err != nil { ctx.ServerError("issueIDsFromSearch", err) return @@ -584,6 +565,18 @@ func buildIssueOverview(ctx *context.Context, unitType unit.Type) { // Add repository pointers to Issues. // ---------------------------------- + // Remove repositories that should not be shown, + // which are repositories that have no issues and are not selected by the user. + selectedReposMap := make(map[int64]struct{}, len(selectedRepoIDs)) + for _, repoID := range selectedRepoIDs { + selectedReposMap[repoID] = struct{}{} + } + for k, v := range issueCountByRepo { + if _, ok := selectedReposMap[k]; !ok && v == 0 { + delete(issueCountByRepo, k) + } + } + // showReposMap maps repository IDs to their Repository pointers. showReposMap, err := loadRepoByIDs(ctxUser, issueCountByRepo, unitType) if err != nil { @@ -615,44 +608,10 @@ func buildIssueOverview(ctx *context.Context, unitType unit.Type) { // ------------------------------- // Fill stats to post to ctx.Data. 
// ------------------------------- - var issueStats *issues_model.IssueStats - { - statsOpts := issues_model.IssuesOptions{ - RepoIDs: repoIDs, - User: ctx.Doer, - IsPull: util.OptionalBoolOf(isPullList), - IsClosed: util.OptionalBoolOf(isShowClosed), - IssueIDs: nil, - IsArchived: util.OptionalBoolFalse, - LabelIDs: opts.LabelIDs, - Org: org, - Team: team, - RepoCond: opts.RepoCond, - } - - if keyword != "" { - statsOpts.RepoIDs = opts.RepoIDs - allIssueIDs, err := issueIDsFromSearch(ctx, keyword, &statsOpts) - if err != nil { - ctx.ServerError("issueIDsFromSearch", err) - return - } - statsOpts.IssueIDs = allIssueIDs - } - - if keyword != "" && len(statsOpts.IssueIDs) == 0 { - // So it did search with the keyword, but no issue found. - // Just set issueStats to empty. - issueStats = &issues_model.IssueStats{} - } else { - // So it did search with the keyword, and found some issues. It needs to get issueStats of these issues. - // Or the keyword is empty, so it doesn't need issueIDs as filter, just get issueStats with statsOpts. - issueStats, err = issues_model.GetUserIssueStats(filterMode, statsOpts) - if err != nil { - ctx.ServerError("GetUserIssueStats", err) - return - } - } + issueStats, err := getUserIssueStats(ctx, filterMode, issue_indexer.ToSearchOptions(keyword, opts), ctx.Doer.ID) + if err != nil { + ctx.ServerError("getUserIssueStats", err) + return } // Will be posted to ctx.Data. @@ -722,7 +681,7 @@ func buildIssueOverview(ctx *context.Context, unitType unit.Type) { ctx.Data["IssueStats"] = issueStats ctx.Data["ViewType"] = viewType ctx.Data["SortType"] = sortType - ctx.Data["RepoIDs"] = opts.RepoIDs + ctx.Data["RepoIDs"] = selectedRepoIDs ctx.Data["IsShowClosed"] = isShowClosed ctx.Data["SelectLabels"] = selectedLabels @@ -777,14 +736,6 @@ func getRepoIDs(reposQuery string) []int64 { return repoIDs } -func issueIDsFromSearch(ctx *context.Context, keyword string, opts *issues_model.IssuesOptions) ([]int64, error) { - ids, _, err := issue_indexer.SearchIssues(ctx, issue_indexer.ToSearchOptions(keyword, opts)) - if err != nil { - return nil, fmt.Errorf("SearchIssues: %w", err) - } - return ids, nil -} - func loadRepoByIDs(ctxUser *user_model.User, issueCountByRepo map[int64]int64, unitType unit.Type) (map[int64]*repo_model.Repository, error) { totalRes := make(map[int64]*repo_model.Repository, len(issueCountByRepo)) repoIDs := make([]int64, 0, 500) @@ -913,3 +864,71 @@ func UsernameSubRoute(ctx *context.Context) { } } } + +func getUserIssueStats(ctx *context.Context, filterMode int, opts *issue_indexer.SearchOptions, doerID int64) (*issues_model.IssueStats, error) { + opts = opts.Copy(func(o *issue_indexer.SearchOptions) { + o.AssigneeID = nil + o.PosterID = nil + o.MentionID = nil + o.ReviewRequestedID = nil + o.ReviewedID = nil + }) + + var ( + err error + ret = &issues_model.IssueStats{} + ) + + { + openClosedOpts := opts.Copy() + switch filterMode { + case issues_model.FilterModeAll, issues_model.FilterModeYourRepositories: + case issues_model.FilterModeAssign: + openClosedOpts.AssigneeID = &doerID + case issues_model.FilterModeCreate: + openClosedOpts.PosterID = &doerID + case issues_model.FilterModeMention: + openClosedOpts.MentionID = &doerID + case issues_model.FilterModeReviewRequested: + openClosedOpts.ReviewRequestedID = &doerID + case issues_model.FilterModeReviewed: + openClosedOpts.ReviewedID = &doerID + } + openClosedOpts.IsClosed = util.OptionalBoolFalse + ret.OpenCount, err = issue_indexer.CountIssues(ctx, openClosedOpts) + if err != nil { + return nil, err + 
} + openClosedOpts.IsClosed = util.OptionalBoolTrue + ret.ClosedCount, err = issue_indexer.CountIssues(ctx, openClosedOpts) + if err != nil { + return nil, err + } + } + + ret.YourRepositoriesCount, err = issue_indexer.CountIssues(ctx, opts) + if err != nil { + return nil, err + } + ret.AssignCount, err = issue_indexer.CountIssues(ctx, opts.Copy(func(o *issue_indexer.SearchOptions) { o.AssigneeID = &doerID })) + if err != nil { + return nil, err + } + ret.CreateCount, err = issue_indexer.CountIssues(ctx, opts.Copy(func(o *issue_indexer.SearchOptions) { o.PosterID = &doerID })) + if err != nil { + return nil, err + } + ret.MentionCount, err = issue_indexer.CountIssues(ctx, opts.Copy(func(o *issue_indexer.SearchOptions) { o.MentionID = &doerID })) + if err != nil { + return nil, err + } + ret.ReviewRequestedCount, err = issue_indexer.CountIssues(ctx, opts.Copy(func(o *issue_indexer.SearchOptions) { o.ReviewRequestedID = &doerID })) + if err != nil { + return nil, err + } + ret.ReviewedCount, err = issue_indexer.CountIssues(ctx, opts.Copy(func(o *issue_indexer.SearchOptions) { o.ReviewedID = &doerID })) + if err != nil { + return nil, err + } + return ret, nil +} diff --git a/routers/web/user/setting/profile.go b/routers/web/user/setting/profile.go index ab7d2e58b391b..61089d0947b5f 100644 --- a/routers/web/user/setting/profile.go +++ b/routers/web/user/setting/profile.go @@ -194,7 +194,7 @@ func DeleteAvatar(ctx *context.Context) { ctx.Flash.Error(err.Error()) } - ctx.Redirect(setting.AppSubURL + "/user/settings") + ctx.JSONRedirect(setting.AppSubURL + "/user/settings") } // Organization render all the organization of the user diff --git a/services/actions/notifier_helper.go b/services/actions/notifier_helper.go index 75c99ff19cd2b..ff00e48c644d1 100644 --- a/services/actions/notifier_helper.go +++ b/services/actions/notifier_helper.go @@ -4,6 +4,7 @@ package actions import ( + "bytes" "context" "fmt" "strings" @@ -24,6 +25,7 @@ import ( "code.gitea.io/gitea/services/convert" "github.com/nektos/act/pkg/jobparser" + "github.com/nektos/act/pkg/model" ) var methodCtxKey struct{} @@ -143,15 +145,15 @@ func notify(ctx context.Context, input *notifyInput) error { } var detectedWorkflows []*actions_module.DetectedWorkflow - workflows, err := actions_module.DetectWorkflows(gitRepo, commit, input.Event, input.Payload) + actionsConfig := input.Repo.MustGetUnit(ctx, unit_model.TypeActions).ActionsConfig() + workflows, schedules, err := actions_module.DetectWorkflows(gitRepo, commit, input.Event, input.Payload) if err != nil { return fmt.Errorf("DetectWorkflows: %w", err) } + if len(workflows) == 0 { log.Trace("repo %s with commit %s couldn't find workflows", input.Repo.RepoPath(), commit.ID) } else { - actionsConfig := input.Repo.MustGetUnit(ctx, unit_model.TypeActions).ActionsConfig() - for _, wf := range workflows { if actionsConfig.IsWorkflowDisabled(wf.EntryName) { log.Trace("repo %s has disable workflows %s", input.Repo.RepoPath(), wf.EntryName) @@ -171,7 +173,7 @@ func notify(ctx context.Context, input *notifyInput) error { if err != nil { return fmt.Errorf("gitRepo.GetCommit: %w", err) } - baseWorkflows, err := actions_module.DetectWorkflows(gitRepo, baseCommit, input.Event, input.Payload) + baseWorkflows, _, err := actions_module.DetectWorkflows(gitRepo, baseCommit, input.Event, input.Payload) if err != nil { return fmt.Errorf("DetectWorkflows: %w", err) } @@ -186,7 +188,22 @@ func notify(ctx context.Context, input *notifyInput) error { } } + if err := handleSchedules(ctx, schedules, commit, 
input); err != nil { + return err + } + + return handleWorkflows(ctx, detectedWorkflows, commit, input, ref) +} + +func handleWorkflows( + ctx context.Context, + detectedWorkflows []*actions_module.DetectedWorkflow, + commit *git.Commit, + input *notifyInput, + ref string, +) error { if len(detectedWorkflows) == 0 { + log.Trace("repo %s with commit %s couldn't find workflows", input.Repo.RepoPath(), commit.ID) return nil } @@ -350,3 +367,86 @@ func ifNeedApproval(ctx context.Context, run *actions_model.ActionRun, repo *rep log.Trace("need approval because it's the first time user %d triggered actions", user.ID) return true, nil } + +func handleSchedules( + ctx context.Context, + detectedWorkflows []*actions_module.DetectedWorkflow, + commit *git.Commit, + input *notifyInput, +) error { + if len(detectedWorkflows) == 0 { + log.Trace("repo %s with commit %s couldn't find schedules", input.Repo.RepoPath(), commit.ID) + return nil + } + + branch, err := commit.GetBranchName() + if err != nil { + return err + } + if branch != input.Repo.DefaultBranch { + log.Trace("commit branch is not default branch in repo") + return nil + } + + rows, _, err := actions_model.FindSchedules(ctx, actions_model.FindScheduleOptions{RepoID: input.Repo.ID}) + if err != nil { + log.Error("FindCrons: %v", err) + return err + } + + if len(rows) > 0 { + if err := actions_model.DeleteScheduleTaskByRepo(ctx, input.Repo.ID); err != nil { + log.Error("DeleteCronTaskByRepo: %v", err) + } + } + + p, err := json.Marshal(input.Payload) + if err != nil { + return fmt.Errorf("json.Marshal: %w", err) + } + + crons := make([]*actions_model.ActionSchedule, 0, len(detectedWorkflows)) + for _, dwf := range detectedWorkflows { + // Check cron job condition. Only working in default branch + workflow, err := model.ReadWorkflow(bytes.NewReader(dwf.Content)) + if err != nil { + log.Error("ReadWorkflow: %v", err) + continue + } + schedules := workflow.OnSchedule() + if len(schedules) == 0 { + log.Warn("no schedule event") + continue + } + + run := &actions_model.ActionSchedule{ + Title: strings.SplitN(commit.CommitMessage, "\n", 2)[0], + RepoID: input.Repo.ID, + OwnerID: input.Repo.OwnerID, + WorkflowID: dwf.EntryName, + TriggerUserID: input.Doer.ID, + Ref: input.Ref, + CommitSHA: commit.ID.String(), + Event: input.Event, + EventPayload: string(p), + Specs: schedules, + Content: dwf.Content, + } + + // cancel running jobs if the event is push + if run.Event == webhook_module.HookEventPush { + // cancel running jobs of the same workflow + if err := actions_model.CancelRunningJobs( + ctx, + run.RepoID, + run.Ref, + run.WorkflowID, + ); err != nil { + log.Error("CancelRunningJobs: %v", err) + } + } + crons = append(crons, run) + } + + return actions_model.CreateScheduleTask(ctx, crons) +} diff --git a/services/actions/schedule_tasks.go b/services/actions/schedule_tasks.go new file mode 100644 index 0000000000000..87131e0aab3cf --- /dev/null +++ b/services/actions/schedule_tasks.go @@ -0,0 +1,135 @@ +// Copyright 2023 The Gitea Authors. All rights reserved. 
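handleSchedules above only stores a schedule row when the workflow file actually declares an `on: schedule` trigger, which it checks by parsing the file with `model.ReadWorkflow` and calling `OnSchedule()`. A rough standalone illustration follows; the YAML is invented for the example, and `OnSchedule` is assumed to come from the act fork that Gitea vendors under the `github.com/nektos/act` import path (stock act only exposes the generic `On()` event list).

```go
package main

import (
	"bytes"
	"fmt"
	"log"

	"github.com/nektos/act/pkg/model"
)

// A made-up workflow file with a schedule trigger, purely for illustration.
var workflowFile = []byte(`
name: nightly
on:
  schedule:
    - cron: "0 4 * * *"
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - run: echo nightly
`)

func main() {
	wf, err := model.ReadWorkflow(bytes.NewReader(workflowFile))
	if err != nil {
		log.Fatalf("ReadWorkflow: %v", err)
	}
	// OnSchedule returns the cron specs declared under "on: schedule";
	// an empty result means the file is skipped, as in handleSchedules.
	specs := wf.OnSchedule()
	if len(specs) == 0 {
		fmt.Println("no schedule event")
		return
	}
	fmt.Println("schedule specs:", specs) // ["0 4 * * *"]
}
```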
+// SPDX-License-Identifier: MIT + +package actions + +import ( + "context" + "fmt" + "time" + + actions_model "code.gitea.io/gitea/models/actions" + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/timeutil" + webhook_module "code.gitea.io/gitea/modules/webhook" + + "github.com/nektos/act/pkg/jobparser" +) + +// StartScheduleTasks start the task +func StartScheduleTasks(ctx context.Context) error { + return startTasks(ctx) +} + +// startTasks retrieves specifications in pages, creates a schedule task for each specification, +// and updates the specification's next run time and previous run time. +// The function returns an error if there's an issue with finding or updating the specifications. +func startTasks(ctx context.Context) error { + // Set the page size + pageSize := 50 + + // Retrieve specs in pages until all specs have been retrieved + now := time.Now() + for page := 1; ; page++ { + // Retrieve the specs for the current page + specs, _, err := actions_model.FindSpecs(ctx, actions_model.FindSpecOptions{ + ListOptions: db.ListOptions{ + Page: page, + PageSize: pageSize, + }, + Next: now.Unix(), + }) + if err != nil { + return fmt.Errorf("find specs: %w", err) + } + + // Loop through each spec and create a schedule task for it + for _, row := range specs { + // cancel running jobs if the event is push + if row.Schedule.Event == webhook_module.HookEventPush { + // cancel running jobs of the same workflow + if err := actions_model.CancelRunningJobs( + ctx, + row.RepoID, + row.Schedule.Ref, + row.Schedule.WorkflowID, + ); err != nil { + log.Error("CancelRunningJobs: %v", err) + } + } + + if err := CreateScheduleTask(ctx, row.Schedule); err != nil { + log.Error("CreateScheduleTask: %v", err) + return err + } + + // Parse the spec + schedule, err := row.Parse() + if err != nil { + log.Error("Parse: %v", err) + return err + } + + // Update the spec's next run time and previous run time + row.Prev = row.Next + row.Next = timeutil.TimeStamp(schedule.Next(now.Add(1 * time.Minute)).Unix()) + if err := actions_model.UpdateScheduleSpec(ctx, row, "prev", "next"); err != nil { + log.Error("UpdateScheduleSpec: %v", err) + return err + } + } + + // Stop if all specs have been retrieved + if len(specs) < pageSize { + break + } + } + + return nil +} + +// CreateScheduleTask creates a scheduled task from a cron action schedule. +// It creates an action run based on the schedule, inserts it into the database, and creates commit statuses for each job. 
+func CreateScheduleTask(ctx context.Context, cron *actions_model.ActionSchedule) error { + // Create a new action run based on the schedule + run := &actions_model.ActionRun{ + Title: cron.Title, + RepoID: cron.RepoID, + OwnerID: cron.OwnerID, + WorkflowID: cron.WorkflowID, + TriggerUserID: cron.TriggerUserID, + Ref: cron.Ref, + CommitSHA: cron.CommitSHA, + Event: cron.Event, + EventPayload: cron.EventPayload, + Status: actions_model.StatusWaiting, + } + + // Parse the workflow specification from the cron schedule + workflows, err := jobparser.Parse(cron.Content) + if err != nil { + return err + } + + // Insert the action run and its associated jobs into the database + if err := actions_model.InsertRun(ctx, run, workflows); err != nil { + return err + } + + // Retrieve the jobs for the newly created action run + jobs, _, err := actions_model.FindRunJobs(ctx, actions_model.FindRunJobOptions{RunID: run.ID}) + if err != nil { + return err + } + + // Create commit statuses for each job + for _, job := range jobs { + if err := createCommitStatus(ctx, job); err != nil { + return err + } + } + + // Return nil if no errors occurred + return nil +} diff --git a/services/convert/release.go b/services/convert/release.go index d8aa46d4326a7..bfff53e62f496 100644 --- a/services/convert/release.go +++ b/services/convert/release.go @@ -22,6 +22,7 @@ func ToAPIRelease(ctx context.Context, repo *repo_model.Repository, r *repo_mode HTMLURL: r.HTMLURL(), TarURL: r.TarURL(), ZipURL: r.ZipURL(), + UploadURL: r.APIUploadURL(), IsDraft: r.IsDraft, IsPrerelease: r.IsPrerelease, CreatedAt: r.CreatedUnix.AsTime(), diff --git a/services/convert/release_test.go b/services/convert/release_test.go new file mode 100644 index 0000000000000..201b27e16d964 --- /dev/null +++ b/services/convert/release_test.go @@ -0,0 +1,28 @@ +// Copyright 2023 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package convert + +import ( + "testing" + + "code.gitea.io/gitea/models/db" + repo_model "code.gitea.io/gitea/models/repo" + "code.gitea.io/gitea/models/unittest" + + "github.com/stretchr/testify/assert" +) + +func TestRelease_ToRelease(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + repo1 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1}) + release1 := unittest.AssertExistsAndLoadBean(t, &repo_model.Release{ID: 1}) + release1.LoadAttributes(db.DefaultContext) + + apiRelease := ToAPIRelease(db.DefaultContext, repo1, release1) + assert.NotNil(t, apiRelease) + assert.EqualValues(t, 1, apiRelease.ID) + assert.EqualValues(t, "https://try.gitea.io/api/v1/repos/user2/repo1/releases/1", apiRelease.URL) + assert.EqualValues(t, "https://try.gitea.io/api/v1/repos/user2/repo1/releases/1/assets", apiRelease.UploadURL) +} diff --git a/services/cron/tasks_actions.go b/services/cron/tasks_actions.go index 30e8749a5e189..0875792503d52 100644 --- a/services/cron/tasks_actions.go +++ b/services/cron/tasks_actions.go @@ -18,6 +18,7 @@ func initActionsTasks() { registerStopZombieTasks() registerStopEndlessTasks() registerCancelAbandonedJobs() + registerScheduleTasks() } func registerStopZombieTasks() { @@ -49,3 +50,16 @@ func registerCancelAbandonedJobs() { return actions_service.CancelAbandonedJobs(ctx) }) } + +// registerScheduleTasks registers a scheduled task that runs every minute to start any due schedule tasks. +func registerScheduleTasks() { + // Register the task with a unique name, enabled status, and schedule for every minute. 
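The cron job registered below fires on an `@every 1m` descriptor, and the per-workflow specs stored for each schedule are parsed the same way before their next run time is computed (see `row.Parse()` and `schedule.Next` in startTasks above, which appear to be backed by a robfig/cron/v3 parser). A small standalone sketch of that parsing; the spec strings are examples only:

```go
package main

import (
	"fmt"
	"log"
	"time"

	"github.com/robfig/cron/v3"
)

func main() {
	// Five-field parser that also accepts descriptors such as "@every 1m",
	// covering the kinds of specs a workflow's "on: schedule" can declare.
	parser := cron.NewParser(cron.Minute | cron.Hour | cron.Dom | cron.Month | cron.Dow | cron.Descriptor)

	for _, spec := range []string{"0 4 * * *", "@every 1m"} {
		sched, err := parser.Parse(spec)
		if err != nil {
			log.Fatalf("parse %q: %v", spec, err)
		}
		// Look one minute past "now", mirroring the offset used in startTasks.
		now := time.Now()
		fmt.Printf("%-12q next run: %s\n", spec, sched.Next(now.Add(time.Minute)).Format(time.RFC3339))
	}
}
```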
+ RegisterTaskFatal("start_schedule_tasks", &BaseConfig{ + Enabled: true, + RunAtStart: false, + Schedule: "@every 1m", + }, func(ctx context.Context, _ *user_model.User, cfg Config) error { + // Call the function to start schedule tasks and pass the context. + return actions_service.StartScheduleTasks(ctx) + }) +} diff --git a/services/webhook/feishu.go b/services/webhook/feishu.go index 885cde3bc5195..089f51952fd78 100644 --- a/services/webhook/feishu.go +++ b/services/webhook/feishu.go @@ -97,23 +97,40 @@ func (f *FeishuPayload) Push(p *api.PushPayload) (api.Payloader, error) { // Issue implements PayloadConvertor Issue method func (f *FeishuPayload) Issue(p *api.IssuePayload) (api.Payloader, error) { - text, issueTitle, attachmentText, _ := getIssuesPayloadInfo(p, noneLinkFormatter, true) - - return newFeishuTextPayload(issueTitle + "\r\n" + text + "\r\n\r\n" + attachmentText), nil + title, link, by, operator, result, assignees := getIssuesInfo(p) + var res api.Payloader + if assignees != "" { + if p.Action == api.HookIssueAssigned || p.Action == api.HookIssueUnassigned || p.Action == api.HookIssueMilestoned { + res = newFeishuTextPayload(fmt.Sprintf("%s\n%s\n%s\n%s\n%s\n%s\n\n%s", title, link, by, operator, result, assignees, p.Issue.Body)) + } else { + res = newFeishuTextPayload(fmt.Sprintf("%s\n%s\n%s\n%s\n%s\n\n%s", title, link, by, operator, assignees, p.Issue.Body)) + } + } else { + res = newFeishuTextPayload(fmt.Sprintf("%s\n%s\n%s\n%s\n\n%s", title, link, by, operator, p.Issue.Body)) + } + return res, nil } // IssueComment implements PayloadConvertor IssueComment method func (f *FeishuPayload) IssueComment(p *api.IssueCommentPayload) (api.Payloader, error) { - text, issueTitle, _ := getIssueCommentPayloadInfo(p, noneLinkFormatter, true) - - return newFeishuTextPayload(issueTitle + "\r\n" + text + "\r\n\r\n" + p.Comment.Body), nil + title, link, by, operator := getIssuesCommentInfo(p) + return newFeishuTextPayload(fmt.Sprintf("%s\n%s\n%s\n%s\n\n%s", title, link, by, operator, p.Comment.Body)), nil } // PullRequest implements PayloadConvertor PullRequest method func (f *FeishuPayload) PullRequest(p *api.PullRequestPayload) (api.Payloader, error) { - text, issueTitle, attachmentText, _ := getPullRequestPayloadInfo(p, noneLinkFormatter, true) - - return newFeishuTextPayload(issueTitle + "\r\n" + text + "\r\n\r\n" + attachmentText), nil + title, link, by, operator, result, assignees := getPullRequestInfo(p) + var res api.Payloader + if assignees != "" { + if p.Action == api.HookIssueAssigned || p.Action == api.HookIssueUnassigned || p.Action == api.HookIssueMilestoned { + res = newFeishuTextPayload(fmt.Sprintf("%s\n%s\n%s\n%s\n%s\n%s\n\n%s", title, link, by, operator, result, assignees, p.PullRequest.Body)) + } else { + res = newFeishuTextPayload(fmt.Sprintf("%s\n%s\n%s\n%s\n%s\n\n%s", title, link, by, operator, assignees, p.PullRequest.Body)) + } + } else { + res = newFeishuTextPayload(fmt.Sprintf("%s\n%s\n%s\n%s\n\n%s", title, link, by, operator, p.PullRequest.Body)) + } + return res, nil } // Review implements PayloadConvertor Review method diff --git a/services/webhook/feishu_test.go b/services/webhook/feishu_test.go index 84549c1fa5764..a3182e82b09af 100644 --- a/services/webhook/feishu_test.go +++ b/services/webhook/feishu_test.go @@ -72,7 +72,7 @@ func TestFeishuPayload(t *testing.T) { require.NotNil(t, pl) require.IsType(t, &FeishuPayload{}, pl) - assert.Equal(t, "#2 crash\r\n[test/repo] Issue opened: #2 crash by user1\r\n\r\nissue body", pl.(*FeishuPayload).Content.Text) + 
assert.Equal(t, "[Issue-test/repo #2]: opened\ncrash\nhttp://localhost:3000/test/repo/issues/2\nIssue by user1\nOperator: user1\nAssignees: user1\n\nissue body", pl.(*FeishuPayload).Content.Text) p.Action = api.HookIssueClosed pl, err = d.Issue(p) @@ -80,7 +80,7 @@ func TestFeishuPayload(t *testing.T) { require.NotNil(t, pl) require.IsType(t, &FeishuPayload{}, pl) - assert.Equal(t, "#2 crash\r\n[test/repo] Issue closed: #2 crash by user1", pl.(*FeishuPayload).Content.Text) + assert.Equal(t, "[Issue-test/repo #2]: closed\ncrash\nhttp://localhost:3000/test/repo/issues/2\nIssue by user1\nOperator: user1\nAssignees: user1\n\nissue body", pl.(*FeishuPayload).Content.Text) }) t.Run("IssueComment", func(t *testing.T) { @@ -92,7 +92,7 @@ func TestFeishuPayload(t *testing.T) { require.NotNil(t, pl) require.IsType(t, &FeishuPayload{}, pl) - assert.Equal(t, "#2 crash\r\n[test/repo] New comment on issue #2 crash by user1\r\n\r\nmore info needed", pl.(*FeishuPayload).Content.Text) + assert.Equal(t, "[Comment-test/repo #2]: created\ncrash\nhttp://localhost:3000/test/repo/issues/2\nIssue by user1\nOperator: user1\n\nmore info needed", pl.(*FeishuPayload).Content.Text) }) t.Run("PullRequest", func(t *testing.T) { @@ -104,7 +104,7 @@ func TestFeishuPayload(t *testing.T) { require.NotNil(t, pl) require.IsType(t, &FeishuPayload{}, pl) - assert.Equal(t, "#12 Fix bug\r\n[test/repo] Pull request opened: #12 Fix bug by user1\r\n\r\nfixes bug #2", pl.(*FeishuPayload).Content.Text) + assert.Equal(t, "[PullRequest-test/repo #12]: opened\nFix bug\nhttp://localhost:3000/test/repo/pulls/12\nPullRequest by user1\nOperator: user1\nAssignees: user1\n\nfixes bug #2", pl.(*FeishuPayload).Content.Text) }) t.Run("PullRequestComment", func(t *testing.T) { @@ -116,7 +116,7 @@ func TestFeishuPayload(t *testing.T) { require.NotNil(t, pl) require.IsType(t, &FeishuPayload{}, pl) - assert.Equal(t, "#12 Fix bug\r\n[test/repo] New comment on pull request #12 Fix bug by user1\r\n\r\nchanges requested", pl.(*FeishuPayload).Content.Text) + assert.Equal(t, "[Comment-test/repo #12]: created\nFix bug\nhttp://localhost:3000/test/repo/pulls/12\nPullRequest by user1\nOperator: user1\n\nchanges requested", pl.(*FeishuPayload).Content.Text) }) t.Run("Review", func(t *testing.T) { diff --git a/services/webhook/general.go b/services/webhook/general.go index b9cc3dc8457be..986467bc99dba 100644 --- a/services/webhook/general.go +++ b/services/webhook/general.go @@ -28,6 +28,69 @@ func htmlLinkFormatter(url, text string) string { return fmt.Sprintf(`%s`, html.EscapeString(url), html.EscapeString(text)) } +// getPullRequestInfo gets the information for a pull request +func getPullRequestInfo(p *api.PullRequestPayload) (title, link, by, operator, operateResult, assignees string) { + title = fmt.Sprintf("[PullRequest-%s #%d]: %s\n%s", p.Repository.FullName, p.PullRequest.Index, p.Action, p.PullRequest.Title) + assignList := p.PullRequest.Assignees + assignStringList := make([]string, len(assignList)) + + for i, user := range assignList { + assignStringList[i] = user.UserName + } + if p.Action == api.HookIssueAssigned { + operateResult = fmt.Sprintf("%s assign this to %s", p.Sender.UserName, assignList[len(assignList)-1].UserName) + } else if p.Action == api.HookIssueUnassigned { + operateResult = fmt.Sprintf("%s unassigned this for someone", p.Sender.UserName) + } else if p.Action == api.HookIssueMilestoned { + operateResult = fmt.Sprintf("%s/milestone/%d", p.Repository.HTMLURL, p.PullRequest.Milestone.ID) + } + link = p.PullRequest.HTMLURL + by = 
fmt.Sprintf("PullRequest by %s", p.PullRequest.Poster.UserName) + if len(assignStringList) > 0 { + assignees = fmt.Sprintf("Assignees: %s", strings.Join(assignStringList, ", ")) + } + operator = fmt.Sprintf("Operator: %s", p.Sender.UserName) + return title, link, by, operator, operateResult, assignees +} + +// getIssuesInfo gets the information for an issue +func getIssuesInfo(p *api.IssuePayload) (issueTitle, link, by, operator, operateResult, assignees string) { + issueTitle = fmt.Sprintf("[Issue-%s #%d]: %s\n%s", p.Repository.FullName, p.Issue.Index, p.Action, p.Issue.Title) + assignList := p.Issue.Assignees + assignStringList := make([]string, len(assignList)) + + for i, user := range assignList { + assignStringList[i] = user.UserName + } + if p.Action == api.HookIssueAssigned { + operateResult = fmt.Sprintf("%s assign this to %s", p.Sender.UserName, assignList[len(assignList)-1].UserName) + } else if p.Action == api.HookIssueUnassigned { + operateResult = fmt.Sprintf("%s unassigned this for someone", p.Sender.UserName) + } else if p.Action == api.HookIssueMilestoned { + operateResult = fmt.Sprintf("%s/milestone/%d", p.Repository.HTMLURL, p.Issue.Milestone.ID) + } + link = p.Issue.HTMLURL + by = fmt.Sprintf("Issue by %s", p.Issue.Poster.UserName) + if len(assignStringList) > 0 { + assignees = fmt.Sprintf("Assignees: %s", strings.Join(assignStringList, ", ")) + } + operator = fmt.Sprintf("Operator: %s", p.Sender.UserName) + return issueTitle, link, by, operator, operateResult, assignees +} + +// getIssuesCommentInfo gets the information for a comment +func getIssuesCommentInfo(p *api.IssueCommentPayload) (title, link, by, operator string) { + title = fmt.Sprintf("[Comment-%s #%d]: %s\n%s", p.Repository.FullName, p.Issue.Index, p.Action, p.Issue.Title) + link = p.Issue.HTMLURL + if p.IsPull { + by = fmt.Sprintf("PullRequest by %s", p.Issue.Poster.UserName) + } else { + by = fmt.Sprintf("Issue by %s", p.Issue.Poster.UserName) + } + operator = fmt.Sprintf("Operator: %s", p.Sender.UserName) + return title, link, by, operator +} + func getIssuesPayloadInfo(p *api.IssuePayload, linkFormatter linkFormatter, withSender bool) (string, string, string, int) { repoLink := linkFormatter(p.Repository.HTMLURL, p.Repository.FullName) issueTitle := fmt.Sprintf("#%d %s", p.Index, p.Issue.Title) diff --git a/services/webhook/general_test.go b/services/webhook/general_test.go index ba58ca4f90d6e..64bd72f5a05a4 100644 --- a/services/webhook/general_test.go +++ b/services/webhook/general_test.go @@ -123,6 +123,10 @@ func issueTestPayload() *api.IssuePayload { HTMLURL: "http://localhost:3000/test/repo/issues/2", Title: "crash", Body: "issue body", + Poster: &api.User{ + UserName: "user1", + AvatarURL: "http://localhost:3000/user1/avatar", + }, Assignees: []*api.User{ { UserName: "user1", @@ -161,7 +165,11 @@ func issueCommentTestPayload() *api.IssueCommentPayload { URL: "http://localhost:3000/api/v1/repos/test/repo/issues/2", HTMLURL: "http://localhost:3000/test/repo/issues/2", Title: "crash", - Body: "this happened", + Poster: &api.User{ + UserName: "user1", + AvatarURL: "http://localhost:3000/user1/avatar", + }, + Body: "this happened", }, } } @@ -190,6 +198,10 @@ func pullRequestCommentTestPayload() *api.IssueCommentPayload { HTMLURL: "http://localhost:3000/test/repo/pulls/12", Title: "Fix bug", Body: "fixes bug #2", + Poster: &api.User{ + UserName: "user1", + AvatarURL: "http://localhost:3000/user1/avatar", + }, }, IsPull: true, } @@ -254,6 +266,10 @@ func pullRequestTestPayload() *api.PullRequestPayload 
{ Title: "Fix bug", Body: "fixes bug #2", Mergeable: true, + Poster: &api.User{ + UserName: "user1", + AvatarURL: "http://localhost:3000/user1/avatar", + }, Assignees: []*api.User{ { UserName: "user1", diff --git a/templates/admin/user/edit.tmpl b/templates/admin/user/edit.tmpl index 4fea418e6f9bb..e99a4532d3b8d 100644 --- a/templates/admin/user/edit.tmpl +++ b/templates/admin/user/edit.tmpl @@ -186,7 +186,7 @@
- +
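Several handlers in this patch (the avatar deletion routes above, for instance) switch from `ctx.Redirect` to `ctx.JSONRedirect`, so the response can be consumed by a fetch-based form submission instead of a plain 302. A rough sketch of the server side of that pattern, under the assumption that `JSONRedirect` answers with a JSON body whose `redirect` field the frontend follows:

```go
package main

import (
	"encoding/json"
	"net/http"
)

// jsonRedirect approximates what ctx.JSONRedirect appears to do in the
// hunks above: reply with a JSON body carrying the target location for the
// frontend to navigate to. The "redirect" field name is an assumption.
func jsonRedirect(w http.ResponseWriter, location string) {
	w.Header().Set("Content-Type", "application/json")
	_ = json.NewEncoder(w).Encode(map[string]string{"redirect": location})
}

func main() {
	http.HandleFunc("/avatar/delete", func(w http.ResponseWriter, r *http.Request) {
		// ... delete the avatar here ...
		jsonRedirect(w, "/user/settings")
	})
	_ = http.ListenAndServe("127.0.0.1:8080", nil)
}
```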
diff --git a/templates/devtest/flex-list.tmpl b/templates/devtest/flex-list.tmpl index 37f3f04004b9c..f9087a5714446 100644 --- a/templates/devtest/flex-list.tmpl +++ b/templates/devtest/flex-list.tmpl @@ -48,6 +48,7 @@
Very loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong content + Truncate very loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong content
diff --git a/templates/org/settings/options.tmpl b/templates/org/settings/options.tmpl index 683b88646770f..03827e4c3f999 100644 --- a/templates/org/settings/options.tmpl +++ b/templates/org/settings/options.tmpl @@ -94,7 +94,7 @@
- +
diff --git a/templates/repo/branch_dropdown.tmpl b/templates/repo/branch_dropdown.tmpl index 4ec2fd557c76a..a011111d5b7cb 100644 --- a/templates/repo/branch_dropdown.tmpl +++ b/templates/repo/branch_dropdown.tmpl @@ -67,7 +67,7 @@
{{/* show dummy elements before Vue component is mounted, this code must match the code in BranchTagSelector.vue */}} -
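For reference, the reworked Feishu texts asserted in the tests above end up wrapped in Feishu's `text` message envelope before being posted. The struct below approximates that payload shape; only `Content.Text` is visible in this patch, and the `msg_type`/`content` JSON layout follows Feishu's documented custom-bot format, so treat it as an assumption rather than the exact Gitea type.

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"
)

// feishuText approximates the body sent to a Feishu webhook endpoint.
type feishuText struct {
	MsgType string `json:"msg_type"`
	Content struct {
		Text string `json:"text"`
	} `json:"content"`
}

func main() {
	var p feishuText
	p.MsgType = "text"
	// Same multi-line layout the updated Issue test expects.
	p.Content.Text = "[Issue-test/repo #2]: opened\ncrash\nhttp://localhost:3000/test/repo/issues/2\n" +
		"Issue by user1\nOperator: user1\nAssignees: user1\n\nissue body"

	out, err := json.MarshalIndent(&p, "", "  ")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(out))
}
```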