Merge branch 'main' into allspice/official-reviews-ui

Commit 06053b89aa by william-allspice, 2024-08-30 10:22:24 -05:00 (committed by GitHub)
35 changed files with 4045 additions and 730 deletions

View File

@@ -27,20 +27,20 @@ func DefaultLocker() Locker {
 // Lock tries to acquire a lock for the given key, it uses the default locker.
 // Read the documentation of Locker.Lock for more information about the behavior.
-func Lock(ctx context.Context, key string) (context.Context, ReleaseFunc, error) {
+func Lock(ctx context.Context, key string) (ReleaseFunc, error) {
 	return DefaultLocker().Lock(ctx, key)
 }

 // TryLock tries to acquire a lock for the given key, it uses the default locker.
 // Read the documentation of Locker.TryLock for more information about the behavior.
-func TryLock(ctx context.Context, key string) (bool, context.Context, ReleaseFunc, error) {
+func TryLock(ctx context.Context, key string) (bool, ReleaseFunc, error) {
 	return DefaultLocker().TryLock(ctx, key)
 }

 // LockAndDo tries to acquire a lock for the given key and then calls the given function.
 // It uses the default locker, and it will return an error if failed to acquire the lock.
 func LockAndDo(ctx context.Context, key string, f func(context.Context) error) error {
-	ctx, release, err := Lock(ctx, key)
+	release, err := Lock(ctx, key)
 	if err != nil {
 		return err
 	}
@@ -52,7 +52,7 @@ func LockAndDo(ctx context.Context, key string, f func(context.Context) error) error {
 // TryLockAndDo tries to acquire a lock for the given key and then calls the given function.
 // It uses the default locker, and it will return false if failed to acquire the lock.
 func TryLockAndDo(ctx context.Context, key string, f func(context.Context) error) (bool, error) {
-	ok, ctx, release, err := TryLock(ctx, key)
+	ok, release, err := TryLock(ctx, key)
 	if err != nil {
 		return false, err
 	}
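For illustration only, a minimal caller of the helpers above under the new signatures. This sketch is not part of the commit; the import path, the key name, and the doWork callback are assumed placeholders.

package example

import (
	"context"

	"code.gitea.io/gitea/modules/globallock"
)

// doWork stands in for whatever must run while the lock is held.
func doWork(ctx context.Context) error { return nil }

// doWithLock acquires the lock for a key, runs the callback, and releases the
// lock when the callback returns, using the package-level helper above.
func doWithLock(ctx context.Context) error {
	return globallock.LockAndDo(ctx, "example-key", func(ctx context.Context) error {
		return doWork(ctx)
	})
}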

View File

@@ -5,56 +5,34 @@ package globallock

 import (
 	"context"
-	"fmt"
 )

 type Locker interface {
 	// Lock tries to acquire a lock for the given key, it blocks until the lock is acquired or the context is canceled.
 	//
-	// Lock returns a new context which should be used in the following code.
-	// The new context will be canceled when the lock is released or lost - yes, it's possible to lose a lock.
-	// For example, it lost the connection to the redis server while holding the lock.
-	// If it fails to acquire the lock, the returned context will be the same as the input context.
-	//
 	// Lock returns a ReleaseFunc to release the lock, it cannot be nil.
 	// It's always safe to call this function even if it fails to acquire the lock, and it will do nothing in that case.
 	// And it's also safe to call it multiple times, but it will only release the lock once.
 	// That's why it's called ReleaseFunc, not UnlockFunc.
 	// But be aware that it's not safe to not call it at all; it could lead to a memory leak.
 	// So a recommended pattern is to use defer to call it:
-	//   ctx, release, err := locker.Lock(ctx, "key")
+	//   release, err := locker.Lock(ctx, "key")
 	//   if err != nil {
 	//     return err
 	//   }
 	//   defer release()
-	// The ReleaseFunc will return the original context which was used to acquire the lock.
-	// It's useful when you want to continue to do something after releasing the lock.
-	// At that time, the ctx will be canceled, and you can use the returned context by the ReleaseFunc to continue:
-	//   ctx, release, err := locker.Lock(ctx, "key")
-	//   if err != nil {
-	//     return err
-	//   }
-	//   defer release()
-	//   doSomething(ctx)
-	//   ctx = release()
-	//   doSomethingElse(ctx)
-	// Please ignore it and use `defer release()` instead if you don't need this, to avoid forgetting to release the lock.
 	//
 	// Lock returns an error if failed to acquire the lock.
 	// Be aware that even the context is not canceled, it's still possible to fail to acquire the lock.
 	// For example, redis is down, or it reached the maximum number of tries.
-	Lock(ctx context.Context, key string) (context.Context, ReleaseFunc, error)
+	Lock(ctx context.Context, key string) (ReleaseFunc, error)

 	// TryLock tries to acquire a lock for the given key, it returns immediately.
 	// It follows the same pattern as Lock, but it doesn't block.
 	// And if it fails to acquire the lock because it's already locked, not other reasons like redis is down,
 	// it will return false without any error.
-	TryLock(ctx context.Context, key string) (bool, context.Context, ReleaseFunc, error)
+	TryLock(ctx context.Context, key string) (bool, ReleaseFunc, error)
 }

 // ReleaseFunc is a function that releases a lock.
-// It returns the original context which was used to acquire the lock.
-type ReleaseFunc func() context.Context
-
-// ErrLockReleased is used as context cause when a lock is released
-var ErrLockReleased = fmt.Errorf("lock released")
+type ReleaseFunc func()
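As a usage sketch of the interface above (not part of the commit): the defer-release pattern the doc comment recommends, plus the TryLock "skip if already locked" pattern. The import path, the locker variable, and the key are assumed.

package example

import (
	"context"

	"code.gitea.io/gitea/modules/globallock"
)

// withLock shows the recommended pattern: always defer release(), even on the
// error path, since ReleaseFunc is never nil and releasing twice is harmless.
func withLock(ctx context.Context, locker globallock.Locker, key string) error {
	release, err := locker.Lock(ctx, key)
	defer release() // safe even if Lock failed; it only releases once
	if err != nil {
		return err
	}
	// ... critical section ...
	return nil
}

// tryWithLock shows TryLock: ok is false (with a nil error) when the key is
// already locked, so the caller can simply skip the work.
func tryWithLock(ctx context.Context, locker globallock.Locker, key string) (bool, error) {
	ok, release, err := locker.TryLock(ctx, key)
	defer release()
	if err != nil || !ok {
		return false, err
	}
	// ... critical section ...
	return true, nil
}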

View File

@@ -47,27 +47,24 @@ func TestLocker(t *testing.T) {
 func testLocker(t *testing.T, locker Locker) {
 	t.Run("lock", func(t *testing.T) {
 		parentCtx := context.Background()
-		ctx, release, err := locker.Lock(parentCtx, "test")
+		release, err := locker.Lock(parentCtx, "test")
 		defer release()
-		assert.NotEqual(t, parentCtx, ctx) // new context should be returned
 		assert.NoError(t, err)

 		func() {
-			parentCtx, cancel := context.WithTimeout(context.Background(), time.Second)
+			ctx, cancel := context.WithTimeout(context.Background(), time.Second)
 			defer cancel()
-			ctx, release, err := locker.Lock(parentCtx, "test")
+			release, err := locker.Lock(ctx, "test")
 			defer release()
 			assert.Error(t, err)
-			assert.Equal(t, parentCtx, ctx) // should return the same context
 		}()

 		release()
-		assert.Error(t, ctx.Err())

 		func() {
-			_, release, err := locker.Lock(context.Background(), "test")
+			release, err := locker.Lock(context.Background(), "test")
 			defer release()
 			assert.NoError(t, err)
@@ -76,29 +73,26 @@ func testLocker(t *testing.T, locker Locker) {
 	t.Run("try lock", func(t *testing.T) {
 		parentCtx := context.Background()
-		ok, ctx, release, err := locker.TryLock(parentCtx, "test")
+		ok, release, err := locker.TryLock(parentCtx, "test")
 		defer release()
 		assert.True(t, ok)
-		assert.NotEqual(t, parentCtx, ctx) // new context should be returned
 		assert.NoError(t, err)

 		func() {
-			parentCtx, cancel := context.WithTimeout(context.Background(), time.Second)
+			ctx, cancel := context.WithTimeout(context.Background(), time.Second)
 			defer cancel()
-			ok, ctx, release, err := locker.TryLock(parentCtx, "test")
+			ok, release, err := locker.TryLock(ctx, "test")
 			defer release()
 			assert.False(t, ok)
 			assert.NoError(t, err)
-			assert.Equal(t, parentCtx, ctx) // should return the same context
 		}()

 		release()
-		assert.Error(t, ctx.Err())

 		func() {
-			ok, _, release, _ := locker.TryLock(context.Background(), "test")
+			ok, release, _ := locker.TryLock(context.Background(), "test")
 			defer release()
 			assert.True(t, ok)
@@ -107,7 +101,7 @@ func testLocker(t *testing.T, locker Locker) {
 	t.Run("wait and acquired", func(t *testing.T) {
 		ctx := context.Background()
-		_, release, err := locker.Lock(ctx, "test")
+		release, err := locker.Lock(ctx, "test")
 		require.NoError(t, err)

 		wg := &sync.WaitGroup{}
@@ -115,7 +109,7 @@ func testLocker(t *testing.T, locker Locker) {
 		go func() {
 			defer wg.Done()
 			started := time.Now()
-			_, release, err := locker.Lock(context.Background(), "test") // should be blocked for seconds
+			release, err := locker.Lock(context.Background(), "test") // should be blocked for seconds
 			defer release()
 			assert.Greater(t, time.Since(started), time.Second)
 			assert.NoError(t, err)
@@ -127,34 +121,15 @@ func testLocker(t *testing.T, locker Locker) {
 		wg.Wait()
 	})

-	t.Run("continue after release", func(t *testing.T) {
-		ctx := context.Background()
-
-		ctxBeforeLock := ctx
-		ctx, release, err := locker.Lock(ctx, "test")
-
-		require.NoError(t, err)
-		assert.NoError(t, ctx.Err())
-		assert.NotEqual(t, ctxBeforeLock, ctx)
-
-		ctxBeforeRelease := ctx
-		ctx = release()
-		assert.NoError(t, ctx.Err())
-		assert.Error(t, ctxBeforeRelease.Err())
-
-		// so it can continue with ctx to do more work
-	})
-
 	t.Run("multiple release", func(t *testing.T) {
 		ctx := context.Background()

-		_, release1, err := locker.Lock(ctx, "test")
+		release1, err := locker.Lock(ctx, "test")
 		require.NoError(t, err)

 		release1()

-		_, release2, err := locker.Lock(ctx, "test")
+		release2, err := locker.Lock(ctx, "test")
 		defer release2()
 		require.NoError(t, err)
@@ -163,7 +138,7 @@ func testLocker(t *testing.T, locker Locker) {
 		// and it shouldn't affect the other lock
 		release1()

-		ok, _, release3, err := locker.TryLock(ctx, "test")
+		ok, release3, err := locker.TryLock(ctx, "test")
 		defer release3()
 		require.NoError(t, err)
 		// It should be able to acquire the lock;
@@ -184,28 +159,23 @@ func testRedisLocker(t *testing.T, locker *redisLocker) {
 		// Otherwise, it will affect other tests.
 		t.Run("close", func(t *testing.T) {
 			assert.NoError(t, locker.Close())
-			_, _, err := locker.Lock(context.Background(), "test")
+			_, err := locker.Lock(context.Background(), "test")
 			assert.Error(t, err)
 		})
 	}()

 	t.Run("failed extend", func(t *testing.T) {
-		ctx, release, err := locker.Lock(context.Background(), "test")
+		release, err := locker.Lock(context.Background(), "test")
 		defer release()
 		require.NoError(t, err)

 		// It simulates that there are some problems with extending like network issues or redis server down.
 		v, ok := locker.mutexM.Load("test")
 		require.True(t, ok)
-		m := v.(*redisMutex)
-		_, _ = m.mutex.Unlock() // release it to make it impossible to extend
+		m := v.(*redsync.Mutex)
+		_, _ = m.Unlock() // release it to make it impossible to extend

-		select {
-		case <-time.After(redisLockExpiry + time.Second):
-			t.Errorf("lock should be expired")
-		case <-ctx.Done():
-			var errTaken *redsync.ErrTaken
-			assert.ErrorAs(t, context.Cause(ctx), &errTaken)
-		}
+		// In current design, callers can't know the lock can't be extended.
+		// Just keep this case to improve the test coverage.
 	})
 }

View File

@@ -19,18 +19,13 @@ func NewMemoryLocker() Locker {
 	return &memoryLocker{}
 }

-func (l *memoryLocker) Lock(ctx context.Context, key string) (context.Context, ReleaseFunc, error) {
-	originalCtx := ctx
-
+func (l *memoryLocker) Lock(ctx context.Context, key string) (ReleaseFunc, error) {
 	if l.tryLock(key) {
-		ctx, cancel := context.WithCancelCause(ctx)
 		releaseOnce := sync.Once{}
-		return ctx, func() context.Context {
+		return func() {
 			releaseOnce.Do(func() {
 				l.locks.Delete(key)
-				cancel(ErrLockReleased)
 			})
-			return originalCtx
 		}, nil
 	}

@@ -39,39 +34,31 @@ func (l *memoryLocker) Lock(ctx context.Context, key string) (context.Context, ReleaseFunc, error) {
 	for {
 		select {
 		case <-ctx.Done():
-			return ctx, func() context.Context { return originalCtx }, ctx.Err()
+			return func() {}, ctx.Err()
 		case <-ticker.C:
 			if l.tryLock(key) {
-				ctx, cancel := context.WithCancelCause(ctx)
 				releaseOnce := sync.Once{}
-				return ctx, func() context.Context {
+				return func() {
 					releaseOnce.Do(func() {
 						l.locks.Delete(key)
-						cancel(ErrLockReleased)
 					})
-					return originalCtx
 				}, nil
 			}
 		}
 	}
 }

-func (l *memoryLocker) TryLock(ctx context.Context, key string) (bool, context.Context, ReleaseFunc, error) {
-	originalCtx := ctx
-
+func (l *memoryLocker) TryLock(_ context.Context, key string) (bool, ReleaseFunc, error) {
 	if l.tryLock(key) {
-		ctx, cancel := context.WithCancelCause(ctx)
 		releaseOnce := sync.Once{}
-		return true, ctx, func() context.Context {
+		return true, func() {
 			releaseOnce.Do(func() {
-				cancel(ErrLockReleased)
 				l.locks.Delete(key)
 			})
-			return originalCtx
 		}, nil
 	}

-	return false, ctx, func() context.Context { return originalCtx }, nil
+	return false, func() {}, nil
 }

 func (l *memoryLocker) tryLock(key string) bool {
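The release functions above wrap their cleanup in sync.Once, so calling release() repeatedly only deletes the key a single time. A stripped-down sketch of that idiom, illustrative and not taken from this commit:

package example

import "sync"

// newRelease returns a function that runs cleanup at most once, no matter how
// many times it is called — the same idea the memory locker uses for ReleaseFunc.
func newRelease(cleanup func()) func() {
	var once sync.Once
	return func() {
		once.Do(cleanup)
	}
}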

View File

@@ -48,21 +48,21 @@ func NewRedisLocker(connection string) Locker {
 	return l
 }

-func (l *redisLocker) Lock(ctx context.Context, key string) (context.Context, ReleaseFunc, error) {
+func (l *redisLocker) Lock(ctx context.Context, key string) (ReleaseFunc, error) {
 	return l.lock(ctx, key, 0)
 }

-func (l *redisLocker) TryLock(ctx context.Context, key string) (bool, context.Context, ReleaseFunc, error) {
-	ctx, f, err := l.lock(ctx, key, 1)
+func (l *redisLocker) TryLock(ctx context.Context, key string) (bool, ReleaseFunc, error) {
+	f, err := l.lock(ctx, key, 1)

 	var (
 		errTaken     *redsync.ErrTaken
 		errNodeTaken *redsync.ErrNodeTaken
 	)
 	if errors.As(err, &errTaken) || errors.As(err, &errNodeTaken) {
-		return false, ctx, f, nil
+		return false, f, nil
 	}
-	return err == nil, ctx, f, err
+	return err == nil, f, err
 }

 // Close closes the locker.
@@ -76,18 +76,11 @@ func (l *redisLocker) Close() error {
 	return nil
 }

-type redisMutex struct {
-	mutex  *redsync.Mutex
-	cancel context.CancelCauseFunc
-}
-
-func (l *redisLocker) lock(ctx context.Context, key string, tries int) (context.Context, ReleaseFunc, error) {
+func (l *redisLocker) lock(ctx context.Context, key string, tries int) (ReleaseFunc, error) {
 	if l.closed.Load() {
-		return ctx, func() context.Context { return ctx }, fmt.Errorf("locker is closed")
+		return func() {}, fmt.Errorf("locker is closed")
 	}

-	originalCtx := ctx
-
 	options := []redsync.Option{
 		redsync.WithExpiry(redisLockExpiry),
 	}
@@ -96,18 +89,13 @@ func (l *redisLocker) lock(ctx context.Context, key string, tries int) (ReleaseFunc, error) {
 	}
 	mutex := l.rs.NewMutex(redisLockKeyPrefix+key, options...)
 	if err := mutex.LockContext(ctx); err != nil {
-		return ctx, func() context.Context { return originalCtx }, err
+		return func() {}, err
 	}

-	ctx, cancel := context.WithCancelCause(ctx)
-	l.mutexM.Store(key, &redisMutex{
-		mutex:  mutex,
-		cancel: cancel,
-	})
+	l.mutexM.Store(key, mutex)

 	releaseOnce := sync.Once{}
-	return ctx, func() context.Context {
+	return func() {
 		releaseOnce.Do(func() {
 			l.mutexM.Delete(key)
@@ -115,10 +103,7 @@ func (l *redisLocker) lock(ctx context.Context, key string, tries int) (ReleaseFunc, error) {
 			// if it failed to unlock, it will be released automatically after the lock expires.
 			// Do not call mutex.UnlockContext(ctx) here, or it will fail to release when ctx has timed out.
 			_, _ = mutex.Unlock()
-			cancel(ErrLockReleased)
 		})
-		return originalCtx
 	}, nil
 }

@@ -128,16 +113,15 @@ func (l *redisLocker) startExtend() {
 		return
 	}

-	toExtend := make([]*redisMutex, 0)
+	toExtend := make([]*redsync.Mutex, 0)
 	l.mutexM.Range(func(_, value any) bool {
-		m := value.(*redisMutex)
+		m := value.(*redsync.Mutex)

 		// Extend the lock if it is not expired.
 		// Although the mutex will be removed from the map before it is released,
 		// it still can be expired because of a failed extension.
-		// If it happens, the cancel function should have been called,
-		// so it does not need to be extended anymore.
-		if time.Now().After(m.mutex.Until()) {
+		// If it happens, it does not need to be extended anymore.
+		if time.Now().After(m.Until()) {
 			return true
 		}

@@ -145,9 +129,8 @@ func (l *redisLocker) startExtend() {
 		return true
 	})
 	for _, v := range toExtend {
-		if ok, err := v.mutex.Extend(); !ok {
-			v.cancel(err)
-		}
+		// If it failed to extend, it will be released automatically after the lock expires.
+		_, _ = v.Extend()
 	}

 	time.AfterFunc(redisLockExpiry/2, l.startExtend)
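A self-contained sketch of the acquire/extend/release lifecycle the redis locker implements with redsync. It is illustrative, not the commit's code: the real locker shares one time.AfterFunc timer across all held mutexes, while this sketch uses a per-lock goroutine, and lockExpiry stands in for redisLockExpiry.

package example

import (
	"context"
	"sync"
	"time"

	"github.com/go-redsync/redsync/v4"
)

const lockExpiry = 30 * time.Second // assumed value for the sketch

// holdAndExtend acquires a redsync mutex with an expiry, extends it at half
// the expiry while it is held, and falls back to natural expiration if an
// extension fails — the same fallback the diff above relies on.
func holdAndExtend(ctx context.Context, rs *redsync.Redsync, key string) (func(), error) {
	mutex := rs.NewMutex(key, redsync.WithExpiry(lockExpiry))
	if err := mutex.LockContext(ctx); err != nil {
		return func() {}, err
	}

	done := make(chan struct{})
	go func() {
		ticker := time.NewTicker(lockExpiry / 2)
		defer ticker.Stop()
		for {
			select {
			case <-done:
				return
			case <-ticker.C:
				// If extending fails, Redis releases the lock once the expiry elapses.
				_, _ = mutex.Extend()
			}
		}
	}()

	releaseOnce := sync.Once{}
	return func() {
		releaseOnce.Do(func() {
			close(done)
			_, _ = mutex.Unlock()
		})
	}, nil
}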

View File

@@ -628,6 +628,7 @@ org_still_own_repo=組織はまだ1つ以上のリポジトリを所有してい
 org_still_own_packages=組織はまだ1つ以上のパッケージを所有しています。 先にそれらを削除してください。
 target_branch_not_exist=ターゲットのブランチが存在していません。
+target_ref_not_exist=ターゲットの ref が存在しません %s
 admin_cannot_delete_self=あなたが管理者である場合、自分自身を削除することはできません。最初に管理者権限を削除してください。
@@ -1273,6 +1274,7 @@ commit_graph.color=カラー
 commit.contained_in=このコミットが含まれているのは:
 commit.contained_in_default_branch=このコミットはデフォルトブランチに含まれています
 commit.load_referencing_branches_and_tags=このコミットを参照しているブランチやタグを取得
+commit.load_tags_failed=内部エラーによりタグの読み込みに失敗しました
 blame=Blame
 download_file=ファイルをダウンロード
 normal_view=通常表示
@@ -2185,6 +2187,7 @@ settings.transfer_in_progress=現在進行中の移転があります。この
 settings.transfer_notices_1=- 個人ユーザーに移転すると、あなたはリポジトリへのアクセス権を失います。
 settings.transfer_notices_2=- あなたが所有(または共同で所有)している組織に移転すると、リポジトリへのアクセス権は維持されます。
 settings.transfer_notices_3=- プライベートリポジトリを個人ユーザーに移転した場合は、最低限そのユーザーが読み取り権限を持つよう設定されます (必要に応じて権限が変更されます)。
+settings.transfer_notices_4=- リポジトリが組織に属しており、それを他の組織や個人に移転した場合、そのリポジトリのイシューと組織のプロジェクトボードとのリンクが失われます。
 settings.transfer_owner=新しいオーナー
 settings.transfer_perform=転送を実行
 settings.transfer_started=このリポジトリは転送のためにマークされており、「%s」からの確認を待っています
@@ -3699,6 +3702,11 @@ workflow.disable_success=ワークフロー '%s' が無効になりました。
 workflow.enable=ワークフローを有効にする
 workflow.enable_success=ワークフロー '%s' が有効になりました。
 workflow.disabled=ワークフローは無効です。
+workflow.run=ワークフローを実行
+workflow.not_found=ワークフロー '%s' が見つかりません。
+workflow.run_success=ワークフロー '%s' は正常に実行されました。
+workflow.from_ref=使用するワークフローの取得元
+workflow.has_workflow_dispatch=このワークフローには workflow_dispatch イベントトリガーがあります。
 need_approval_desc=フォークプルリクエストのワークフローを実行するには承認が必要です。

View File

@@ -206,7 +206,7 @@ buttons.list.unordered.tooltip=添加待办清单
 buttons.list.ordered.tooltip=添加编号列表
 buttons.list.task.tooltip=添加任务列表
 buttons.mention.tooltip=提及用户或团队
-buttons.ref.tooltip=引用一个问题或拉取请求
+buttons.ref.tooltip=引用一个问题或合并请求
 buttons.switch_to_legacy.tooltip=使用旧版编辑器
 buttons.enable_monospace_font=启用等宽字体
 buttons.disable_monospace_font=禁用等宽字体
@@ -1752,8 +1752,9 @@ compare.compare_head=比较
 pulls.desc=启用合并请求和代码评审。
 pulls.new=创建合并请求
 pulls.new.blocked_user=无法创建合并请求,因为您已被仓库所有者屏蔽。
+pulls.new.must_collaborator=您必须是仓库的协作者才能创建合并请求。
 pulls.edit.already_changed=无法保存对合并请求的更改。其内容似乎已被其他用户更改。 请刷新页面并重新编辑以避免覆盖他们的更改
-pulls.view=查看拉取请求
+pulls.view=查看合并请求
 pulls.compare_changes=创建合并请求
 pulls.allow_edits_from_maintainers=允许维护者编辑
 pulls.allow_edits_from_maintainers_desc=对基础分支有写入权限的用户也可以推送到此分支
@@ -1830,8 +1831,8 @@ pulls.wrong_commit_id=提交 id 必须在目标分支 上
 pulls.no_merge_desc=由于未启用合并选项,此合并请求无法被合并。
 pulls.no_merge_helper=在仓库设置中启用合并选项或者手工合并请求。
 pulls.no_merge_wip=这个合并请求无法合并,因为被标记为尚未完成的工作。
-pulls.no_merge_not_ready=拉取请求尚未准备好合并,请检查审核状态和状态检查。
-pulls.no_merge_access=您无权合并此拉取请求。
+pulls.no_merge_not_ready=合并请求尚未准备好合并,请检查审核状态和状态检查。
+pulls.no_merge_access=您无权合并此合并请求。
 pulls.merge_pull_request=创建合并提交
 pulls.rebase_merge_pull_request=变基后快进
 pulls.rebase_merge_commit_pull_request=变基后创建合并提交
@@ -1876,6 +1877,7 @@ pulls.cmd_instruction_checkout_title=检出
 pulls.cmd_instruction_checkout_desc=从你的仓库中检出一个新的分支并测试变更。
 pulls.cmd_instruction_merge_title=合并
 pulls.cmd_instruction_merge_desc=合并变更并更新到 Gitea 上
+pulls.cmd_instruction_merge_warning=警告:此操作不能合并该合并请求,因为“自动检测手动合并”未启用
 pulls.clear_merge_message=清除合并信息
 pulls.clear_merge_message_hint=清除合并消息只会删除提交消息内容,并保留生成的 git 附加内容如“Co-Authored-By …”。
@@ -1888,11 +1890,11 @@ pulls.auto_merge_cancel_schedule=取消自动合并
 pulls.auto_merge_not_scheduled=此合并请求没有计划自动合并。
 pulls.auto_merge_canceled_schedule=此合并请求的自动合并已取消。
-pulls.auto_merge_newly_scheduled_comment=`已于 %[1]s 设置此拉取请求在所有检查成功后自动合并`
+pulls.auto_merge_newly_scheduled_comment=`已于 %[1]s 设置此合并请求在所有检查成功后自动合并`
 pulls.auto_merge_canceled_schedule_comment=`已于 %[1]s 取消了自动合并设置 `
-pulls.delete.title=删除此拉取请求?
-pulls.delete.text=你真的要删除这个拉取请求吗? (这将永久删除所有内容。如果你打算将内容存档,请考虑关闭它)
+pulls.delete.title=删除此合并请求?
+pulls.delete.text=你真的要删除这个合并请求吗? (这将永久删除所有内容。如果你打算将内容存档,请考虑关闭它)
 pulls.recently_pushed_new_branches=您已经于%[2]s推送了分支 <strong>%[1]s</strong>
@@ -2125,7 +2127,7 @@ settings.allow_only_contributors_to_track_time=仅允许成员跟踪时间
 settings.pulls_desc=启用合并请求
 settings.pulls.ignore_whitespace=忽略空白冲突
 settings.pulls.enable_autodetect_manual_merge=启用自动检测手动合并 (注意:在某些特殊情况下可能发生错误判断)
-settings.pulls.allow_rebase_update=允许通过变基更新拉取请求分支
+settings.pulls.allow_rebase_update=允许通过变基更新合并请求分支
 settings.pulls.default_delete_branch_after_merge=默认合并后删除合并请求分支
 settings.pulls.default_allow_edits_from_maintainers=默认开启允许维护者编辑
 settings.releases_desc=启用发布
@@ -2375,7 +2377,7 @@ settings.protect_status_check_matched=匹配
 settings.protect_invalid_status_check_pattern=无效的状态检查规则:“%s”。
 settings.protect_no_valid_status_check_patterns=没有有效的状态检查规则。
 settings.protect_required_approvals=所需的批准:
-settings.protect_required_approvals_desc=只允许合并有足够审核人数的拉取请求。
+settings.protect_required_approvals_desc=只允许合并有足够审核人数的合并请求。
 settings.dismiss_stale_approvals=取消过时的批准
 settings.dismiss_stale_approvals_desc=当新的提交更改合并请求内容被推送到分支时,旧的批准将被撤销。
 settings.ignore_stale_approvals=忽略过期批准
@@ -2400,7 +2402,7 @@ settings.block_rejected_reviews=拒绝审核阻止了此合并
 settings.block_rejected_reviews_desc=如果官方审查人员要求作出改动,即使有足够的批准,合并也不允许。
 settings.block_on_official_review_requests=有官方审核阻止了代码合并
 settings.block_on_official_review_requests_desc=处于评审状态时,即使有足够的批准,也不能合并。
-settings.block_outdated_branch=如果拉取请求已经过时,阻止合并
+settings.block_outdated_branch=如果合并请求已经过时,阻止合并
 settings.block_outdated_branch_desc=当头部分支落后基础分支时,不能合并。
 settings.default_branch_desc=请选择一个默认的分支用于合并请求和提交:
 settings.merge_style_desc=合并方式

package-lock.json (generated): 4130 lines changed; file diff suppressed because it is too large.

View File

@@ -17,12 +17,12 @@
     "add-asset-webpack-plugin": "3.0.0",
     "ansi_up": "6.0.2",
     "asciinema-player": "3.8.0",
-    "chart.js": "4.4.3",
+    "chart.js": "4.4.4",
     "chartjs-adapter-dayjs-4": "1.0.4",
     "chartjs-plugin-zoom": "2.0.1",
     "clippie": "4.1.3",
     "css-loader": "7.1.2",
-    "dayjs": "1.11.12",
+    "dayjs": "1.11.13",
    "dropzone": "6.0.0-beta.2",
     "easymde": "2.18.0",
     "esbuild-loader": "4.2.2",
@@ -34,17 +34,17 @@
     "katex": "0.16.11",
     "license-checker-webpack-plugin": "0.2.1",
     "mermaid": "11.0.2",
-    "mini-css-extract-plugin": "2.9.0",
+    "mini-css-extract-plugin": "2.9.1",
     "minimatch": "10.0.1",
-    "monaco-editor": "0.50.0",
+    "monaco-editor": "0.51.0",
     "monaco-editor-webpack-plugin": "7.1.0",
     "pdfobject": "2.3.0",
-    "postcss": "8.4.40",
+    "postcss": "8.4.41",
     "postcss-loader": "8.1.1",
-    "postcss-nesting": "12.1.5",
+    "postcss-nesting": "13.0.0",
     "sortablejs": "1.15.2",
     "swagger-ui-dist": "5.17.14",
-    "tailwindcss": "3.4.7",
+    "tailwindcss": "3.4.10",
     "temporal-polyfill": "0.2.5",
     "throttle-debounce": "5.0.2",
     "tinycolor2": "1.6.0",
@@ -54,20 +54,20 @@
     "typescript": "5.5.4",
     "uint8-to-base64": "0.2.0",
     "vanilla-colorful": "0.7.2",
-    "vue": "3.4.35",
+    "vue": "3.4.38",
     "vue-bar-graph": "2.1.0",
     "vue-chartjs": "5.3.1",
     "vue-loader": "17.4.2",
-    "webpack": "5.93.0",
+    "webpack": "5.94.0",
     "webpack-cli": "5.1.4",
     "wrap-ansi": "9.0.0"
   },
   "devDependencies": {
-    "@eslint-community/eslint-plugin-eslint-comments": "4.3.0",
+    "@eslint-community/eslint-plugin-eslint-comments": "4.4.0",
-    "@playwright/test": "1.45.3",
+    "@playwright/test": "1.46.1",
     "@stoplight/spectral-cli": "6.11.1",
-    "@stylistic/eslint-plugin-js": "2.6.1",
+    "@stylistic/eslint-plugin-js": "2.6.5",
-    "@stylistic/stylelint-plugin": "3.0.0",
+    "@stylistic/stylelint-plugin": "3.0.1",
     "@types/dropzone": "5.7.8",
     "@types/jquery": "3.5.30",
     "@types/katex": "0.16.7",
@@ -78,11 +78,11 @@
     "@types/throttle-debounce": "5.0.2",
     "@types/tinycolor2": "1.4.6",
     "@types/toastify-js": "1.12.3",
-    "@typescript-eslint/eslint-plugin": "8.0.0",
+    "@typescript-eslint/eslint-plugin": "8.3.0",
-    "@typescript-eslint/parser": "8.0.0",
+    "@typescript-eslint/parser": "8.3.0",
     "@vitejs/plugin-vue": "5.1.2",
     "eslint": "8.57.0",
-    "eslint-import-resolver-typescript": "3.6.1",
+    "eslint-import-resolver-typescript": "3.6.3",
     "eslint-plugin-array-func": "4.0.0",
     "eslint-plugin-deprecation": "3.0.0",
     "eslint-plugin-github": "5.0.1",
@@ -91,14 +91,14 @@
     "eslint-plugin-no-use-extend-native": "0.5.0",
     "eslint-plugin-playwright": "1.6.2",
     "eslint-plugin-regexp": "2.6.0",
-    "eslint-plugin-sonarjs": "1.0.4",
+    "eslint-plugin-sonarjs": "2.0.1",
     "eslint-plugin-unicorn": "55.0.0",
     "eslint-plugin-vitest": "0.4.1",
     "eslint-plugin-vitest-globals": "1.5.0",
     "eslint-plugin-vue": "9.27.0",
     "eslint-plugin-vue-scoped-css": "2.8.1",
-    "eslint-plugin-wc": "2.1.0",
+    "eslint-plugin-wc": "2.1.1",
-    "happy-dom": "14.12.3",
+    "happy-dom": "15.3.1",
     "markdownlint-cli": "0.41.0",
     "nolyfill": "1.0.39",
     "postcss-html": "1.7.0",
@@ -107,8 +107,8 @@
     "stylelint-declaration-strict-value": "1.10.6",
     "stylelint-value-no-unknown-custom-properties": "6.0.1",
     "svgo": "3.3.2",
-    "type-fest": "4.23.0",
+    "type-fest": "4.26.0",
-    "updates": "16.3.7",
+    "updates": "16.4.0",
     "vite-string-plugin": "1.3.4",
     "vitest": "2.0.5"
   },
@@ -131,6 +131,10 @@
     "object.values": "npm:@nolyfill/object.values@^1",
     "safe-regex-test": "npm:@nolyfill/safe-regex-test@^1",
     "string.prototype.includes": "npm:@nolyfill/string.prototype.includes@^1",
-    "is-core-module": "npm:@nolyfill/is-core-module@^1"
+    "is-core-module": "npm:@nolyfill/is-core-module@^1",
+    "array.prototype.findlast": "npm:@nolyfill/array.prototype.findlast@^1",
+    "array.prototype.tosorted": "npm:@nolyfill/array.prototype.tosorted@^1",
+    "string.prototype.matchall": "npm:@nolyfill/string.prototype.matchall@^1",
+    "string.prototype.repeat": "npm:@nolyfill/string.prototype.repeat@^1"
   }
 }

poetry.lock (generated): 10 lines changed.
View File

@@ -42,13 +42,13 @@ six = ">=1.13.0"

 [[package]]
 name = "djlint"
-version = "1.34.1"
+version = "1.34.2"
 description = "HTML Template Linter and Formatter"
 optional = false
-python-versions = ">=3.8.0,<4.0.0"
+python-versions = "<4.0.0,>=3.8.0"
 files = [
-    {file = "djlint-1.34.1-py3-none-any.whl", hash = "sha256:96ff1c464fb6f061130ebc88663a2ea524d7ec51f4b56221a2b3f0320a3cfce8"},
+    {file = "djlint-1.34.2-py3-none-any.whl", hash = "sha256:4825389e395eb77371857c77f547fa5ebd1a644b1bc4fe9fed19d49a2786b9e5"},
-    {file = "djlint-1.34.1.tar.gz", hash = "sha256:db93fa008d19eaadb0454edf1704931d14469d48508daba2df9941111f408346"},
+    {file = "djlint-1.34.2.tar.gz", hash = "sha256:db9b2e59203a452b83532499bc243c749279090b905cc1f657973f78e7a31ddd"},
 ]

 [package.dependencies]
@@ -357,4 +357,4 @@ dev = ["doc8", "flake8", "flake8-import-order", "rstcheck[sphinx]", "sphinx"]
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.10"
-content-hash = "cd2ff218e9f27a464dfbc8ec2387824a90f4360e04c3f2e58cc375796b7df33a"
+content-hash = "af89bce0c442463621b6e536f9b94c31e188e1662c2caa84372c0858a2ee7d5c"

View File

@@ -5,7 +5,7 @@ package-mode = false
 python = "^3.10"

 [tool.poetry.group.dev.dependencies]
-djlint = "1.34.1"
+djlint = "1.34.2"
 yamllint = "1.35.1"

 [tool.djlint]

View File

@@ -13,7 +13,6 @@ import (
 	"code.gitea.io/gitea/models/organization"
 	repo_model "code.gitea.io/gitea/models/repo"
 	"code.gitea.io/gitea/modules/base"
-	"code.gitea.io/gitea/modules/git"
 	"code.gitea.io/gitea/modules/log"
 	"code.gitea.io/gitea/modules/markup"
 	"code.gitea.io/gitea/modules/markup/markdown"
@@ -42,6 +41,14 @@ func Home(ctx *context.Context) {
 		return
 	}

+	home(ctx, false)
+}
+
+func Repositories(ctx *context.Context) {
+	home(ctx, true)
+}
+
+func home(ctx *context.Context, viewRepositories bool) {
 	org := ctx.Org.Organization

 	ctx.Data["PageIsUserProfile"] = true
@@ -101,10 +108,34 @@ func Home(ctx *context.Context) {
 	private := ctx.FormOptionalBool("private")
 	ctx.Data["IsPrivate"] = private

+	err := shared_user.LoadHeaderCount(ctx)
+	if err != nil {
+		ctx.ServerError("LoadHeaderCount", err)
+		return
+	}
+
+	opts := &organization.FindOrgMembersOpts{
+		OrgID:       org.ID,
+		PublicOnly:  ctx.Org.PublicMemberOnly,
+		ListOptions: db.ListOptions{Page: 1, PageSize: 25},
+	}
+	members, _, err := organization.FindOrgMembers(ctx, opts)
+	if err != nil {
+		ctx.ServerError("FindOrgMembers", err)
+		return
+	}
+	ctx.Data["Members"] = members
+	ctx.Data["Teams"] = ctx.Org.Teams
+	ctx.Data["DisableNewPullMirrors"] = setting.Mirror.DisableNewPull
+	ctx.Data["ShowMemberAndTeamTab"] = ctx.Org.IsMember || len(members) > 0
+
+	if !prepareOrgProfileReadme(ctx, viewRepositories) {
+		ctx.Data["PageIsViewRepositories"] = true
+	}
+
 	var (
 		repos []*repo_model.Repository
 		count int64
-		err   error
 	)
 	repos, count, err = repo_model.SearchRepository(ctx, &repo_model.SearchRepoOptions{
 		ListOptions: db.ListOptions{
@@ -129,29 +160,8 @@ func Home(ctx *context.Context) {
 		return
 	}

-	opts := &organization.FindOrgMembersOpts{
-		OrgID:       org.ID,
-		PublicOnly:  ctx.Org.PublicMemberOnly,
-		ListOptions: db.ListOptions{Page: 1, PageSize: 25},
-	}
-	members, _, err := organization.FindOrgMembers(ctx, opts)
-	if err != nil {
-		ctx.ServerError("FindOrgMembers", err)
-		return
-	}
-
 	ctx.Data["Repos"] = repos
 	ctx.Data["Total"] = count
-	ctx.Data["Members"] = members
-	ctx.Data["Teams"] = ctx.Org.Teams
-	ctx.Data["DisableNewPullMirrors"] = setting.Mirror.DisableNewPull
-	ctx.Data["PageIsViewRepositories"] = true
-
-	err = shared_user.LoadHeaderCount(ctx)
-	if err != nil {
-		ctx.ServerError("LoadHeaderCount", err)
-		return
-	}

 	pager := context.NewPagination(int(count), setting.UI.User.RepoPagingNum, page, 5)
 	pager.SetDefaultParams(ctx)
@@ -173,18 +183,16 @@ func Home(ctx *context.Context) {
 	}
 	ctx.Data["Page"] = pager

-	ctx.Data["ShowMemberAndTeamTab"] = ctx.Org.IsMember || len(members) > 0
-
-	profileDbRepo, profileGitRepo, profileReadmeBlob, profileClose := shared_user.FindUserProfileReadme(ctx, ctx.Doer)
-	defer profileClose()
-	prepareOrgProfileReadme(ctx, profileGitRepo, profileDbRepo, profileReadmeBlob)
-
 	ctx.HTML(http.StatusOK, tplOrgHome)
 }

-func prepareOrgProfileReadme(ctx *context.Context, profileGitRepo *git.Repository, profileDbRepo *repo_model.Repository, profileReadme *git.Blob) {
-	if profileGitRepo == nil || profileReadme == nil {
-		return
+func prepareOrgProfileReadme(ctx *context.Context, viewRepositories bool) bool {
+	profileDbRepo, profileGitRepo, profileReadme, profileClose := shared_user.FindUserProfileReadme(ctx, ctx.Doer)
+	defer profileClose()
+
+	ctx.Data["HasProfileReadme"] = profileReadme != nil
+	if profileGitRepo == nil || profileReadme == nil || viewRepositories {
+		return false
 	}

 	if bytes, err := profileReadme.GetBlobContent(setting.UI.MaxDisplayFileSize); err != nil {
@@ -206,4 +214,7 @@ func prepareOrgProfileReadme(ctx *context.Context, profileGitRepo *git.Repository, profileDbRepo *repo_model.Repository, profileReadme *git.Blob) {
 			ctx.Data["ProfileReadme"] = profileContent
 		}
 	}
+
+	ctx.Data["PageIsViewOverview"] = true
+	return true
 }

View File

@@ -54,9 +54,9 @@ func Members(ctx *context.Context) {
 		return
 	}

-	err = shared_user.LoadHeaderCount(ctx)
+	err = shared_user.RenderOrgHeader(ctx)
 	if err != nil {
-		ctx.ServerError("LoadHeaderCount", err)
+		ctx.ServerError("RenderOrgHeader", err)
 		return
 	}

View File

@@ -59,9 +59,9 @@ func Teams(ctx *context.Context) {
 	}
 	ctx.Data["Teams"] = ctx.Org.Teams

-	err := shared_user.LoadHeaderCount(ctx)
+	err := shared_user.RenderOrgHeader(ctx)
 	if err != nil {
-		ctx.ServerError("LoadHeaderCount", err)
+		ctx.ServerError("RenderOrgHeader", err)
 		return
 	}

View File

@@ -162,3 +162,15 @@ func LoadHeaderCount(ctx *context.Context) error {

 	return nil
 }
+
+func RenderOrgHeader(ctx *context.Context) error {
+	if err := LoadHeaderCount(ctx); err != nil {
+		return err
+	}
+
+	_, _, profileReadmeBlob, profileClose := FindUserProfileReadme(ctx, ctx.Doer)
+	defer profileClose()
+	ctx.Data["HasProfileReadme"] = profileReadmeBlob != nil
+
+	return nil
+}

View File

@@ -883,10 +883,15 @@ func registerRoutes(m *web.Router) {
 		m.Post("/teams/{team}/action/repo/{action}", org.TeamsRepoAction)
 	}, context.OrgAssignment(true, false, true))

+	// require admin permission
+	m.Group("/{org}", func() {
+		m.Get("/teams/-/search", org.SearchTeam)
+	}, context.OrgAssignment(true, false, false, true))
+
+	// require owner permission
 	m.Group("/{org}", func() {
 		m.Get("/teams/new", org.NewTeam)
 		m.Post("/teams/new", web.Bind(forms.CreateTeamForm{}), org.NewTeamPost)
-		m.Get("/teams/-/search", org.SearchTeam)
 		m.Get("/teams/{team}/edit", org.EditTeam)
 		m.Post("/teams/{team}/edit", web.Bind(forms.CreateTeamForm{}), org.EditTeamPost)
 		m.Post("/teams/{team}/delete", org.DeleteTeam)
@@ -995,6 +1000,8 @@ func registerRoutes(m *web.Router) {
 		}, context.PackageAssignment(), reqPackageAccess(perm.AccessModeRead))
 	}

+	m.Get("/repositories", org.Repositories)
+
 	m.Group("/projects", func() {
 		m.Group("", func() {
 			m.Get("", org.Projects)

View File

@@ -1,7 +1,12 @@
 <div class="ui container">
 	<overflow-menu class="ui secondary pointing tabular borderless menu tw-mb-4">
 		<div class="overflow-menu-items">
-			<a class="{{if .PageIsViewRepositories}}active {{end}}item" href="{{$.Org.HomeLink}}">
+			{{if .HasProfileReadme}}
+				<a class="{{if .PageIsViewOverview}}active {{end}}item" href="{{$.Org.HomeLink}}">
+					{{svg "octicon-info"}} {{ctx.Locale.Tr "user.overview"}}
+				</a>
+			{{end}}
+			<a class="{{if .PageIsViewRepositories}}active {{end}}item" href="{{$.Org.HomeLink}}{{if .HasProfileReadme}}/-/repositories{{end}}">
 				{{svg "octicon-repo"}} {{ctx.Locale.Tr "user.repositories"}}
 				{{if .RepoCount}}
 					<div class="ui small label">{{.RepoCount}}</div>

View File

@@ -29,7 +29,7 @@
     "sourceMap": true,
     "types": [
       "vitest/globals",
-      "./types.d.ts",
+      "./web_src/js/globals.d.ts",
     ],
   }
 }
} }

types.d.ts (vendored): file deleted, 31 lines removed.
View File

@@ -1,31 +0,0 @@
-declare module '*.svg' {
-  const value: string;
-  export default value;
-}
-
-declare module '*.css' {
-  const value: string;
-  export default value;
-}
-
-declare let __webpack_public_path__: string;
-
-interface Window {
-  config: import('./web_src/js/types.ts').Config;
-  $: typeof import('@types/jquery'),
-  jQuery: typeof import('@types/jquery'),
-  htmx: typeof import('htmx.org'),
-  _globalHandlerErrors: Array<ErrorEvent & PromiseRejectionEvent> & {
-    _inited: boolean,
-    push: (e: ErrorEvent & PromiseRejectionEvent) => void | number,
-  },
-}
-
-declare module 'htmx.org/dist/htmx.esm.js' {
-  const value = await import('htmx.org');
-  export default value;
-}
-
-interface Element {
-  _tippy: import('tippy.js').Instance;
-}

web_src/js/globals.d.ts (vendored): new file, 61 lines added.
View File

@@ -0,0 +1,61 @@
+declare module '*.svg' {
+  const value: string;
+  export default value;
+}
+
+declare module '*.css' {
+  const value: string;
+  export default value;
+}
+
+declare let __webpack_public_path__: string;
+
+declare module 'htmx.org/dist/htmx.esm.js' {
+  const value = await import('htmx.org');
+  export default value;
+}
+
+declare module 'uint8-to-base64' {
+  export function encode(arrayBuffer: ArrayBuffer): string;
+  export function decode(base64str: string): ArrayBuffer;
+}
+
+declare module 'swagger-ui-dist/swagger-ui-es-bundle.js' {
+  const value = await import('swagger-ui-dist');
+  export default value.SwaggerUIBundle;
+}
+
+interface JQuery {
+  api: any, // fomantic
+  areYouSure: any, // jquery.are-you-sure
+  dimmer: any, // fomantic
+  dropdown: any; // fomantic
+  modal: any; // fomantic
+  tab: any; // fomantic
+  transition: any, // fomantic
+}
+
+interface JQueryStatic {
+  api: any, // fomantic
+}
+
+interface Element {
+  _tippy: import('tippy.js').Instance;
+}
+
+type Writable<T> = { -readonly [K in keyof T]: T[K] };
+
+interface Window {
+  config: import('./web_src/js/types.ts').Config;
+  $: typeof import('@types/jquery'),
+  jQuery: typeof import('@types/jquery'),
+  htmx: Omit<typeof import('htmx.org/dist/htmx.esm.js').default, 'config'> & {
+    config?: Writable<typeof import('htmx.org').default.config>,
+  },
+  ui?: any,
+  _globalHandlerErrors: Array<ErrorEvent & PromiseRejectionEvent> & {
+    _inited: boolean,
+    push: (e: ErrorEvent & PromiseRejectionEvent) => void | number,
+  },
+  __webpack_public_path__: string;
+}

View File

@@ -1,20 +1,21 @@
 import {showErrorToast} from './modules/toast.ts';
-
-// https://github.com/bigskysoftware/idiomorph#htmx
-import 'idiomorph/dist/idiomorph-ext.js';
+import 'idiomorph/dist/idiomorph-ext.js'; // https://github.com/bigskysoftware/idiomorph#htmx
+import type {HtmxResponseInfo} from 'htmx.org';
+
+type HtmxEvent = Event & {detail: HtmxResponseInfo};

 // https://htmx.org/reference/#config
 window.htmx.config.requestClass = 'is-loading';
 window.htmx.config.scrollIntoViewOnBoost = false;

 // https://htmx.org/events/#htmx:sendError
-document.body.addEventListener('htmx:sendError', (event) => {
+document.body.addEventListener('htmx:sendError', (event: HtmxEvent) => {
   // TODO: add translations
   showErrorToast(`Network error when calling ${event.detail.requestConfig.path}`);
 });

 // https://htmx.org/events/#htmx:responseError
-document.body.addEventListener('htmx:responseError', (event) => {
+document.body.addEventListener('htmx:responseError', (event: HtmxEvent) => {
   // TODO: add translations
   showErrorToast(`Error ${event.detail.xhr.status} when calling ${event.detail.requestConfig.path}`);
 });

View File

@@ -98,12 +98,12 @@ initGiteaFomantic();
 initDirAuto();
 initSubmitEventPolyfill();

-function callInitFunctions(functions) {
+function callInitFunctions(functions: (() => any)[]) {
   // Start performance trace by accessing a URL by "https://localhost/?_ui_performance_trace=1" or "https://localhost/?key=value&_ui_performance_trace=1"
   // It is a quick check, no side effect so no need to do slow URL parsing.
   const initStart = performance.now();
   if (window.location.search.includes('_ui_performance_trace=1')) {
-    let results = [];
+    let results: {name: string, dur: number}[] = [];
     for (const func of functions) {
       const start = performance.now();
       func();

View File

@@ -1,12 +1,12 @@
 import {AnsiUp} from 'ansi_up';

-const replacements = [
+const replacements: Array<[RegExp, string]> = [
   [/\x1b\[\d+[A-H]/g, ''], // Move cursor, treat them as no-op
   [/\x1b\[\d?[JK]/g, '\r'], // Erase display/line, treat them as a Carriage Return
 ];

 // render ANSI to HTML
-export function renderAnsi(line) {
+export function renderAnsi(line: string): string {
   // create a fresh ansi_up instance because otherwise previous renders can influence
   // the output of future renders, because ansi_up is stateful and remembers things like
   // unclosed opening tags for colors.

View File

@@ -8,7 +8,7 @@ window.addEventListener('load', async () => {

   // Make the page's protocol be at the top of the schemes list
   const proto = window.location.protocol.slice(0, -1);
-  spec.schemes.sort((a, b) => {
+  spec.schemes.sort((a: string, b: string) => {
     if (a === proto) return -1;
     if (b === proto) return 1;
     return 0;

View File

@@ -17,7 +17,7 @@ test('svgParseOuterInner', () => {
 test('SvgIcon', () => {
   const root = document.createElement('div');
   createApp({render: () => h(SvgIcon, {name: 'octicon-link', size: 24, class: 'base', className: 'extra'})}).mount(root);
-  const node = root.firstChild;
+  const node = root.firstChild as Element;
   expect(node.nodeName).toEqual('svg');
   expect(node.getAttribute('width')).toEqual('24');
   expect(node.getAttribute('height')).toEqual('24');

View File

@@ -29,3 +29,10 @@ export type RequestData = string | FormData | URLSearchParams;

 export type RequestOpts = {
   data?: RequestData,
 } & RequestInit;
+
+export type IssueData = {
+  owner: string,
+  repo: string,
+  type: string,
+  index: string,
+}

View File

@@ -95,23 +95,20 @@ test('toAbsoluteUrl', () => {
 });

 test('encodeURLEncodedBase64, decodeURLEncodedBase64', () => {
-  // TextEncoder is Node.js API while Uint8Array is jsdom API and their outputs are not
-  // structurally comparable, so we convert to array to compare. The conversion can be
-  // removed once https://github.com/jsdom/jsdom/issues/2524 is resolved.
   const encoder = new TextEncoder();
   const uint8array = encoder.encode.bind(encoder);
   expect(encodeURLEncodedBase64(uint8array('AA?'))).toEqual('QUE_'); // standard base64: "QUE/"
   expect(encodeURLEncodedBase64(uint8array('AA~'))).toEqual('QUF-'); // standard base64: "QUF+"
-  expect(Array.from(decodeURLEncodedBase64('QUE/'))).toEqual(Array.from(uint8array('AA?')));
-  expect(Array.from(decodeURLEncodedBase64('QUF+'))).toEqual(Array.from(uint8array('AA~')));
-  expect(Array.from(decodeURLEncodedBase64('QUE_'))).toEqual(Array.from(uint8array('AA?')));
-  expect(Array.from(decodeURLEncodedBase64('QUF-'))).toEqual(Array.from(uint8array('AA~')));
+  expect(new Uint8Array(decodeURLEncodedBase64('QUE/'))).toEqual(uint8array('AA?'));
+  expect(new Uint8Array(decodeURLEncodedBase64('QUF+'))).toEqual(uint8array('AA~'));
+  expect(new Uint8Array(decodeURLEncodedBase64('QUE_'))).toEqual(uint8array('AA?'));
+  expect(new Uint8Array(decodeURLEncodedBase64('QUF-'))).toEqual(uint8array('AA~'));
   expect(encodeURLEncodedBase64(uint8array('a'))).toEqual('YQ'); // standard base64: "YQ=="
-  expect(Array.from(decodeURLEncodedBase64('YQ'))).toEqual(Array.from(uint8array('a')));
-  expect(Array.from(decodeURLEncodedBase64('YQ=='))).toEqual(Array.from(uint8array('a')));
+  expect(new Uint8Array(decodeURLEncodedBase64('YQ'))).toEqual(uint8array('a'));
+  expect(new Uint8Array(decodeURLEncodedBase64('YQ=='))).toEqual(uint8array('a'));
 });

 test('file detection', () => {

View File

@ -1,13 +1,14 @@
import {encode, decode} from 'uint8-to-base64'; import {encode, decode} from 'uint8-to-base64';
import type {IssueData} from './types.ts';
// transform /path/to/file.ext to file.ext // transform /path/to/file.ext to file.ext
export function basename(path) { export function basename(path: string): string {
const lastSlashIndex = path.lastIndexOf('/'); const lastSlashIndex = path.lastIndexOf('/');
return lastSlashIndex < 0 ? path : path.substring(lastSlashIndex + 1); return lastSlashIndex < 0 ? path : path.substring(lastSlashIndex + 1);
} }
// transform /path/to/file.ext to .ext // transform /path/to/file.ext to .ext
export function extname(path) { export function extname(path: string): string {
const lastSlashIndex = path.lastIndexOf('/'); const lastSlashIndex = path.lastIndexOf('/');
const lastPointIndex = path.lastIndexOf('.'); const lastPointIndex = path.lastIndexOf('.');
if (lastSlashIndex > lastPointIndex) return ''; if (lastSlashIndex > lastPointIndex) return '';
@ -15,54 +16,54 @@ export function extname(path) {
} }
// test whether a variable is an object // test whether a variable is an object
export function isObject(obj) { export function isObject(obj: any): boolean {
return Object.prototype.toString.call(obj) === '[object Object]'; return Object.prototype.toString.call(obj) === '[object Object]';
} }
// returns whether a dark theme is enabled // returns whether a dark theme is enabled
export function isDarkTheme() { export function isDarkTheme(): boolean {
const style = window.getComputedStyle(document.documentElement); const style = window.getComputedStyle(document.documentElement);
return style.getPropertyValue('--is-dark-theme').trim().toLowerCase() === 'true'; return style.getPropertyValue('--is-dark-theme').trim().toLowerCase() === 'true';
} }
// strip <tags> from a string // strip <tags> from a string
export function stripTags(text) { export function stripTags(text: string): string {
return text.replace(/<[^>]*>?/g, ''); return text.replace(/<[^>]*>?/g, '');
} }
export function parseIssueHref(href) { export function parseIssueHref(href: string): IssueData {
const path = (href || '').replace(/[#?].*$/, ''); const path = (href || '').replace(/[#?].*$/, '');
const [_, owner, repo, type, index] = /([^/]+)\/([^/]+)\/(issues|pulls)\/([0-9]+)/.exec(path) || []; const [_, owner, repo, type, index] = /([^/]+)\/([^/]+)\/(issues|pulls)\/([0-9]+)/.exec(path) || [];
return {owner, repo, type, index}; return {owner, repo, type, index};
} }
// parse a URL, either relative '/path' or absolute 'https://localhost/path' // parse a URL, either relative '/path' or absolute 'https://localhost/path'
export function parseUrl(str) { export function parseUrl(str: string): URL {
return new URL(str, str.startsWith('http') ? undefined : window.location.origin); return new URL(str, str.startsWith('http') ? undefined : window.location.origin);
} }
// return current locale chosen by user // return current locale chosen by user
export function getCurrentLocale() { export function getCurrentLocale(): string {
return document.documentElement.lang; return document.documentElement.lang;
} }
// given a month (0-11), returns it in the document's language // given a month (0-11), returns it in the document's language
export function translateMonth(month) { export function translateMonth(month: number) {
return new Date(Date.UTC(2022, month, 12)).toLocaleString(getCurrentLocale(), {month: 'short', timeZone: 'UTC'}); return new Date(Date.UTC(2022, month, 12)).toLocaleString(getCurrentLocale(), {month: 'short', timeZone: 'UTC'});
} }
// given a weekday (0-6, Sunday to Saturday), returns it in the document's language // given a weekday (0-6, Sunday to Saturday), returns it in the document's language
export function translateDay(day) { export function translateDay(day: number) {
return new Date(Date.UTC(2022, 7, day)).toLocaleString(getCurrentLocale(), {weekday: 'short', timeZone: 'UTC'}); return new Date(Date.UTC(2022, 7, day)).toLocaleString(getCurrentLocale(), {weekday: 'short', timeZone: 'UTC'});
} }
// convert a Blob to a DataURI // convert a Blob to a DataURI
export function blobToDataURI(blob) { export function blobToDataURI(blob: Blob): Promise<string> {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
try { try {
const reader = new FileReader(); const reader = new FileReader();
reader.addEventListener('load', (e) => { reader.addEventListener('load', (e) => {
resolve(e.target.result); resolve(e.target.result as string);
}); });
reader.addEventListener('error', () => { reader.addEventListener('error', () => {
reject(new Error('FileReader failed')); reject(new Error('FileReader failed'));
@@ -75,7 +76,7 @@ export function blobToDataURI(blob) {
} }
// convert image Blob to another mime-type format. // convert image Blob to another mime-type format.
export function convertImage(blob, mime) { export function convertImage(blob: Blob, mime: string): Promise<Blob> {
return new Promise(async (resolve, reject) => { return new Promise(async (resolve, reject) => {
try { try {
const img = new Image(); const img = new Image();
@@ -104,7 +105,7 @@ export function convertImage(blob, mime) {
}); });
} }
export function toAbsoluteUrl(url) { export function toAbsoluteUrl(url: string): string {
if (url.startsWith('http://') || url.startsWith('https://')) { if (url.startsWith('http://') || url.startsWith('https://')) {
return url; return url;
} }
@@ -118,15 +119,15 @@ export function toAbsoluteUrl(url) {
} }
// Encode an ArrayBuffer into a URLEncoded base64 string. // Encode an ArrayBuffer into a URLEncoded base64 string.
export function encodeURLEncodedBase64(arrayBuffer) { export function encodeURLEncodedBase64(arrayBuffer: ArrayBuffer): string {
return encode(arrayBuffer) return encode(arrayBuffer)
.replace(/\+/g, '-') .replace(/\+/g, '-')
.replace(/\//g, '_') .replace(/\//g, '_')
.replace(/=/g, ''); .replace(/=/g, '');
} }
// Decode a URLEncoded base64 to an ArrayBuffer string. // Decode a URLEncoded base64 to an ArrayBuffer.
export function decodeURLEncodedBase64(base64url) { export function decodeURLEncodedBase64(base64url: string): ArrayBuffer {
return decode(base64url return decode(base64url
.replace(/_/g, '/') .replace(/_/g, '/')
.replace(/-/g, '+')); .replace(/-/g, '+'));
@@ -135,20 +136,22 @@ export function decodeURLEncodedBase64(base64url) {
const domParser = new DOMParser(); const domParser = new DOMParser();
const xmlSerializer = new XMLSerializer(); const xmlSerializer = new XMLSerializer();
export function parseDom(text, contentType) { export function parseDom(text: string, contentType: DOMParserSupportedType): Document {
return domParser.parseFromString(text, contentType); return domParser.parseFromString(text, contentType);
} }
export function serializeXml(node) { export function serializeXml(node: Element | Node): string {
return xmlSerializer.serializeToString(node); return xmlSerializer.serializeToString(node);
} }
export const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms)); export function sleep(ms: number): Promise<void> {
return new Promise((resolve) => setTimeout(resolve, ms));
}
export function isImageFile({name, type}) { export function isImageFile({name, type}: {name: string, type?: string}): boolean {
return /\.(jpe?g|png|gif|webp|svg|heic)$/i.test(name || '') || type?.startsWith('image/'); return /\.(jpe?g|png|gif|webp|svg|heic)$/i.test(name || '') || type?.startsWith('image/');
} }
export function isVideoFile({name, type}) { export function isVideoFile({name, type}: {name: string, type?: string}): boolean {
return /\.(mpe?g|mp4|mkv|webm)$/i.test(name || '') || type?.startsWith('video/'); return /\.(mpe?g|mp4|mkv|webm)$/i.test(name || '') || type?.startsWith('video/');
} }

View File

@@ -3,23 +3,23 @@ import type {ColorInput} from 'tinycolor2';
// Returns relative luminance for a SRGB color - https://en.wikipedia.org/wiki/Relative_luminance // Returns relative luminance for a SRGB color - https://en.wikipedia.org/wiki/Relative_luminance
// Keep this in sync with modules/util/color.go // Keep this in sync with modules/util/color.go
function getRelativeLuminance(color: ColorInput) { function getRelativeLuminance(color: ColorInput): number {
const {r, g, b} = tinycolor(color).toRgb(); const {r, g, b} = tinycolor(color).toRgb();
return (0.2126729 * r + 0.7151522 * g + 0.072175 * b) / 255; return (0.2126729 * r + 0.7151522 * g + 0.072175 * b) / 255;
} }
function useLightText(backgroundColor: ColorInput) { function useLightText(backgroundColor: ColorInput): boolean {
return getRelativeLuminance(backgroundColor) < 0.453; return getRelativeLuminance(backgroundColor) < 0.453;
} }
// Given a background color, returns a black or white foreground color that has the highest // Given a background color, returns a black or white foreground color that has the highest
// contrast ratio. In the future, the APCA contrast function, or CSS `contrast-color` will be better. // contrast ratio. In the future, the APCA contrast function, or CSS `contrast-color` will be better.
// https://github.com/color-js/color.js/blob/eb7b53f7a13bb716ec8b28c7a56f052cd599acd9/src/contrast/APCA.js#L42 // https://github.com/color-js/color.js/blob/eb7b53f7a13bb716ec8b28c7a56f052cd599acd9/src/contrast/APCA.js#L42
export function contrastColor(backgroundColor: ColorInput) { export function contrastColor(backgroundColor: ColorInput): string {
return useLightText(backgroundColor) ? '#fff' : '#000'; return useLightText(backgroundColor) ? '#fff' : '#000';
} }
function resolveColors(obj: Record<string, string>) { function resolveColors(obj: Record<string, string>): Record<string, string> {
const styles = window.getComputedStyle(document.documentElement); const styles = window.getComputedStyle(document.documentElement);
const getColor = (name: string) => styles.getPropertyValue(name).trim(); const getColor = (name: string) => styles.getPropertyValue(name).trim();
return Object.fromEntries(Object.entries(obj).map(([key, value]) => [key, getColor(value)])); return Object.fromEntries(Object.entries(obj).map(([key, value]) => [key, getColor(value)]));
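As a quick illustration of the typed contrast helper, a sketch with values worked out from the luminance formula above (import path assumed):
import {contrastColor} from './color.ts'; // path assumed

contrastColor('#ffffff'); // '#000' — luminance ≈ 1.0, above the 0.453 threshold
contrastColor('#000000'); // '#fff' — luminance 0
contrastColor('#0000ff'); // '#fff' — luminance ≈ 0.072, dark enough for light text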

View File

@@ -266,10 +266,8 @@ export function initSubmitEventPolyfill() {
/** /**
* Check if an element is visible, equivalent to jQuery's `:visible` pseudo. * Check if an element is visible, equivalent to jQuery's `:visible` pseudo.
* Note: This function doesn't account for all possible visibility scenarios. * Note: This function doesn't account for all possible visibility scenarios.
* @param {HTMLElement} element The element to check.
* @returns {boolean} True if the element is visible.
*/ */
export function isElemVisible(element: HTMLElement) { export function isElemVisible(element: HTMLElement): boolean {
if (!element) return false; if (!element) return false;
return Boolean(element.offsetWidth || element.offsetHeight || element.getClientRects().length); return Boolean(element.offsetWidth || element.offsetHeight || element.getClientRects().length);
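Since the parameter is now typed as HTMLElement, call sites have to narrow a possibly-null query result first; a small sketch (selector and import path are illustrative):
import {isElemVisible} from './dom.ts'; // path assumed

const el = document.querySelector<HTMLElement>('.page-content'); // hypothetical selector
if (el && isElemVisible(el)) {
  el.focus();
}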

View File

@@ -1,6 +1,11 @@
export async function pngChunks(blob) { type PngChunk = {
name: string,
data: Uint8Array,
}
export async function pngChunks(blob: Blob): Promise<PngChunk[]> {
const uint8arr = new Uint8Array(await blob.arrayBuffer()); const uint8arr = new Uint8Array(await blob.arrayBuffer());
const chunks = []; const chunks: PngChunk[] = [];
if (uint8arr.length < 12) return chunks; if (uint8arr.length < 12) return chunks;
const view = new DataView(uint8arr.buffer); const view = new DataView(uint8arr.buffer);
if (view.getBigUint64(0) !== 9894494448401390090n) return chunks; if (view.getBigUint64(0) !== 9894494448401390090n) return chunks;
@@ -19,9 +24,14 @@ export async function pngChunks(blob) {
return chunks; return chunks;
} }
type ImageInfo = {
width?: number,
dppx?: number,
}
// decode an image and try to obtain width and dppx. It will never throw but instead // decode an image and try to obtain width and dppx. It will never throw but instead
// return default values. // return default values.
export async function imageInfo(blob) { export async function imageInfo(blob: Blob): Promise<ImageInfo> {
let width = 0, dppx = 1; // dppx: 1 dot per pixel for non-HiDPI screens let width = 0, dppx = 1; // dppx: 1 dot per pixel for non-HiDPI screens
if (blob.type === 'image/png') { // only png is supported currently if (blob.type === 'image/png') { // only png is supported currently
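A sketch of how a caller might consume the new ImageInfo type; both fields are optional, so the result is guarded before use (the helper name and import path are assumptions):
import {imageInfo} from './image.ts'; // path assumed

// hypothetical helper: imageInfo resolves instead of throwing, so no try/catch is needed,
// but width/dppx may be absent for unsupported formats
async function displayWidthFor(file: Blob): Promise<number | undefined> {
  const {width, dppx} = await imageInfo(file);
  return width ? width / (dppx ?? 1) : undefined;
}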

View File

@@ -2,17 +2,17 @@ import emojis from '../../../assets/emoji.json';
const maxMatches = 6; const maxMatches = 6;
function sortAndReduce(map) { function sortAndReduce(map: Map<string, number>) {
const sortedMap = new Map(Array.from(map.entries()).sort((a, b) => a[1] - b[1])); const sortedMap = new Map(Array.from(map.entries()).sort((a, b) => a[1] - b[1]));
return Array.from(sortedMap.keys()).slice(0, maxMatches); return Array.from(sortedMap.keys()).slice(0, maxMatches);
} }
export function matchEmoji(queryText) { export function matchEmoji(queryText: string): string[] {
const query = queryText.toLowerCase().replaceAll('_', ' '); const query = queryText.toLowerCase().replaceAll('_', ' ');
if (!query) return emojis.slice(0, maxMatches).map((e) => e.aliases[0]); if (!query) return emojis.slice(0, maxMatches).map((e) => e.aliases[0]);
// results is a map of weights, lower is better // results is a map of weights, lower is better
const results = new Map(); const results = new Map<string, number>();
for (const {aliases} of emojis) { for (const {aliases} of emojis) {
const mainAlias = aliases[0]; const mainAlias = aliases[0];
for (const [aliasIndex, alias] of aliases.entries()) { for (const [aliasIndex, alias] of aliases.entries()) {
@@ -27,7 +27,7 @@ export function matchEmoji(queryText) {
return sortAndReduce(results); return sortAndReduce(results);
} }
export function matchMention(queryText) { export function matchMention(queryText: string): string[] {
const query = queryText.toLowerCase(); const query = queryText.toLowerCase();
// results is a map of weights, lower is better // results is a map of weights, lower is better
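A usage sketch under the new signatures; the import path is assumed, and the exact results depend on assets/emoji.json and the configured mention values:
import {matchEmoji, matchMention} from './match.ts'; // path assumed

// underscores in the query are normalised to spaces; at most maxMatches (6) main
// aliases come back, lowest weight first
const emoji: string[] = matchEmoji('smile');
// matchMention has the same shape but searches the configured mention values
const mentions: string[] = matchMention('user');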

View File

@@ -1,16 +1,17 @@
import dayjs from 'dayjs'; import dayjs from 'dayjs';
import utc from 'dayjs/plugin/utc.js'; import utc from 'dayjs/plugin/utc.js';
import {getCurrentLocale} from '../utils.ts'; import {getCurrentLocale} from '../utils.ts';
import type {ConfigType} from 'dayjs';
dayjs.extend(utc); dayjs.extend(utc);
/** /**
* Returns an array of millisecond-timestamps of start-of-week days (Sundays) * Returns an array of millisecond-timestamps of start-of-week days (Sundays)
* *
* @param startConfig The start date. Can take any type that `Date` accepts. * @param startDate The start date. Can take any type that dayjs accepts.
* @param endConfig The end date. Can take any type that `Date` accepts. * @param endDate The end date. Can take any type that dayjs accepts.
*/ */
export function startDaysBetween(startDate, endDate) { export function startDaysBetween(startDate: ConfigType, endDate: ConfigType): number[] {
const start = dayjs.utc(startDate); const start = dayjs.utc(startDate);
const end = dayjs.utc(endDate); const end = dayjs.utc(endDate);
@@ -21,7 +22,7 @@ export function startDaysBetween(startDate, endDate) {
current = current.add(1, 'day'); current = current.add(1, 'day');
} }
const startDays = []; const startDays: number[] = [];
while (current.isBefore(end)) { while (current.isBefore(end)) {
startDays.push(current.valueOf()); startDays.push(current.valueOf());
current = current.add(1, 'week'); current = current.add(1, 'week');
@@ -30,7 +31,7 @@ export function startDaysBetween(startDate, endDate) {
return startDays; return startDays;
} }
export function firstStartDateAfterDate(inputDate) { export function firstStartDateAfterDate(inputDate: Date): number {
if (!(inputDate instanceof Date)) { if (!(inputDate instanceof Date)) {
throw new Error('Invalid date'); throw new Error('Invalid date');
} }
@@ -41,7 +42,14 @@ export function firstStartDateAfterDate(inputDate) {
return resultDate.valueOf(); return resultDate.valueOf();
} }
export function fillEmptyStartDaysWithZeroes(startDays, data) { type DayData = {
week: number,
additions: number,
deletions: number,
commits: number,
}
export function fillEmptyStartDaysWithZeroes(startDays: number[], data: DayData): DayData[] {
const result = {}; const result = {};
for (const startDay of startDays) { for (const startDay of startDays) {
@@ -51,11 +59,11 @@ export function fillEmptyStartDaysWithZeroes(startDays, data) {
return Object.values(result); return Object.values(result);
} }
let dateFormat; let dateFormat: Intl.DateTimeFormat;
// format a Date object to document's locale, but with 24h format from user's current locale because this // format a Date object to document's locale, but with 24h format from user's current locale because this
// option is a personal preference of the user, not something that the document's locale should dictate. // option is a personal preference of the user, not something that the document's locale should dictate.
export function formatDatetime(date) { export function formatDatetime(date: Date | number): string {
if (!dateFormat) { if (!dateFormat) {
// TODO: replace `hour12` with `Intl.Locale.prototype.getHourCycles` once there is broad browser support // TODO: replace `hour12` with `Intl.Locale.prototype.getHourCycles` once there is broad browser support
dateFormat = new Intl.DateTimeFormat(getCurrentLocale(), { dateFormat = new Intl.DateTimeFormat(getCurrentLocale(), {
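A short sketch of the typed API; ConfigType means any value dayjs accepts, so strings, Dates, and timestamps all work (import path assumed):
import {startDaysBetween, formatDatetime} from './time.ts'; // path assumed

// millisecond timestamps of the Sundays between the two dates
const weeks: number[] = startDaysBetween('2024-01-01', '2024-02-01');

// formatDatetime accepts either a Date or a millisecond timestamp
formatDatetime(new Date());
formatDatetime(weeks[0]);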

View File

@@ -1,12 +1,12 @@
export function pathEscapeSegments(s) { export function pathEscapeSegments(s: string): string {
return s.split('/').map(encodeURIComponent).join('/'); return s.split('/').map(encodeURIComponent).join('/');
} }
function stripSlash(url) { function stripSlash(url: string): string {
return url.endsWith('/') ? url.slice(0, -1) : url; return url.endsWith('/') ? url.slice(0, -1) : url;
} }
export function isUrl(url) { export function isUrl(url: string): boolean {
try { try {
return stripSlash((new URL(url).href)).trim() === stripSlash(url).trim(); return stripSlash((new URL(url).href)).trim() === stripSlash(url).trim();
} catch { } catch {
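A minimal sketch of the two URL helpers; the catch branch elided from the hunk above is assumed to return false, and the import path is an assumption:
import {pathEscapeSegments, isUrl} from './url.ts'; // path assumed

// escapes each segment but keeps the slashes between them
pathEscapeSegments('release/v1.22 rc1'); // 'release/v1.22%20rc1'

// isUrl only accepts absolute URLs that round-trip through the URL constructor
isUrl('https://example.com/path'); // true
isUrl('/relative/path');           // false (URL constructor throws, assumed caught above)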

View File

@@ -1,10 +1,16 @@
window.__webpack_public_path__ = ''; window.__webpack_public_path__ = '';
window.config = { window.config = {
appUrl: 'http://localhost:3000/',
appSubUrl: '',
assetVersionEncoded: '',
assetUrlPrefix: '',
runModeIsProd: true,
customEmojis: {},
csrfToken: 'test-csrf-token-123456', csrfToken: 'test-csrf-token-123456',
pageData: {}, pageData: {},
i18n: {}, notificationSettings: {},
appSubUrl: '', enableTimeTracking: true,
mentionValues: [ mentionValues: [
{key: 'user1 User 1', value: 'user1', name: 'user1', fullname: 'User 1', avatar: 'https://avatar1.com'}, {key: 'user1 User 1', value: 'user1', name: 'user1', fullname: 'User 1', avatar: 'https://avatar1.com'},
{key: 'user2 User 2', value: 'user2', name: 'user2', fullname: 'User 2', avatar: 'https://avatar2.com'}, {key: 'user2 User 2', value: 'user2', name: 'user2', fullname: 'User 2', avatar: 'https://avatar2.com'},
@@ -14,4 +20,6 @@ window.config = {
{key: 'org6 User 6', value: 'org6', name: 'org6', fullname: 'User 6', avatar: 'https://avatar6.com'}, {key: 'org6 User 6', value: 'org6', name: 'org6', fullname: 'User 6', avatar: 'https://avatar6.com'},
{key: 'org7 User 7', value: 'org7', name: 'org7', fullname: 'User 7', avatar: 'https://avatar7.com'}, {key: 'org7 User 7', value: 'org7', name: 'org7', fullname: 'User 7', avatar: 'https://avatar7.com'},
], ],
mermaidMaxSourceCharacters: 5000,
i18n: {},
}; };
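For context, a sketch of how a vitest spec could rely on the richer fixture; the spec name and assertions are illustrative, not taken from the commit:
// illustrative only: reads the fixture defined above through the usual global
test('test fixture sanity', () => {
  expect(window.config.csrfToken).toEqual('test-csrf-token-123456');
  expect(window.config.runModeIsProd).toBe(true);
});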