15 commits
6c2bc2a
feat: Add support to enable backoff when rate limited by the Github API
andrewhibbert Mar 11, 2025
dbf4583
feat: Add support to enable backoff when rate limited by the Github API
andrewhibbert Mar 17, 2025
6f37932
feat: Add support to enable backoff when rate limited by the Github API
andrewhibbert Mar 21, 2025
f5fc28a
feat: Add support to enable backoff when rate limited by the Github API
andrewhibbert Mar 21, 2025
b25f1fe
feat: Add support to enable backoff when rate limited by the Github API
andrewhibbert Mar 21, 2025
70ae11e
feat: Add support to enable backoff when rate limited by the Github API
andrewhibbert Mar 24, 2025
2210309
feat: Add support to enable backoff when rate limited by the Github API
andrewhibbert Mar 24, 2025
c61bdf1
feat: Add support to enable backoff when rate limited by the Github API
andrewhibbert Mar 24, 2025
12e1779
Merge branch 'main' into feat_backoff
andrewhibbert Mar 24, 2025
0872d8c
fix: Stop github scaler from scaling if there are no job labels
andrewhibbert Aug 1, 2025
ea0fff7
Merge branch 'feat_backoff' of github.com:andrewhibbert/keda into fea…
andrewhibbert Aug 1, 2025
1cf0105
Merge branch 'main' into feat_backoff
andrewhibbert Aug 29, 2025
22112c2
feat: Add support to enable backoff when rate limited by the Github API
andrewhibbert Aug 29, 2025
32b33e0
feat: Add support to enable backoff when rate limited by the Github API
andrewhibbert Aug 29, 2025
549d2be
feat: Add support to enable backoff when rate limited by the Github API
andrewhibbert Oct 17, 2025
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -222,6 +222,7 @@ New deprecation(s):
- **Elasticsearch Scaler**: Support IgnoreNullValues at Elasticsearch scaler ([#6599](https://github.com/kedacore/keda/pull/6599))
- **GitHub Scaler**: Add support to use ETag for conditional requests against the Github API ([#6503](https://github.com/kedacore/keda/issues/6503))
- **GitHub Scaler**: Filter workflows via query parameter for improved queue count accuracy ([#6519](https://github.com/kedacore/keda/pull/6519))
- **GitHub Scaler**: Implement backoff when receiving rate limit errors from the GitHub API ([#6643](https://github.com/kedacore/keda/issues/6643))
- **IBMMQ Scaler**: Handling StatusNotFound in IBMMQ scaler ([#6472](https://github.com/kedacore/keda/pull/6472))
- **MongoDB Scaler**: Support float queryValue for MongoDB scaler ([#6574](https://github.com/kedacore/keda/issues/6574))
- **Prometheus Scaler**: Add custom HTTP client timeout ([#6607](https://github.com/kedacore/keda/pull/6607))
112 changes: 89 additions & 23 deletions pkg/scalers/github_runner_scaler.go
@@ -29,14 +29,17 @@ const (
var reservedLabels = []string{"self-hosted", "linux", "x64"}

type githubRunnerScaler struct {
metricType v2.MetricTargetType
metadata *githubRunnerMetadata
httpClient *http.Client
logger logr.Logger
etags map[string]string
previousRepos []string
previousWfrs map[string]map[string]*WorkflowRuns
previousJobs map[string][]Job
metricType v2.MetricTargetType
metadata *githubRunnerMetadata
httpClient *http.Client
logger logr.Logger
etags map[string]string
previousRepos []string
previousWfrs map[string]map[string]*WorkflowRuns
previousJobs map[string][]Job
rateLimit RateLimit
previousQueueLength int64
previousQueueLengthTime time.Time
}

type githubRunnerMetadata struct {
@@ -48,6 +51,7 @@ type githubRunnerMetadata struct {
Labels []string `keda:"name=labels, order=triggerMetadata;resolvedEnv, optional"`
NoDefaultLabels bool `keda:"name=noDefaultLabels, order=triggerMetadata;resolvedEnv, default=false"`
EnableEtags bool `keda:"name=enableEtags, order=triggerMetadata;resolvedEnv, default=false"`
EnableBackoff bool `keda:"name=enableBackoff, order=triggerMetadata;resolvedEnv, default=false"`
MatchUnlabeledJobsWithUnlabeledRunners bool `keda:"name=matchUnlabeledJobsWithUnlabeledRunners, order=triggerMetadata;resolvedEnv, default=false"`
TargetWorkflowQueueLength int64 `keda:"name=targetWorkflowQueueLength, order=triggerMetadata;resolvedEnv, default=1"`
TriggerIndex int
@@ -331,6 +335,12 @@ type Job struct {
HeadBranch string `json:"head_branch"`
}

type RateLimit struct {
Remaining int `json:"remaining"`
ResetTime time.Time `json:"resetTime"`
RetryAfterTime time.Time `json:"retryAfterTime"`
}

// NewGitHubRunnerScaler creates a new GitHub Runner Scaler
func NewGitHubRunnerScaler(config *scalersconfig.ScalerConfig) (Scaler, error) {
httpClient := kedautil.CreateHTTPClient(config.GlobalHTTPTimeout, false)
@@ -359,16 +369,22 @@ func NewGitHubRunnerScaler(config *scalersconfig.ScalerConfig) (Scaler, error) {
previousRepos := []string{}
previousJobs := make(map[string][]Job)
previousWfrs := make(map[string]map[string]*WorkflowRuns)
rateLimit := RateLimit{}
previousQueueLength := int64(0)
previousQueueLengthTime := time.Time{}

return &githubRunnerScaler{
metricType: metricType,
metadata: meta,
httpClient: httpClient,
logger: InitializeLogger(config, "github_runner_scaler"),
etags: etags,
previousRepos: previousRepos,
previousJobs: previousJobs,
previousWfrs: previousWfrs,
metricType: metricType,
metadata: meta,
httpClient: httpClient,
logger: InitializeLogger(config, "github_runner_scaler"),
etags: etags,
previousRepos: previousRepos,
previousJobs: previousJobs,
previousWfrs: previousWfrs,
rateLimit: rateLimit,
previousQueueLength: previousQueueLength,
previousQueueLengthTime: previousQueueLengthTime,
}, nil
}

@@ -466,6 +482,32 @@ func (s *githubRunnerScaler) getRepositories(ctx context.Context) ([]string, err
return repoList, nil
}

func (s *githubRunnerScaler) getRateLimit(header http.Header) RateLimit {
var retryAfterTime time.Time

remaining, _ := strconv.Atoi(header.Get("X-RateLimit-Remaining"))
reset, _ := strconv.ParseInt(header.Get("X-RateLimit-Reset"), 10, 64)
resetTime := time.Unix(reset, 0)

if retryAfterStr := header.Get("Retry-After"); retryAfterStr != "" {
if retrySeconds, err := strconv.Atoi(retryAfterStr); err == nil {
retryAfterTime = time.Now().Add(time.Duration(retrySeconds) * time.Second)
}
}

if retryAfterTime.IsZero() {
s.logger.V(1).Info(fmt.Sprintf("Github API rate limit: Remaining: %d, ResetTime: %s", remaining, resetTime))
} else {
s.logger.V(1).Info(fmt.Sprintf("Github API rate limit: Remaining: %d, ResetTime: %s, Retry-After: %s", remaining, resetTime, retryAfterTime))
}

return RateLimit{
Remaining: remaining,
ResetTime: resetTime,
RetryAfterTime: retryAfterTime,
}
}

func (s *githubRunnerScaler) getGithubRequest(ctx context.Context, url string, metadata *githubRunnerMetadata, httpClient *http.Client) ([]byte, int, error) {
req, err := http.NewRequestWithContext(ctx, "GET", url, nil)
if err != nil {
@@ -496,19 +538,22 @@ func (s *githubRunnerScaler) getGithubRequest(ctx context.Context, url string, m
}
_ = r.Body.Close()

if r.Header.Get("X-RateLimit-Remaining") != "" {
s.rateLimit = s.getRateLimit(r.Header)
}

if r.StatusCode != 200 {
if r.StatusCode == 304 && s.metadata.EnableEtags {
s.logger.V(1).Info(fmt.Sprintf("The github rest api for the url: %s returned status %d %s", url, r.StatusCode, http.StatusText(r.StatusCode)))
return []byte{}, r.StatusCode, nil
}

if r.Header.Get("X-RateLimit-Remaining") != "" {
githubAPIRemaining, _ := strconv.Atoi(r.Header.Get("X-RateLimit-Remaining"))
if s.rateLimit.Remaining == 0 && !s.rateLimit.ResetTime.IsZero() {
return []byte{}, r.StatusCode, fmt.Errorf("GitHub API rate limit exceeded, reset time %s", s.rateLimit.ResetTime)
}

if githubAPIRemaining == 0 {
resetTime, _ := strconv.ParseInt(r.Header.Get("X-RateLimit-Reset"), 10, 64)
return []byte{}, r.StatusCode, fmt.Errorf("GitHub API rate limit exceeded, resets at %s", time.Unix(resetTime, 0))
}
if !s.rateLimit.RetryAfterTime.IsZero() && time.Now().Before(s.rateLimit.RetryAfterTime) {
return []byte{}, r.StatusCode, fmt.Errorf("GitHub API rate limit exceeded, retry after %s", s.rateLimit.RetryAfterTime)
}

return []byte{}, r.StatusCode, fmt.Errorf("the GitHub REST API returned error. url: %s status: %d response: %s", url, r.StatusCode, string(b))
@@ -625,6 +670,23 @@ func (s *githubRunnerScaler) canRunnerMatchLabels(jobLabels []string, runnerLabe

// GetWorkflowQueueLength returns the number of workflow jobs in the queue
func (s *githubRunnerScaler) GetWorkflowQueueLength(ctx context.Context) (int64, error) {
if s.metadata.EnableBackoff {
var backoffUntilTime time.Time
if s.rateLimit.Remaining == 0 && !s.rateLimit.ResetTime.IsZero() && time.Now().Before(s.rateLimit.ResetTime) {
backoffUntilTime = s.rateLimit.ResetTime
} else if !s.rateLimit.RetryAfterTime.IsZero() && time.Now().Before(s.rateLimit.RetryAfterTime) {
backoffUntilTime = s.rateLimit.RetryAfterTime
}
if !backoffUntilTime.IsZero() {
if !s.previousQueueLengthTime.IsZero() {
s.logger.V(1).Info(fmt.Sprintf("Github API rate limit exceeded, returning previous queue length %d, last successful queue length check %s, backing off until %s", s.previousQueueLength, s.previousQueueLengthTime, backoffUntilTime))
return s.previousQueueLength, nil
}

return -1, fmt.Errorf("github API rate limit exceeded, no valid previous queue length available, API available at %s", backoffUntilTime)
}
}

var repos []string
var err error

@@ -667,12 +729,16 @@ func (s *githubRunnerScaler) GetWorkflowQueueLength(ctx context.Context) (int64,
}
}

if s.metadata.EnableBackoff {
s.previousQueueLength = queueCount
s.previousQueueLengthTime = time.Now()
}

return queueCount, nil
}

func (s *githubRunnerScaler) GetMetricsAndActivity(ctx context.Context, metricName string) ([]external_metrics.ExternalMetricValue, bool, error) {
queueLen, err := s.GetWorkflowQueueLength(ctx)

if err != nil {
s.logger.Error(err, "error getting workflow queue length")
return []external_metrics.ExternalMetricValue{}, false, err
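For readability, the backoff decision this PR adds at the top of `GetWorkflowQueueLength` can be read in isolation as the sketch below. The helper `backoffUntil` is hypothetical and not part of the PR; it simply restates the logic: if the primary rate limit is exhausted, back off until the reset time, otherwise honour a pending `Retry-After` deadline from a secondary rate limit.

```go
package main

import (
	"fmt"
	"time"
)

// RateLimit mirrors the struct added by this PR: the primary limit state
// plus an optional Retry-After deadline for secondary rate limits.
type RateLimit struct {
	Remaining      int
	ResetTime      time.Time
	RetryAfterTime time.Time
}

// backoffUntil is a hypothetical helper (not in the PR) expressing the
// decision made before any GitHub API calls: returns the time to back off
// until and whether a backoff is currently in effect.
func backoffUntil(rl RateLimit, now time.Time) (time.Time, bool) {
	// Primary rate limit exhausted: wait for the documented reset time.
	if rl.Remaining == 0 && !rl.ResetTime.IsZero() && now.Before(rl.ResetTime) {
		return rl.ResetTime, true
	}
	// Secondary rate limit: honour an unexpired Retry-After deadline.
	if !rl.RetryAfterTime.IsZero() && now.Before(rl.RetryAfterTime) {
		return rl.RetryAfterTime, true
	}
	return time.Time{}, false
}

func main() {
	rl := RateLimit{Remaining: 0, ResetTime: time.Now().Add(3 * time.Minute)}
	if until, ok := backoffUntil(rl, time.Now()); ok {
		fmt.Printf("skipping GitHub API calls until %s\n", until)
	}
}
```

When a backoff is in effect, the scaler returns the cached `previousQueueLength` rather than erroring, as long as at least one successful poll has happened before.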
35 changes: 35 additions & 0 deletions pkg/scalers/github_runner_scaler_test.go
@@ -819,6 +819,41 @@ func TestNewGitHubRunnerScaler_QueueLength_MultiRepo_PulledRepos_NoRate(t *testi
}
}

func TestNewGitHubRunnerScaler_QueueLength_SingleRepo_WithRateLimitBackoff(t *testing.T) {
// First call will set previous queue length
apiStub := apiStubHandler(true, false)
meta := getGitHubTestMetaData(apiStub.URL)
meta.EnableBackoff = true

scaler := githubRunnerScaler{
metadata: meta,
httpClient: http.DefaultClient,
}
scaler.metadata.Repos = []string{"test"}
scaler.metadata.Labels = []string{"foo", "bar"}

if queueLen, err := scaler.GetWorkflowQueueLength(context.Background()); err != nil {
fmt.Println(err)
t.Fail()
} else if queueLen != 1 {
fmt.Printf("Expected queue length of 1 got %d\n", queueLen)
t.Fail()
}

// Second call simulating that there is a rate limit
// should return previous queue length
scaler.rateLimit.Remaining = 0
scaler.rateLimit.ResetTime = time.Now().Add(5 * time.Minute)

if queueLen, err := scaler.GetWorkflowQueueLength(context.Background()); err != nil {
fmt.Println(err)
t.Fail()
} else if queueLen != 1 {
fmt.Printf("Expected queue length of 1 after rate limit backoff got %d\n", queueLen)
t.Fail()
}
}

type githubRunnerMetricIdentifier struct {
metadataTestData *map[string]string
triggerIndex int
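The added test exercises the primary rate limit path (Remaining == 0 with a future ResetTime). A complementary sketch for the secondary rate limit path could look like the following; it is not part of the PR and assumes the same `apiStubHandler` and `getGitHubTestMetaData` fixtures from this test file.

```go
// Sketch only (not in the PR): once RetryAfterTime is in the future,
// GetWorkflowQueueLength should fall back to the cached queue length.
func TestNewGitHubRunnerScaler_QueueLength_SingleRepo_WithRetryAfterBackoff(t *testing.T) {
	apiStub := apiStubHandler(true, false)
	meta := getGitHubTestMetaData(apiStub.URL)
	meta.EnableBackoff = true

	scaler := githubRunnerScaler{
		metadata:   meta,
		httpClient: http.DefaultClient,
	}
	scaler.metadata.Repos = []string{"test"}
	scaler.metadata.Labels = []string{"foo", "bar"}

	// Prime the cache with a successful call.
	if _, err := scaler.GetWorkflowQueueLength(context.Background()); err != nil {
		t.Fatal(err)
	}

	// Simulate a secondary rate limit: Retry-After deadline still in the future.
	scaler.rateLimit.RetryAfterTime = time.Now().Add(2 * time.Minute)

	if queueLen, err := scaler.GetWorkflowQueueLength(context.Background()); err != nil {
		t.Fatal(err)
	} else if queueLen != 1 {
		t.Fatalf("expected cached queue length of 1, got %d", queueLen)
	}
}
```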
1 change: 1 addition & 0 deletions tests/scalers/github_runner/github_runner_test.go
@@ -219,6 +219,7 @@ spec:
labels: {{.Labels}}
runnerScopeFromEnv: "RUNNER_SCOPE"
enableEtags: "true"
enableBackoff: "true"
authenticationRef:
name: github-trigger-auth
`