Make history-exporter default action (#11)
We found it's much better to export stats in a batch, so let's make history-exporter the default binary for the action.
This breaks backward compatibility, but we expect we are the only users of this action so far, so it should be OK-ish.

Anyway, let's tag it `v0.2.x` to make the incompatible changes from `v0.1.x` visible.
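For workflows that still need the old per-run exporter, pinning the previous major version should keep the old behaviour. This is a minimal sketch, assuming a `v0.1.4` tag exists on the action repository (the tag mirrors the old `action.yml` image tag; the secret and table names are placeholders):

```yaml
# Hypothetical step pinning the old per-run exporter (placeholder names below)
- uses: neondatabase/gh-workflow-stats-action@v0.1.4   # assumed tag, matching the old image tag
  with:
    db_uri: ${{ secrets.WORKFLOW_STATS_DB_URI }}        # placeholder secret name
    db_table: gh_workflow_stats                         # placeholder table name
    gh_run_id: ${{ github.run_id }}
    gh_token: ${{ secrets.GITHUB_TOKEN }}
```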
fedordikarev authored Nov 11, 2024
1 parent e70d2ba commit 4c998b2
Showing 6 changed files with 126 additions and 59 deletions.
8 changes: 4 additions & 4 deletions Dockerfile
@@ -20,10 +20,10 @@ COPY go.mod go.sum ./
RUN go mod download

COPY pkg/ pkg/
COPY cmd/gh-action/ ./
COPY cmd/history-exporter/ ./

ENV CGO_ENABLED=0
RUN go build -v -o ./gh-action-workflow-stats
RUN go build -v -o ./gh-action-history-exporter


FROM scratch
@@ -36,9 +36,9 @@ COPY --from=go-build /etc/passwd /etc/group /etc/
USER gh-action:gh-action

# Copy the static executable
COPY --from=go-build /build/gh-action-workflow-stats /gh-action-workflow-stats
COPY --from=go-build /build/gh-action-history-exporter /gh-action-history-exporter

# Run the binary
ENTRYPOINT ["/gh-action-workflow-stats"]
ENTRYPOINT ["/gh-action-history-exporter"]


16 changes: 10 additions & 6 deletions README.md
@@ -1,10 +1,14 @@
# Github Workflow Stats exporter to Postgres

## WIP
Work is in progress so list of parameters could be changed.

## Inputs

| Input | Description |
| ------------------- | -------------------------------------- |
| `DB_URI` | Database URI |
| `DB_TABLE` | Table for storing Workflow stats |
| `GH_RUN_ID` | Workflow Run Id to get information on |
| `GH_TOKEN` | Github Token, optional for public Repo |
| Input | Description |
| ------------------- | ----------------------------------------- |
| `db_uri` | Database URI |
| `db_table` | Table for storing Workflow stats |
| `gh_run_id` | Workflow Run Id to get information on |
| `gh_token` | Github Token, optional for public Repo |
| `duration` | Duration for the history period to export |
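As an illustration of the new batch mode, a scheduled workflow could invoke the action roughly as in the sketch below. This is a hypothetical example, not part of this commit: the cron schedule, secret name, and table name are assumptions, and the version tag mirrors the `v0.2.1` image referenced in `action.yml`.

```yaml
# Hypothetical scheduled export of the last hour of workflow history (batch mode)
name: workflow-stats-export
on:
  schedule:
    - cron: '5 * * * *'                                      # hourly; assumption for illustration

jobs:
  export:
    runs-on: ubuntu-latest
    steps:
      - uses: neondatabase/gh-workflow-stats-action@v0.2.1   # tag mirrors the image tag in action.yml
        with:
          db_uri: ${{ secrets.WORKFLOW_STATS_DB_URI }}       # placeholder secret name
          db_table: gh_workflow_stats                        # placeholder table name
          gh_token: ${{ secrets.GITHUB_TOKEN }}
          duration: '1h'                                     # Go time.ParseDuration format, e.g. '30m', '6h'
```

With `duration` left at its default of `1h`, an hourly schedule keeps the export windows roughly contiguous.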
15 changes: 13 additions & 2 deletions action.yml
@@ -15,14 +15,25 @@ inputs:
required: true
gh_run_id:
description: Workflow Run Id to get information on
required: true
required: false
gh_token:
description: Github Token with permissions to access Workflows. Not required for public repos.
required: false
duration:
description: Duration for the history period to export (in Golang time.parseDuration format)
required: false
default: '1h'
exit_on_token_rate_limit:
description: Do not sleep and just exit when we used github token rate limit
required: false
default: 'true'

runs:
using: docker
image: "docker://neondatabase/gh-workflow-stats-action:v0.1.4"
image: "docker://neondatabase/gh-workflow-stats-action:v0.2.1"
args:
- -duration=${{ inputs.duration }}
- -exit-on-token-rate-limit=${{ inputs.exit_on_token_rate_limit }}
env:
DB_URI: ${{ inputs.db_uri }}
DB_TABLE: ${{ inputs.db_table }}
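For a one-off backfill over a longer period, the new `exit_on_token_rate_limit` input can be set to `'false'` so the exporter sleeps until the GitHub API rate limit resets instead of stopping early. A hedged sketch (the trigger context, tag, secret, and table names are assumptions):

```yaml
# Hypothetical manual backfill step: export the last 7 days and wait out rate limits
- uses: neondatabase/gh-workflow-stats-action@v0.2.1   # tag mirrors the image tag above
  with:
    db_uri: ${{ secrets.WORKFLOW_STATS_DB_URI }}        # placeholder secret name
    db_table: gh_workflow_stats                         # placeholder table name
    gh_token: ${{ secrets.GITHUB_TOKEN }}
    duration: '168h'                                    # 7 days, Go time.ParseDuration format
    exit_on_token_rate_limit: 'false'                   # sleep until the rate limit resets instead of exiting
```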
126 changes: 86 additions & 40 deletions cmd/history-exporter/main.go
@@ -14,34 +14,83 @@ import (
"github.com/neondatabase/gh-workflow-stats-action/pkg/gh"
)

func main() {
var startDateStr string
var endDateStr string
var startDate time.Time
var endDate time.Time
func parseTimesAndDuration(startTimeStr string, endTimeStr string, durationStr string) (time.Time, time.Time, error) {
startTime := time.Now()
endTime := time.Now()
checkTimeLayouts := []string{time.DateOnly, time.RFC3339, time.DateTime}
var duration time.Duration
var err error

flag.StringVar(&startDateStr, "start-date", "", "start date to quert and export")
flag.StringVar(&endDateStr, "end-date", "", "end date to quert and export")
flag.Parse()
if startTimeStr != "" && endTimeStr != "" && durationStr != "" {
return startTime, endTime, fmt.Errorf("you can't set all startTime, endTime and duration")
}

if startDateStr == "" {
startDate = time.Now().Truncate(24 * time.Hour)
} else {
var err error
startDate, err = time.Parse("2006-01-02", startDateStr)
if durationStr != "" {
duration, err = time.ParseDuration(durationStr)
if err != nil {
log.Fatalf("Failed to parse date: %s", err)
return startTime, endTime, fmt.Errorf("failed to parse duration [%s]: %v", durationStr, err)
}
}
if endTimeStr != "" {
parsedSuccess := false
for _, layout := range checkTimeLayouts {
endTime, err = time.Parse(layout, endTimeStr)
if err == nil {
parsedSuccess = true
break
}
}
if !parsedSuccess {
return startTime, endTime, fmt.Errorf("failed to parse endTime [%s]: %v", endTimeStr, err)
}
}
if startTimeStr != "" {
parsedSuccess := false
for _, layout := range checkTimeLayouts {
startTime, err = time.Parse(layout, startTimeStr)
if err == nil {
parsedSuccess = true
break
}
}
if !parsedSuccess {
return startTime, endTime, fmt.Errorf("failed to parse startTime [%s]: %v", startTimeStr, err)
}
}
if startTimeStr == "" && endTimeStr == "" {
startTime = time.Now().Truncate(24 * time.Hour)
endTime = time.Now().Truncate(time.Minute)
}

if endDateStr == "" {
endDate = startDate.AddDate(0, 0, 1)
} else {
var err error
endDate, err = time.Parse("2006-01-02", endDateStr)
if err != nil {
log.Fatalf("Failed to parse end date: %s", err)
if durationStr != "" {
if startTimeStr != "" {
return startTime, startTime.Add(duration), nil
}
return endTime.Add(-duration), endTime, nil
}

return startTime, endTime, nil
}

func main() {
var startTimeStr string
var endTimeStr string
var durationStr string
var startTime time.Time
var endTime time.Time
var err error

var exitOnTokenRateLimit bool

flag.StringVar(&startTimeStr, "start-time", "", "start time to query and export")
flag.StringVar(&endTimeStr, "end-time", "", "end time to query and export")
flag.StringVar(&durationStr, "duration", "", "duration of the export period")
flag.BoolVar(&exitOnTokenRateLimit, "exit-on-token-rate-limit", false, "Should program exit when we hit github token rate limit or sleep and wait for renewal")
flag.Parse()

startTime, endTime, err = parseTimesAndDuration(startTimeStr, endTimeStr, durationStr)
if err != nil {
log.Fatalf("Failed to parse dates: %s", err)
}

conf, err := config.GetConfig()
@@ -53,26 +53,20 @@ func main() {
if err != nil {
log.Fatal(err)
}
err = db.InitDatabase(conf)
if err != nil {
log.Fatal(err)
}

gh.InitGhClient(&conf)
ctx := context.Background()

durations := []time.Duration{
6 * time.Hour, // 18:00 - 24:00
3 * time.Hour, // 15:00 - 18:00
1 * time.Hour, // 14:00 - 15:00
1 * time.Hour, // 13:00 - 14:00
1 * time.Hour, // 12:00 - 13:00
2 * time.Hour, // 10:00 - 12:00
4 * time.Hour, // 06:00 - 10:00
6 * time.Hour, // 00:00 - 06:00
}
curDurIdx := 0
for date := endDate.Add(-durations[curDurIdx]); date.Compare(startDate) >= 0; date = date.Add(-durations[curDurIdx]) {
runs, rate, _ := gh.ListWorkflowRuns(ctx, conf, date, date.Add(durations[curDurIdx]))
fmt.Println("\n", date, len(runs))
queryDuration := time.Duration(time.Hour)
for queryTime := endTime.Add(-queryDuration); queryTime.Compare(startTime) >= 0; queryTime = queryTime.Add(-queryDuration) {
runs, rate, _ := gh.ListWorkflowRuns(ctx, conf, queryTime, queryTime.Add(queryDuration))
fmt.Println("\n", queryTime, len(runs))
if len(runs) >= 1000 {
fmt.Printf("\n\n+++\n+ PAGINATION LIMIT: %v\n+++\n", date)
fmt.Printf("\n\n+++\n+ PAGINATION LIMIT: %v\n+++\n", queryTime)
}
fetchedRunsKeys := make([]gh.WorkflowRunAttemptKey, len(runs))
i := 0
@@ -82,13 +82,17 @@ }
}
notInDb := db.QueryWorkflowRunsNotInDb(conf, fetchedRunsKeys)
fmt.Printf("Time range: %v - %v, fetched: %d, notInDb: %d.\n",
date, date.Add(durations[curDurIdx]),
queryTime, queryTime.Add(queryDuration),
len(runs), len(notInDb),
)
if rate.Remaining < 30 {
fmt.Printf("Close to rate limit, remaining: %d", rate.Remaining)
if exitOnTokenRateLimit {
fmt.Printf("Exit due to the flag -exit-on-token-rate-limit=true")
break
}
fmt.Printf("Sleep till %v (%v seconds)\n", rate.Reset, time.Until(rate.Reset.Time))
time.Sleep(time.Until(rate.Reset.Time) + 10*time.Second)
time.Sleep(time.Until(rate.Reset.Time))
} else {
fmt.Printf("Rate: %+v\n", rate)
}
@@ -104,8 +104,7 @@ func main() {
attemptRun, _ = gh.GetWorkflowAttempt(ctx, conf, key.RunAttempt)
}
db.SaveWorkflowRunAttempt(conf, attemptRun)
export.ExportAndSaveJobs(ctx, conf, key.RunAttempt)
export.ExportAndSaveJobs(ctx, conf, key.RunAttempt, exitOnTokenRateLimit)
}
curDurIdx = (curDurIdx + 1) % len(durations)
}
}
14 changes: 8 additions & 6 deletions pkg/config/config.go
@@ -46,12 +46,14 @@ func GetConfig() (ConfigType, error) {

envRunID := os.Getenv("GH_RUN_ID")
var runID int64
if len(envRunID) == 0 {
return ConfigType{}, fmt.Errorf("missing env: GH_RUN_ID")
}
runID, err := strconv.ParseInt(envRunID, 10, 64)
if err != nil {
return ConfigType{}, fmt.Errorf("GH_RUN_ID must be integer, error: %v", err)
if len(envRunID) > 0 {
var err error
runID, err = strconv.ParseInt(envRunID, 10, 64)
if err != nil {
return ConfigType{}, fmt.Errorf("GH_RUN_ID must be integer, error: %v", err)
}
} else {
runID = -1
}

githubToken := os.Getenv("GH_TOKEN")
6 changes: 5 additions & 1 deletion pkg/export/export.go
@@ -11,13 +11,17 @@ import (
"github.com/neondatabase/gh-workflow-stats-action/pkg/gh"
)

func ExportAndSaveJobs(ctx context.Context, conf config.ConfigType, runAttempt int64) error {
func ExportAndSaveJobs(ctx context.Context, conf config.ConfigType, runAttempt int64, exitOnTokenRateLimit bool) error {
jobsInfo, rate, err := gh.GetWorkflowAttemptJobs(ctx, conf, runAttempt)
if err != nil {
log.Fatal(err)
}
if rate.Remaining < 20 {
fmt.Printf("Close to rate limit, remaining: %d", rate.Remaining)
if exitOnTokenRateLimit {
fmt.Printf("Exit due to the flag -exit-on-token-rate-limit=true")
return nil
}
fmt.Printf("Sleep till %v (%v seconds)\n", rate.Reset, time.Until(rate.Reset.Time))
time.Sleep(time.Until(rate.Reset.Time))
}
