Skip to content
This repository was archived by the owner on Mar 13, 2026. It is now read-only.

Commit 59ecef0

Browse files
authored
Merge New features for v0.1.1 #14
2 parents d2b5f8b + 70da83c commit 59ecef0

9 files changed

Lines changed: 145 additions & 35 deletions

File tree

.github/workflows/tests.yaml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@ jobs:
1010

1111
runs-on: ubuntu-latest
1212
strategy:
13+
fail-fast: false
1314
matrix:
1415
python-version: ["3.10", "3.11"]
1516

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1 +1,2 @@
11
__pycache__/
2+
.coverage

.pre-commit-config.yaml

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
1+
repos:
2+
- repo: https://github.com/pre-commit/pre-commit-hooks
3+
rev: v4.4.0
4+
hooks:
5+
- id: trailing-whitespace
6+
- id: check-docstring-first
7+
- id: check-json
8+
- id: pretty-format-json
9+
args: [--autofix, --no-sort-keys]
10+
- id: check-added-large-files
11+
- id: check-yaml
12+
- id: debug-statements
13+
- id: end-of-file-fixer
14+
- repo: https://github.com/myint/docformatter
15+
rev: v1.5.1
16+
hooks:
17+
- id: docformatter
18+
args: [--in-place]
19+
- repo: https://github.com/asottile/pyupgrade
20+
rev: v3.3.1
21+
hooks:
22+
- id: pyupgrade
23+
args: [--py38-plus]
24+
- repo: https://github.com/PyCQA/flake8
25+
rev: 6.0.0
26+
hooks:
27+
- id: flake8

README.md

Lines changed: 28 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,18 +1,41 @@
1-
# github-workflows-monitoring
1+
# GitHub Workflows Monitoring
22

33
[![Tests](https://github.com/midokura/github-workflows-monitoring/actions/workflows/tests.yaml/badge.svg)](https://github.com/midokura/github-workflows-monitoring/actions/workflows/tests.yaml)
44

55
## About
66

7-
Github Workflow Monitoring is a small Flask-based web server that connects to Github using websockets to monitor Github Actions workflows. It tracks each workflow's state (queued, in_progress, completed) and calculates the time spent in each state. The metrics are logged in logfmt format for easy consumption by Grafana.
7+
GitHub Workflows Monitoring is a small Python (Flask-based) application that processes [GitHub webhook calls] and logs them.
8+
It tracks each workflow's state (`queued`, `in_progress`, `completed`) and calculates the time spent in each state.
9+
10+
This application can be very useful to gather information about Organization Runners:
11+
- How much time is spent before a job starts processing?
12+
- What repositories are triggering lots of jobs?
13+
14+
The metrics are logged in `logfmt` format to simplify querying them (e.g. with Grafana).
15+
16+
[GitHub webhook calls]: https://docs.github.com/en/developers/webhooks-and-events/webhooks/creating-webhooks
17+
18+
## Setup
19+
20+
Go to your **GitHub Organization** >> **Settings** >> **Webhooks** >> **Add new webhook**.
21+
22+
Expose your application to the Internet (e.g. via ngrok or a load balancer), and **use the endpoint** `/github-webhook`.
23+
24+
![Example of Webhook configuration](media/github_setup.png)
25+
26+
The **events** that are currently supported are:
27+
- Workflow jobs
828

929
## Testing
1030

1131
Into a virtual environment, install the requirements:
1232

13-
pip install -r tests/requirements.txt
14-
33+
```sh
34+
pip install -r tests/requirements.txt
35+
```
1536

1637
To run the tests:
1738

18-
pytest --cov=src
39+
```sh
40+
pytest --cov=src
41+
```

media/github_setup.png

27.1 KB
Loading

setup.cfg

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,12 @@
11
[metadata]
22
name = github-workflows-monitoring
3-
version = 0.1
3+
version = 0.1.1
44
license-file = LICENSE
55

66
[options]
77
python_requires = >=3.8
88
packages = find:
9-
install_requires =
9+
install_requires =
1010
Flask>=2.2,<3
1111

1212
[flake8]

src/app.py

Lines changed: 42 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77

88

99
from const import GithubHeaders, LOGGING_CONFIG
10-
from utils import parse_datetime
10+
from utils import parse_datetime, dict_to_logfmt
1111

1212
dictConfig(LOGGING_CONFIG)
1313

@@ -51,15 +51,25 @@ def process_workflow_job():
5151
workflow = job["workflow_job"]["workflow_name"]
5252
time_start = parse_datetime(job["workflow_job"]["started_at"])
5353
repository = job["repository"]["full_name"]
54+
repository_private = job["repository"]["private"]
5455
action = job["action"]
56+
conclusion = job["workflow_job"].get("conclusion")
57+
requestor = job.get("sender", {}).get("login")
58+
runner_name = job["workflow_job"]["runner_name"]
59+
runner_group_name = job["workflow_job"]["runner_group_name"]
60+
runner_public = (runner_group_name == "GitHub Actions")
61+
62+
context_details = {
63+
"action": action,
64+
"repository": repository,
65+
"job_id": job_id,
66+
"workflow": workflow,
67+
"requestor": requestor,
68+
}
5569

5670
if action == "queued":
5771
# add to memory as timestamp
5872
jobs[job_id] = int(time_start.timestamp())
59-
msg = (
60-
f"action={action} repository={repository} job_id={job_id}"
61-
f' workflow="{workflow}"'
62-
)
6373

6474
elif action == "in_progress":
6575
job_requested = jobs.get(job_id)
@@ -68,10 +78,14 @@ def process_workflow_job():
6878
time_to_start = 0
6979
else:
7080
time_to_start = (time_start - datetime.fromtimestamp(job_requested)).seconds
71-
msg = (
72-
f"action={action} repository={repository} job_id={job_id}"
73-
f' workflow="{workflow}" time_to_start={time_to_start}'
74-
)
81+
82+
context_details = {
83+
**context_details,
84+
"time_to_start": time_to_start,
85+
"runner_name": runner_name,
86+
"runner_public": runner_public,
87+
"repository_private": repository_private
88+
}
7589

7690
elif action == "completed":
7791
job_requested = jobs.get(job_id)
@@ -84,29 +98,37 @@ def process_workflow_job():
8498
).seconds
8599
# delete from memory
86100
del jobs[job_id]
87-
msg = (
88-
f"action={action} repository={repository} job_id={job_id}"
89-
f' workflow="{workflow}" time_to_finish={time_to_finish}'
90-
)
101+
102+
context_details = {
103+
**context_details,
104+
"time_to_finish": time_to_finish,
105+
"conclusion": conclusion
106+
}
107+
91108
else:
92109
app.logger.warning(f"Unknown action {action}, removing from memory")
93110
if job_id in jobs:
94111
del jobs[job_id]
95-
msg = None
112+
context_details = None
96113

97-
if msg:
98-
app.logger.info(msg)
114+
if context_details:
115+
app.logger.info(dict_to_logfmt(context_details))
99116
return True
100117

101118

119+
allowed_events = {
120+
"workflow_job": process_workflow_job
121+
}
122+
123+
102124
@app.route("/github-webhook", methods=["POST"])
103125
def github_webhook_process():
104126
event = request.headers.get(GithubHeaders.EVENT.value)
105-
command = f"process_{event}"
106127

107-
if command == "process_workflow_job":
108-
app.logger.debug(f"Calling function {command}")
109-
process_workflow_job()
128+
if event in allowed_events:
129+
app.logger.debug(f"Calling function to process {event=}")
130+
func = allowed_events.get(event)
131+
func()
110132
return "OK"
111133

112134
app.logger.error(f"Unknown event type {event}, can't handle")

src/utils.py

Lines changed: 19 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,24 @@
22

33

44
def parse_datetime(date: str) -> datetime:
    """Parse a GitHub-style UTC timestamp (e.g. ``2023-01-27T14:00:00Z``).

    :param date: timestamp string in ``%Y-%m-%dT%H:%M:%SZ`` form.
    :return: naive ``datetime`` with the parsed calendar fields.
    """
    return datetime.strptime(date, "%Y-%m-%dT%H:%M:%SZ")
8+
9+
10+
def dict_to_logfmt(data: dict) -> str:
    """Convert a dict to a logfmt-formatted string.

    Formatting rules:
      * ``None`` values render as a bare ``key=`` (empty value).
      * Booleans render as lowercase ``true``/``false``.
      * Any other non-string value is stringified with ``str()``.
      * Values containing a space are wrapped in double quotes, with
        embedded double quotes escaped as ``\\"``.

    :param data: mapping of keys to arbitrary values.
    :return: space-joined ``key=value`` pairs in insertion order.
    """
    pairs = []
    for key, value in data.items():
        if value is None:
            # Keep the key visible even when there is no value.
            pairs.append(f"{key}=")
            continue
        if isinstance(value, bool):
            value = "true" if value else "false"
        elif not isinstance(value, str):
            # Fix: the original checked isinstance(v, (dict, object, int)),
            # which is always true (everything is an object); the real
            # intent is simply "stringify anything that isn't a str yet".
            value = str(value)
        if " " in value:
            value = '"%s"' % value.replace('"', '\\"')
        pairs.append(f"{key}={value}")
    return " ".join(pairs)

tests/tests.py

Lines changed: 25 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -11,9 +11,22 @@
1111
"id": 0,
1212
"workflow_name": "CI",
1313
"started_at": "2023-01-27T14:00:00Z",
14+
"conclusion": None,
15+
"labels": [],
16+
"runner_id": None,
17+
"runner_name": None,
18+
"runner_group_id": None,
19+
"runner_group_name": None,
1420
},
1521
"repository": {
22+
"name": "foo",
1623
"full_name": "foo/foo",
24+
"private": False,
25+
},
26+
"sender": {
27+
"login": "testerbot",
28+
"id": 1,
29+
"type": "User",
1730
},
1831
}
1932

@@ -53,7 +66,8 @@ def test_started_job_not_stored(client, caplog):
5366
assert response.status_code == 200
5467
assert caplog.messages == [
5568
"Job 2 is in_progress but not stored!",
56-
'action=in_progress repository=foo/foo job_id=2 workflow="CI" time_to_start=0',
69+
'action=in_progress repository=foo/foo job_id=2 workflow=CI requestor=testerbot time_to_start=0 '
70+
'runner_name= runner_public=false repository_private=false',
5771
]
5872

5973

@@ -65,7 +79,7 @@ def test_finished_job_not_stored(client, caplog):
6579
assert response.status_code == 200
6680
assert caplog.messages == [
6781
"Job 3 is completed but not stored!",
68-
'action=completed repository=foo/foo job_id=3 workflow="CI" time_to_finish=0',
82+
'action=completed repository=foo/foo job_id=3 workflow=CI requestor=testerbot time_to_finish=0 conclusion=',
6983
]
7084

7185

@@ -79,7 +93,7 @@ def test_unknown_action(client, caplog):
7993
response = client.post("/github-webhook", headers=HEADERS, json=body_failed)
8094
assert response.status_code == 200
8195
assert caplog.messages == [
82-
'action=queued repository=foo/foo job_id=4 workflow="CI"',
96+
'action=queued repository=foo/foo job_id=4 workflow=CI requestor=testerbot',
8397
"Unknown action failed, removing from memory",
8498
]
8599

@@ -91,7 +105,7 @@ def test_queued_job(client, caplog):
91105
response = client.post("/github-webhook", headers=HEADERS, json=body_queued)
92106
assert response.status_code == 200
93107
assert caplog.messages == [
94-
'action=queued repository=foo/foo job_id=1 workflow="CI"'
108+
'action=queued repository=foo/foo job_id=1 workflow=CI requestor=testerbot'
95109
]
96110

97111

@@ -103,7 +117,7 @@ def test_logging_flow(client, caplog):
103117
response = client.post("/github-webhook", headers=HEADERS, json=body_queued)
104118
assert response.status_code == 200
105119
assert (
106-
caplog.messages[0] == 'action=queued repository=foo/foo job_id=5 workflow="CI"'
120+
caplog.messages[0] == 'action=queued repository=foo/foo job_id=5 workflow=CI requestor=testerbot'
107121
)
108122

109123
body_started = BODY.copy()
@@ -113,15 +127,19 @@ def test_logging_flow(client, caplog):
113127
assert response.status_code == 200
114128
assert (
115129
caplog.messages[1]
116-
== 'action=in_progress repository=foo/foo job_id=5 workflow="CI" time_to_start=5'
130+
== 'action=in_progress repository=foo/foo job_id=5 workflow=CI requestor=testerbot time_to_start=5 '
131+
'runner_name= runner_public=false repository_private=false'
132+
117133
)
118134

119135
body_completed = BODY.copy()
120136
body_completed["action"] = "completed"
137+
body_completed["workflow_job"]["conclusion"] = "success"
121138
body_completed["workflow_job"]["completed_at"] = "2023-01-27T14:05:00Z"
122139
response = client.post("/github-webhook", headers=HEADERS, json=body_completed)
123140
assert response.status_code == 200
124141
assert (
125142
caplog.messages[2]
126-
== 'action=completed repository=foo/foo job_id=5 workflow="CI" time_to_finish=295'
143+
== 'action=completed repository=foo/foo job_id=5 workflow=CI requestor=testerbot '
144+
'time_to_finish=295 conclusion=success'
127145
)

0 commit comments

Comments
 (0)