Mirror of https://github.com/PluralKit/PluralKit.git, synced 2026-02-04 13:06:50 +00:00

commit 6dd7a8a0af (parent d2553e7ca8)

    test

6 changed files with 215 additions and 89 deletions
29  .github/workflows/ci-runner.yml  vendored  Normal file
@@ -0,0 +1,29 @@
+name: PluralKit CI
+on:
+  push:
+  pull_request:
+  workflow_dispatch:
+    inputs:
+      dispatchData:
+
+jobs:
+  run:
+    name: Run CI
+    runs-on: ubuntu-latest
+    container:
+      image: ghcr.io/pluralkit/ci:${{ github.sha }}
+      volumes:
+        - /var/run/docker.sock:/var/run/docker.sock
+    env:
+      DOCKER_HOST: unix:///var/run/docker.sock
+
+      GITHUB_APP_TOKEN: ${{ secrets.COMMIT_STATUS_TOKEN }}
+      DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK }}
+      GIT_SHA: ${{ github.sha }}
+      REPO_URL: https://github.com/${{ github.repository }}
+
+      ACTION_LOGS_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}/jobs/${{ jobs. }}
+
+      DISPATCH_DATA: ${{ inputs.dispatchData }}
+    steps:
+      - run: /ci/run_ci.py
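The runner above only does something useful when it is handed a `dispatchData` input, which `ci/spawn_jobs.py` (added later in this commit) supplies via the workflow-dispatch API. For reference, a manual dispatch of the same workflow could look roughly like this sketch; the repository slug and payload shape are taken from the diff, while the `ref` value, the token variable, and addressing the workflow by file name are assumptions:

# Hedged sketch: manually trigger the ci-runner workflow over the GitHub API.
# Assumes GITHUB_APP_TOKEN holds a token with permission to dispatch workflows.
import json, os, urllib.request

payload = {
    "ref": "main",  # assumption: a ref that contains .github/workflows/ci-runner.yml
    "inputs": {"dispatchData": json.dumps({"action": "bin_api"})},
}
req = urllib.request.Request(
    "https://api.github.com/repos/pluralkit/pluralkit/actions/workflows/ci-runner.yml/dispatches",
    method="POST",
    headers={
        "Accept": "application/vnd.github+json",
        "Authorization": f"Bearer {os.environ['GITHUB_APP_TOKEN']}",
        "Content-Type": "application/json",
    },
    data=json.dumps(payload).encode("utf-8"),
)
with urllib.request.urlopen(req) as resp:
    print(resp.status)  # the dispatches endpoint answers 204 No Content on success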
19  .github/workflows/ci.yml  vendored
@@ -2,12 +2,10 @@ name: PluralKit CI
 on:
   push:
   pull_request:
-  workflow_dispatch:
-    inputs:
-      dispatchData:

 jobs:
-  build-ci-container:
+  build_container:
     name: Build CI container
     runs-on: ubuntu-latest
     steps:
       - uses: docker/login-action@v1
@@ -20,7 +18,8 @@ jobs:
       push: true
       tags: ghcr.io/pluralkit/ci:${{ github.sha }}
       file: ci/Dockerfile
-  setup-ci:
+  spawn_jobs:
+    name: Spawn jobs
     runs-on: ubuntu-latest
     needs: ["build-ci-container"]
     container:
@@ -30,12 +29,12 @@ jobs:
     env:
       DOCKER_HOST: unix:///var/run/docker.sock

-      COMMIT_STATUS_TOKEN: ${{ secrets.COMMIT_STATUS_TOKEN }}
+      GITHUB_APP_TOKEN: ${{ secrets.COMMIT_STATUS_TOKEN }}
       DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK }}
       DISPATCH_DATA: ${{ inputs.dispatchData }}

       # these only work on the push/pull_request jobs
       CUR_SHA: ${{ github.sha }}

       REPO_URL: https://github.com/${{ github.repository }}
       OLD_SHA: ${{ github.event.before }}
       IS_FORCE_PUSH: ${{ github.event.forced }}
     steps:
-      - run: /run_ci.py
+      - run: /ci/spawn_jobs.py
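Both jobs reach the host Docker daemon through the mounted socket, with `DOCKER_HOST` pointed at it; the previous `run_ci.py` smoke-tested this by running `hello-world`. A minimal sketch of that kind of check, assuming the `docker` CLI is present in the CI image (the Dockerfile below installs it):

# Hedged sketch: confirm the mounted Docker socket is reachable from inside the CI container.
import os, subprocess

os.environ.setdefault("DOCKER_HOST", "unix:///var/run/docker.sock")
# `docker version` has to talk to the daemon, so it fails fast if the socket mount is missing.
result = subprocess.run(["docker", "version"], capture_output=True, text=True)
if result.returncode != 0:
    raise SystemExit(f"docker daemon not reachable: {result.stderr.strip()}")
print("docker daemon reachable")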
ci/Dockerfile
@@ -1,3 +1,3 @@
 FROM alpine:latest
 RUN apk add python3 docker git
-COPY ci/run_ci.py /run_ci.py
+COPY ci/ .
26  ci/run.sh
@@ -1,26 +0,0 @@
-#!/bin/sh
-
-set -euo pipefail
-
-notify_discord() {
-    todo
-}
-
-# CI_PREV_COMMIT
-# GH_BRANCH
-
-files_changed=$(git diff --name-only $CI_PREV_COMMIT)
-
-if [ ! -z "$(echo $files_changed | grep -E '.cs$')" ]; then
-    dotnet_format
-fi
-
-if [ ! -z "$(echo $files_changed | grep -E '.rs$')" ]; then
-    rustfmt
-fi
-
-###
-
-if PluralKit.Bot changed build bot
-if PluralKit.Core changed build bot api
-idk this should just be python
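The deleted script concedes the point in its last line, and the replacement logic lands in `ci/spawn_jobs.py` further down. As committed, that version still references undefined names (`changed_file`, `changes`) and unions sets with lists, so here is a hedged, self-contained sketch of the apparent intent, reusing the regex table and aliases from this commit; the helper name `jobs_for_range` is made up for illustration:

# Hedged sketch: map changed files to CI job names (assumed intent of create_jobs in ci/spawn_jobs.py).
import re
import subprocess

MODIFY_REGEXES = {
    r'^ci/': "all",
    r'^docs/': "bin_docs",
    r'^dashboard/': "bin_dashboard",
    r'\.rs$': "format_rs",
    r'\.cs$': "format_cs",
    r'^Cargo.lock': "all_rs",
    r'^services/api': "bin_api",
    r'^services/dispatch': "bin_dispatch",            # dispatch doesn't use libpk
    r'^services/scheduled_tasks': "bin_scheduled_tasks",
    r'^PluralKit\.': "bin_dotnet",                     # one image for all dotnet
    r'^Myriad': "bin_dotnet",
}
ALIASES = {
    "all": {"bin_dotnet", "bin_api", "bin_dispatch", "bin_scheduled_tasks", "bin_dashboard"},
    "all_rs": {"bin_api", "bin_dispatch"},
}

def jobs_for_range(before, now):
    changed = subprocess.check_output(
        ["git", "diff", "--name-only", before, now], text=True
    ).splitlines()
    jobs = set()
    for pattern, job in MODIFY_REGEXES.items():
        if any(re.search(pattern, path) for path in changed):
            jobs.add(job)
    # Expand the "all"/"all_rs" aliases into concrete job names.
    for alias, expansion in ALIASES.items():
        if alias in jobs:
            jobs |= expansion
            jobs.discard(alias)
    return jobs

if __name__ == "__main__":
    print(jobs_for_range("HEAD~1", "HEAD"))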
123  ci/run_ci.py  Executable file → Normal file
@@ -1,9 +1,7 @@
 #!/usr/bin/env python3

-
-import os, sys, json, subprocess
-
-dispatch_data = os.environ.get("DISPATCH_DATA")
+import os, sys, json, subprocess, random, time, datetime
+import urllib.request

 def must_get_env(name):
     val = os.environ.get(name)
@@ -15,68 +13,89 @@ def docker_build(data):
     # file="", tags=[], root="/"
     pass

-def create_jobs():
-    modify_regexes = {
-        r'^ci/': "all",
+def take_some_time():
+    time.sleep(random.random() * 10)

-        r'^docs/': "bin_docs",
-        r'^dashboard/': "bin_dashboard",
+def report_status(name, start_time, exit=None):
+    status=""
+    match exit:
+        case None:
+            status = "in_progress"
+        case True:
+            status = "success"
+        case False:
+            status = "failure"

-        r'\.rs$': "format_rs",
-        r'\.cs$': "format_cs",
-
-        r'^Cargo.lock': "all_rs",
-
-        r'^services/api': "bin_api",
-        # dispatch doesn't use libpk
-        r'^services/dispatch': "bin_dispatch",
-        r'^services/scheduled_tasks': "bin_scheduled_tasks",
-
-        # one image for all dotnet
-        r'^PluralKit\.': "bin_dotnet",
-        r'^Myriad': "bin_dotnet",
+    data = {
+        'name': name,
+        'head_sha': must_get_env("GIT_SHA"),
+        'status': status,
+        'started_at': start_time,
+        'output': {
+            'title': name,
+            'summary': f"dasdfasdfasdf", # todo
+            'text': "[]",
+            'annotations': []
+        },
     }

-    aliases = {
-        "all": ["bin_dotnet", "bin_api", "bin_dispatch", "bin_scheduled_tasks", "bin_dashboard"],
-        "all_rs": ["bin_api", "bin_dispatch"],
-    }
+    if exit is not None:
+        data['completed_at'] = datetime.datetime.now(tz=datetime.timezone.utc).isoformat(timespec='seconds')

-    now = must_get_env("CUR_SHA")
-    before = must_get_env("OLD_SHA")
-    changed_files = subprocess.check_output(["git", "diff", "--name-only", before, now])
+    req = urllib.request.Request(
+        f"https://api.github.com/repos/pluralkit/pluralkit/check-runs",
+        method='POST',
+        headers={
+            'Accept': 'application/vnd.github+json',
+            'Authorization': f'Bearer {must_get_env("GITHUB_APP_TOKEN")}',
+            'content-type':'application/json'
+        },
+        data=json.dumps(data)
+    )

-    jobs = set([])
-    for key in modify_regexes.keys():
-        if true:
-            jobs = jobs | modify_regexes[key]
+    try:
+        with urllib.request.urlopen(request) as response:
+            response_code = response.getcode()
+            response_data = response.read()
+            print(f"{response_code} updated status {data}: {response_data}")
+    except urllib.error.HTTPError as e:
+        response_code = e.getcode()
+        response_data = e.read()
+        print(f"{response_code} failed to update status {name}: {response_data}")

-    for key in changes:
-        if aliases.get(key) is not None:
-            jobs = jobs | aliases[key]
-            jobs = jobs - [key]
-
-    pass
+def run_job(data):
+    subprocess.check_output(["git", "clone", must_get_env("REPO_URL")])
+    os.chdir(os.path.basename(must_get_env("REPO_URL")))
+    subprocess.run(["git", "checkout", must_get_env("GIT_SHA")])
+
+    # run actual job
+    take_some_time()

 def main():
-    print("hello from python!")
-    subprocess.run(["docker", "run", "--rm", "-i", "hello-world"], check=True)
-
-    return 0
+    dispatch_data = os.environ.get("DISPATCH_DATA")
     if dispatch_data == "":
-        return create_jobs()
+        print("no data!")
+        return 1

     data = json.loads(dispatch_data)
     match data.get("action"):
         case "docker_build":
             return docker_build(data.get("data"))
         case "rustfmt":
             pass
         case "dotnet_format":
             pass
         case _:
             print (f"data unknown: {dispatch_data}")
             return 1
+    print("running {dispatch_data}")

+    time_started = datetime.datetime.now(tz=datetime.timezone.utc).isoformat(timespec='seconds')
+    report_status(data["action"], time_started)
+
+    ok = True
+    try:
+        run_job(data)
+    except Exception:
+        ok = False
+        print("job failed!")
+        traceback.format_exc()
+
+    report_status(data["action"], time_started, ok)
+
+    return 0 if ok else 1

 if __name__ == "__main__":
     sys.exit(main())
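`report_status` above targets the GitHub Checks API (`POST /repos/{owner}/{repo}/check-runs`), but as committed it opens an undefined `request` variable, passes a `str` where `urllib` expects bytes, and puts success/failure into `status`, which that API carries in a separate `conclusion` field. A hedged, runnable sketch of what the call appears to intend, keeping the repo slug and token variable from the diff:

# Hedged sketch: create a GitHub check run, as report_status in ci/run_ci.py appears to intend.
import datetime
import json
import os
import urllib.error
import urllib.request

def report_status(name, head_sha, start_time, exit=None):
    data = {
        'name': name,
        'head_sha': head_sha,  # the committed version reads this from GIT_SHA
        'status': "in_progress",
        'started_at': start_time,
        'output': {'title': name, 'summary': "", 'text': "", 'annotations': []},
    }
    if exit is not None:
        data['status'] = "completed"
        data['conclusion'] = "success" if exit else "failure"
        data['completed_at'] = datetime.datetime.now(tz=datetime.timezone.utc).isoformat(timespec='seconds')
    req = urllib.request.Request(
        "https://api.github.com/repos/pluralkit/pluralkit/check-runs",
        method='POST',
        headers={
            'Accept': 'application/vnd.github+json',
            'Authorization': f'Bearer {os.environ["GITHUB_APP_TOKEN"]}',
            'Content-Type': 'application/json',
        },
        data=json.dumps(data).encode("utf-8"),
    )
    try:
        with urllib.request.urlopen(req) as response:
            print(f"{response.status} updated status {name}")
    except urllib.error.HTTPError as e:
        print(f"{e.code} failed to update status {name}: {e.read()!r}")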
105  ci/spawn_jobs.py  Executable file
@@ -0,0 +1,105 @@
+#!/usr/bin/env python3
+
+import os, sys, json, subprocess, random, time
+import urllib.request
+
+def must_get_env(name):
+    val = os.environ.get(name)
+    if val == "":
+        raise "meow"
+    return val
+
+def docker_build(data):
+    # file="", tags=[], root="/"
+    pass
+
+def spawn_job(name):
+    req = urllib.request.Request(
+        f"https://api.github.com/repos/pluralkit/pluralkit/actions/workflows/ci-runner/dispatches",
+        method='POST',
+        headers={
+            'Accept': 'application/vnd.github+json',
+            'Authorization': f'Bearer {must_get_env("GITHUB_APP_TOKEN")}',
+            'content-type':'application/json'
+        },
+        data=json.dumps({
+            'ref': must_get_env("GIT_SHA"),
+            'inputs': {
+                'dispatchData': json.dumps({
+                    'action': name,
+                })
+            }
+        })
+    )
+
+    try:
+        with urllib.request.urlopen(request) as response:
+            response_code = response.getcode()
+            response_data = response.read()
+            print(f"{response_code} spawned job {name}: {response_data}")
+    except urllib.error.HTTPError as e:
+        response_code = e.getcode()
+        response_data = e.read()
+        print(f"{response_code} failed to spawn job {name}: {response_data}")
+
+def create_jobs():
+    modify_regexes = {
+        r'^ci/': "all",
+
+        r'^docs/': "bin_docs",
+        r'^dashboard/': "bin_dashboard",
+
+        r'\.rs$': "format_rs",
+        r'\.cs$': "format_cs",
+
+        r'^Cargo.lock': "all_rs",
+
+        r'^services/api': "bin_api",
+        # dispatch doesn't use libpk
+        r'^services/dispatch': "bin_dispatch",
+        r'^services/scheduled_tasks': "bin_scheduled_tasks",
+
+        # one image for all dotnet
+        r'^PluralKit\.': "bin_dotnet",
+        r'^Myriad': "bin_dotnet",
+    }
+
+    aliases = {
+        "all": ["bin_dotnet", "bin_api", "bin_dispatch", "bin_scheduled_tasks", "bin_dashboard"],
+        "all_rs": ["bin_api", "bin_dispatch"],
+    }
+
+    now = must_get_env("GIT_SHA")
+    before = must_get_env("OLD_SHA")
+    changed_files = subprocess.check_output(["git", "diff", "--name-only", before, now])
+
+    jobs = set([])
+    if must_get_env("IS_FORCE_PUSH") == "true":
+        jobs = jobs | aliases["all"]
+        jobs = jobs | ["format_cs", "format_rs"]
+    else:
+        for key in modify_regexes.keys():
+            if re.match(key, changed_file, flags=re.MULTILINE) is not None:
+                jobs = jobs | modify_regexes[key]
+
+    for key in changes:
+        if aliases.get(key) is not None:
+            jobs = jobs | aliases[key]
+            jobs = jobs - [key]
+
+    # test
+    jobs = jobs | ["test"]
+
+    # do this in a tx or something
+    for job in jobs:
+        spawn_job(job)
+
+    if len(jobs) == 0:
+        print("no jobs to run (??)")
+
+    exit 0
+
+if __name__ == "__main__":
+    print("hello from python!")
+
+    sys.exit(create_jobs())
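One footnote shared by both scripts: `os.environ.get` returns `None`, not `""`, when a variable is missing, so the `== ""` check in `must_get_env` never fires for unset variables, and `raise "meow"` is itself a `TypeError` on Python 3. A small hedged sketch of a stricter variant:

# Hedged sketch: a stricter must_get_env than the one committed in both CI scripts.
import os

def must_get_env(name):
    val = os.environ.get(name)
    if not val:  # catches both missing (None) and empty-string variables
        raise RuntimeError(f"required environment variable {name!r} is not set")
    return val

# Example: must_get_env("GIT_SHA") now fails loudly instead of returning None.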