Compare commits

...

6 commits

Author SHA1 Message Date
582a893aed refactor(vcs/generic): introduce ChangeStatus and remove CommitStatus
CommitStatus was too rigid and tied to legacy GitHub use cases. It is
replaced by a more flexible ChangeStatus designed around Gerrit's richer
features.

Signed-off-by: Raito Bezarius <masterancpp@gmail.com>
2025-01-01 04:12:15 +01:00
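As a hedged illustration of the new shape (the builder and method names below are inferred from the evaluate.rs and nixpkgs.rs hunks further down, not a stable API), a change-level status is built once and then driven through check-run state transitions:

use ofborg::message::Change;
use ofborg::vcs::generic::{ChangeStatus, CheckRunState};

// Sketch only: mirrors the builder and update calls visible in the hunks below.
async fn report(change: &Change) {
    let mut status = ChangeStatus::builder(change)
        .name("Nix evaluation")
        .description("Run a Nix-based evaluation strategy on this change")
        .label_name("Verified")
        .build();

    // Lifecycle transitions replace GitHub-style pending/success/failure statuses.
    status.update_status(CheckRunState::Running).await;
    status
        .update_status_with_description("Complete, with errors", CheckRunState::Completed, None)
        .await;
}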
2f8d0160f4 feat(contrib/frontend/gerrit): design a simple status & check frontend for Gerrit
It uses imaginary APIs for now, but it's OK.

This has surfaced a number of generalizations we will need in our own
API.

Signed-off-by: Raito Bezarius <masterancpp@gmail.com>
2025-01-01 04:12:15 +01:00
378b2a495e feat(statcheck): introduce status & checks server
This is a basic server that returns mocked data.

It contains basic Diesel scaffolding.

Next steps are persistence, client support in the rest of the code, etc.

Signed-off-by: Raito Bezarius <masterancpp@gmail.com>
2025-01-01 04:12:15 +01:00
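Persistence is still listed as a next step; as a purely hypothetical sketch of where it could go (the helper below is not part of this change), the `checks` table and generated schema introduced here could be queried through the deadpool-diesel pool roughly like this:

use diesel::prelude::*;
use ofborg::models::schema::checks;
use ofborg::models::statcheck::Check;

// Hypothetical helper: load the check runs recorded for one patchset of a change.
async fn checks_for_patchset(
    pool: &deadpool_diesel::postgres::Pool,
    change: i32,
    patchset: i32,
) -> Result<Vec<Check>, Box<dyn std::error::Error + Send + Sync>> {
    let conn = pool.get().await?;
    let rows = conn
        .interact(move |conn| {
            checks::table
                .filter(checks::change_id.eq(change))
                .filter(checks::version.eq(patchset))
                .select(Check::as_select())
                .load::<Check>(conn)
        })
        .await??;
    Ok(rows)
}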
1ea8833954 refactor(vcs/generic): promote Gerrit checks as the generic variant
This commit is incomplete, as we have not yet generalized the commit
status states enough.

We are not yet able to describe failures or more complicated cases.

Signed-off-by: Raito Bezarius <masterancpp@gmail.com>
2025-01-01 04:12:15 +01:00
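Concretely, the check-run state promoted to the generic layer at this point (mirrored, minus the serde derives, from the vcs/generic.rs hunk shown further down) only models lifecycle, which is the limitation described above: no variant can express a run that completed with a failure.

// Mirrored from vcs/generic.rs as of this commit; simplified for illustration.
#[derive(Debug, PartialEq)]
pub enum CheckRunState {
    Runnable,
    Running,
    Scheduled,
    Completed,
}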
bcc8d1600d chore(devshell): add event streaming & VCS filter in the dev Procfile
Signed-off-by: Raito Bezarius <masterancpp@gmail.com>
2025-01-01 04:12:15 +01:00
efa973cbf2 feat(amqp): support mTLS connections
This way, we can connect to our shiny new mTLS-enabled RabbitMQ!

Signed-off-by: Raito Bezarius <masterancpp@gmail.com>
2025-01-01 04:12:15 +01:00
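A hedged sketch of the new connection path (the file paths are placeholders; the construction mirrors `RabbitMqConfig::into_tls_config` and `easylapin::from_config` in the hunks below):

use lapin::tcp::{OwnedIdentity, OwnedTLSConfig};
use lapin::{Connection, ConnectionProperties};

// Sketch: hand lapin a client identity plus CA chain instead of a plain connect.
async fn connect_mtls(uri: &str) -> Result<Connection, lapin::Error> {
    let tls = OwnedTLSConfig {
        identity: Some(OwnedIdentity {
            // DER-encoded client identity bundle; placeholder path.
            der: std::fs::read("/path/to/client-identity.p12").expect("client identity"),
            // Our certificates never have any password.
            password: String::new(),
        }),
        // Optional CA bundle as PEM text; placeholder path.
        cert_chain: std::fs::read_to_string("/path/to/ca.pem").ok(),
    };
    Connection::connect_with_config(uri, ConnectionProperties::default(), tls).await
}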
35 changed files with 1506 additions and 618 deletions

379
Cargo.lock generated
View file

@ -151,9 +151,9 @@ dependencies = [
[[package]]
name = "anyhow"
version = "1.0.94"
version = "1.0.95"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c1fd03a028ef38ba2276dce7e33fcd6369c158a1bca17946c4b1b701891c1ff7"
checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04"
[[package]]
name = "arrayref"
@ -387,12 +387,13 @@ checksum = "edca88bc138befd0323b20752846e6587272d3b03b0343c8ea28a6f819e6e71f"
dependencies = [
"async-trait",
"axum-core",
"axum-macros",
"bytes",
"futures-util",
"http 1.2.0",
"http-body 1.0.1",
"http-body-util",
"hyper 1.5.1",
"hyper 1.5.2",
"hyper-util",
"itoa",
"matchit",
@ -434,6 +435,17 @@ dependencies = [
"tracing",
]
[[package]]
name = "axum-macros"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57d123550fa8d071b7255cb0cc04dc302baa6c8c4a79f55701552684d8399bce"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "backtrace"
version = "0.3.74"
@ -553,9 +565,9 @@ dependencies = [
[[package]]
name = "cc"
version = "1.2.4"
version = "1.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9157bbaa6b165880c27a4293a474c91cdcf265cc68cc829bf10be0964a391caf"
checksum = "c31a0499c1dc64f458ad13872de75c0eb7e3fdb0e67964610c914b034fc5956e"
dependencies = [
"jobserver",
"libc",
@ -626,7 +638,7 @@ version = "4.5.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab"
dependencies = [
"heck",
"heck 0.5.0",
"proc-macro2",
"quote",
"syn",
@ -720,9 +732,9 @@ dependencies = [
[[package]]
name = "crossbeam-utils"
version = "0.8.20"
version = "0.8.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80"
checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28"
[[package]]
name = "crypto-common"
@ -734,12 +746,88 @@ dependencies = [
"typenum",
]
[[package]]
name = "darling"
version = "0.20.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989"
dependencies = [
"darling_core",
"darling_macro",
]
[[package]]
name = "darling_core"
version = "0.20.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5"
dependencies = [
"fnv",
"ident_case",
"proc-macro2",
"quote",
"strsim",
"syn",
]
[[package]]
name = "darling_macro"
version = "0.20.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806"
dependencies = [
"darling_core",
"quote",
"syn",
]
[[package]]
name = "data-encoding"
version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8566979429cf69b49a5c740c60791108e86440e8be149bbea4fe54d2c32d6e2"
[[package]]
name = "deadpool"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6541a3916932fe57768d4be0b1ffb5ec7cbf74ca8c903fdfd5c0fe8aa958f0ed"
dependencies = [
"deadpool-runtime",
"num_cpus",
"tokio",
]
[[package]]
name = "deadpool-diesel"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "590573e9e29c5190a5ff782136f871e6e652e35d598a349888e028693601adf1"
dependencies = [
"deadpool",
"deadpool-sync",
"diesel",
]
[[package]]
name = "deadpool-runtime"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "092966b41edc516079bdf31ec78a2e0588d1d0c08f78b91d8307215928642b2b"
dependencies = [
"tokio",
]
[[package]]
name = "deadpool-sync"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "524bc3df0d57e98ecd022e21ba31166c2625e7d3e5bcc4510efaeeab4abcab04"
dependencies = [
"deadpool-runtime",
"tracing",
]
[[package]]
name = "der"
version = "0.7.9"
@ -796,6 +884,65 @@ dependencies = [
"cipher",
]
[[package]]
name = "diesel"
version = "2.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ccf1bedf64cdb9643204a36dd15b19a6ce8e7aa7f7b105868e9f1fad5ffa7d12"
dependencies = [
"bitflags 2.6.0",
"byteorder",
"chrono",
"diesel_derives",
"itoa",
"pq-sys",
]
[[package]]
name = "diesel-derive-enum"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81c5131a2895ef64741dad1d483f358c2a229a3a2d1b256778cdc5e146db64d4"
dependencies = [
"heck 0.4.1",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "diesel_derives"
version = "2.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7f2c3de51e2ba6bf2a648285696137aaf0f5f487bcbea93972fe8a364e131a4"
dependencies = [
"diesel_table_macro_syntax",
"dsl_auto_type",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "diesel_migrations"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a73ce704bad4231f001bff3314d91dce4aba0770cee8b233991859abc15c1f6"
dependencies = [
"diesel",
"migrations_internals",
"migrations_macros",
]
[[package]]
name = "diesel_table_macro_syntax"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "209c735641a413bc68c4923a9d6ad4bcb3ca306b794edaa7eb0b3228a99ffb25"
dependencies = [
"syn",
]
[[package]]
name = "digest"
version = "0.10.7"
@ -845,6 +992,20 @@ version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10"
[[package]]
name = "dsl_auto_type"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c5d9abe6314103864cc2d8901b7ae224e0ab1a103a0a416661b4097b0779b607"
dependencies = [
"darling",
"either",
"heck 0.5.0",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "either"
version = "1.13.0"
@ -1162,6 +1323,12 @@ version = "0.15.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289"
[[package]]
name = "heck"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
[[package]]
name = "heck"
version = "0.5.0"
@ -1265,9 +1432,9 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
[[package]]
name = "hyper"
version = "0.14.31"
version = "0.14.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8c08302e8fa335b151b788c775ff56e7a03ae64ff85c548ee820fecb70356e85"
checksum = "41dfc780fdec9373c01bae43289ea34c972e40ee3c9f6b3c8801a35f35586ce7"
dependencies = [
"bytes",
"futures-channel",
@ -1289,9 +1456,9 @@ dependencies = [
[[package]]
name = "hyper"
version = "1.5.1"
version = "1.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97818827ef4f364230e16705d4706e2897df2bb60617d6ca15d598025a3c481f"
checksum = "256fb8d4bd6413123cc9d91832d78325c48ff41677595be797d90f42969beae0"
dependencies = [
"bytes",
"futures-channel",
@ -1310,13 +1477,13 @@ dependencies = [
[[package]]
name = "hyper-rustls"
version = "0.27.3"
version = "0.27.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08afdbb5c31130e3034af566421053ab03787c640246a446327f550d11bcb333"
checksum = "2d191583f3da1305256f22463b9bb0471acad48a4e534a5218b9963e9c1f59b2"
dependencies = [
"futures-util",
"http 1.2.0",
"hyper 1.5.1",
"hyper 1.5.2",
"hyper-util",
"rustls",
"rustls-native-certs 0.8.1",
@ -1336,7 +1503,7 @@ dependencies = [
"futures-util",
"http 0.2.12",
"http-body 0.4.6",
"hyper 0.14.31",
"hyper 0.14.32",
"tokio",
"tower-service",
]
@ -1347,7 +1514,7 @@ version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0"
dependencies = [
"hyper 1.5.1",
"hyper 1.5.2",
"hyper-util",
"pin-project-lite",
"tokio",
@ -1365,7 +1532,7 @@ dependencies = [
"futures-util",
"http 1.2.0",
"http-body 1.0.1",
"hyper 1.5.1",
"hyper 1.5.2",
"pin-project-lite",
"socket2 0.5.8",
"tokio",
@ -1514,6 +1681,12 @@ dependencies = [
"syn",
]
[[package]]
name = "ident_case"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"
[[package]]
name = "idna"
version = "1.0.3"
@ -1675,9 +1848,9 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
[[package]]
name = "libc"
version = "0.2.168"
version = "0.2.169"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5aaeb2981e0606ca11d79718f8bb01164f1d6ed75080182d3abf017e6d244b6d"
checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a"
[[package]]
name = "libredox"
@ -1759,6 +1932,27 @@ version = "2.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
[[package]]
name = "migrations_internals"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd01039851e82f8799046eabbb354056283fb265c8ec0996af940f4e85a380ff"
dependencies = [
"serde",
"toml",
]
[[package]]
name = "migrations_macros"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ffb161cc72176cb37aa47f1fc520d3ef02263d67d661f44f05d05a079e1237fd"
dependencies = [
"migrations_internals",
"proc-macro2",
"quote",
]
[[package]]
name = "mime"
version = "0.3.17"
@ -1773,9 +1967,9 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
[[package]]
name = "miniz_oxide"
version = "0.8.0"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2d80299ef12ff69b16a84bb182e3b9df68b5a91574d3d4fa6e41b65deec4df1"
checksum = "4ffbe83022cedc1d264172192511ae958937694cd57ce297164951b8b3568394"
dependencies = [
"adler2",
]
@ -1856,10 +2050,20 @@ dependencies = [
]
[[package]]
name = "object"
version = "0.36.5"
name = "num_cpus"
version = "1.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aedf0a2d09c573ed1d8d85b30c119153926a2b36dce0ab28322c09a117a4683e"
checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43"
dependencies = [
"hermit-abi 0.3.9",
"libc",
]
[[package]]
name = "object"
version = "0.36.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87"
dependencies = [
"memchr",
]
@ -1876,12 +2080,16 @@ dependencies = [
"brace-expand",
"chrono",
"clap",
"deadpool-diesel",
"diesel",
"diesel-derive-enum",
"diesel_migrations",
"either",
"fs2",
"futures",
"futures-util",
"http 1.2.0",
"hyper 1.5.1",
"hyper 1.5.2",
"hyper-server",
"jfs",
"lapin",
@ -1902,7 +2110,7 @@ dependencies = [
"shellexpand",
"sys-info",
"tempfile",
"thiserror 2.0.7",
"thiserror 2.0.9",
"tokio",
"tokio-stream",
"tracing",
@ -1946,7 +2154,7 @@ dependencies = [
"once_cell",
"shell-escape",
"tempfile",
"thiserror 2.0.7",
"thiserror 2.0.9",
"tokio",
]
@ -2071,9 +2279,9 @@ checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"
[[package]]
name = "ordered-float"
version = "4.5.0"
version = "4.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c65ee1f9701bf938026630b455d5315f490640234259037edb259798b3bcf85e"
checksum = "7bb71e1b3fa6ca1c61f383464aaf2bb0e2f8e772a1f01d486832464de363b951"
dependencies = [
"num-traits",
]
@ -2297,6 +2505,15 @@ dependencies = [
"zerocopy",
]
[[package]]
name = "pq-sys"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6cc05d7ea95200187117196eee9edd0644424911821aeb28a18ce60ea0b8793"
dependencies = [
"vcpkg",
]
[[package]]
name = "proc-macro2"
version = "1.0.92"
@ -2342,7 +2559,7 @@ dependencies = [
"rustc-hash",
"rustls",
"socket2 0.5.8",
"thiserror 2.0.7",
"thiserror 2.0.9",
"tokio",
"tracing",
]
@ -2361,7 +2578,7 @@ dependencies = [
"rustls",
"rustls-pki-types",
"slab",
"thiserror 2.0.7",
"thiserror 2.0.9",
"tinyvec",
"tracing",
"web-time",
@ -2369,9 +2586,9 @@ dependencies = [
[[package]]
name = "quinn-udp"
version = "0.5.8"
version = "0.5.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "52cd4b1eff68bf27940dd39811292c49e007f4d0b4c357358dc9b0197be6b527"
checksum = "1c40286217b4ba3a71d644d752e6a0b71f13f1b6a2c5311acfcbe0c2418ed904"
dependencies = [
"cfg_aliases",
"libc",
@ -2518,7 +2735,7 @@ dependencies = [
"http 1.2.0",
"http-body 1.0.1",
"http-body-util",
"hyper 1.5.1",
"hyper 1.5.2",
"hyper-rustls",
"hyper-util",
"ipnet",
@ -2659,7 +2876,7 @@ dependencies = [
"openssl-probe",
"rustls-pki-types",
"schannel",
"security-framework 3.0.1",
"security-framework 3.1.0",
]
[[package]]
@ -2753,9 +2970,9 @@ dependencies = [
[[package]]
name = "security-framework"
version = "3.0.1"
version = "3.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1415a607e92bec364ea2cf9264646dcce0f91e6d65281bd6f2819cca3bf39c8"
checksum = "81d3f8c9bfcc3cbb6b0179eb57042d75b1582bdc65c3cb95f3fa999509c03cbc"
dependencies = [
"bitflags 2.6.0",
"core-foundation 0.10.0",
@ -2766,9 +2983,9 @@ dependencies = [
[[package]]
name = "security-framework-sys"
version = "2.12.1"
version = "2.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fa39c7303dc58b5543c94d22c1766b0d31f2ee58306363ea622b10bbc075eaa2"
checksum = "1863fd3768cd83c56a7f60faa4dc0d403f1b6df0a38c3c25f44b7894e45370d5"
dependencies = [
"core-foundation-sys",
"libc",
@ -2802,9 +3019,9 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.133"
version = "1.0.134"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377"
checksum = "d00f4175c42ee48b15416f6193a959ba3a0d67fc699a0db9ad12df9f83991c7d"
dependencies = [
"itoa",
"memchr",
@ -2822,6 +3039,15 @@ dependencies = [
"serde",
]
[[package]]
name = "serde_spanned"
version = "0.6.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1"
dependencies = [
"serde",
]
[[package]]
name = "serde_urlencoded"
version = "0.7.1"
@ -2969,9 +3195,9 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
[[package]]
name = "syn"
version = "2.0.90"
version = "2.0.91"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "919d3b74a5dd0ccd15aeb8f93e7006bd9e14c295087c9896a110f490752bcf31"
checksum = "d53cbcb5a243bd33b7858b1d7f4aca2153490815872d86d955d6ea29f743c035"
dependencies = [
"proc-macro2",
"quote",
@ -3044,11 +3270,11 @@ dependencies = [
[[package]]
name = "thiserror"
version = "2.0.7"
version = "2.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93605438cbd668185516ab499d589afb7ee1859ea3d5fc8f6b0755e1c7443767"
checksum = "f072643fd0190df67a8bab670c20ef5d8737177d6ac6b2e9a236cb096206b2cc"
dependencies = [
"thiserror-impl 2.0.7",
"thiserror-impl 2.0.9",
]
[[package]]
@ -3064,9 +3290,9 @@ dependencies = [
[[package]]
name = "thiserror-impl"
version = "2.0.7"
version = "2.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1d8749b4531af2117677a5fcd12b1348a3fe2b81e36e61ffeac5c4aa3273e36"
checksum = "7b50fa271071aae2e6ee85f842e2e28ba8cd2c5fb67f11fcb1fd70b276f9e7d4"
dependencies = [
"proc-macro2",
"quote",
@ -3126,9 +3352,9 @@ dependencies = [
[[package]]
name = "tinyvec"
version = "1.8.0"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938"
checksum = "022db8904dfa342efe721985167e9fcd16c29b226db4397ed752a761cfce81e8"
dependencies = [
"tinyvec_macros",
]
@ -3201,6 +3427,40 @@ dependencies = [
"tokio",
]
[[package]]
name = "toml"
version = "0.8.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e"
dependencies = [
"serde",
"serde_spanned",
"toml_datetime",
"toml_edit",
]
[[package]]
name = "toml_datetime"
version = "0.6.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41"
dependencies = [
"serde",
]
[[package]]
name = "toml_edit"
version = "0.22.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5"
dependencies = [
"indexmap 2.7.0",
"serde",
"serde_spanned",
"toml_datetime",
"winnow",
]
[[package]]
name = "tonic"
version = "0.12.3"
@ -3216,7 +3476,7 @@ dependencies = [
"http 1.2.0",
"http-body 1.0.1",
"http-body-util",
"hyper 1.5.1",
"hyper 1.5.2",
"hyper-timeout",
"hyper-util",
"percent-encoding",
@ -3440,6 +3700,12 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d"
[[package]]
name = "vcpkg"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
[[package]]
name = "version_check"
version = "0.1.5"
@ -3769,6 +4035,15 @@ version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
[[package]]
name = "winnow"
version = "0.6.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "36c1fec1a2bb5866f07c25f68c26e565c4c200aebb96d7e55710c19d3e8ac49b"
dependencies = [
"memchr",
]
[[package]]
name = "write16"
version = "1.0.0"

View file

@ -1,3 +1,5 @@
amqp-server: rabbitmq-server
gerrit-event-streamer: cargo run --bin gerrit-event-streamer -- dev.config.json
gerrit-vcs-filter: cargo run --bin gerrit-generic-vcs-filter -- dev.config.json
pastebin-worker: cargo run --bin pastebin-worker -- dev.config.json
stats-worker: cargo run --bin stats -- dev.config.json

88
contrib/checks-ofborg.js Normal file
View file

@ -0,0 +1,88 @@
/* Inspired by the Lix setup.
* Inspired by the Buildbot setup.
*
* Designed for OfBorg custom checks & server API.
* Original-Author: puckipedia
*/
Gerrit.install((plugin) => {
const serverInfo = plugin.serverInfo();
const { statcheck_base_uri, enabled_projects } = serverInfo.plugin;
const configuration = {
baseUri: statcheck_base_uri,
// TODO: use the ofborg API directly for this.
supportedProjects: enabled_projects,
};
function makeStatcheckUri(suffix) {
return `${configuration.baseUri}/${suffix}`;
}
let checksProvider;
checksProvider = {
async fetch({ repo, patchsetSha, changeNumber, patchsetNumber }, runBefore = false) {
if (!configuration.supportedProjects.includes(repo)) {
return { responseCode: 'OK' };
}
let num = changeNumber.toString(10);
// Iterate over all check runs.
let checksFetch = await fetch(makeStatcheckUri(`changes/${num}/versions/${patchsetNumber}/checks`), { credentials: 'include' });
if (checksFetch.status === 400) {
if ((await checksFetch.json()).error === 'invalid origin' && !runBefore) {
return await checksProvider.fetch({ repo, patchsetSha, changeNumber, patchsetNumber }, true);
}
return { responseCode: 'OK' }
} else if (checksFetch.status === 403) {
console.warn(`Failed to fetch change '${changeNumber}' for authorization reasons; automatic login is still a WIP.`);
return { responseCode: 'NOT_LOGGED_IN', loginCallback() {
} };
}
let checks = await checksFetch.json();
if (checks.length === 0) {
return { responseCode: 'OK' };
}
let runs = [];
let links = [];
for (let check of checks) {
let checkrun = {
attempt: check.id,
checkName: check.name,
externalId: check.id,
status: check.status,
checkLink: null, // TODO: have a proper and nice web URI
labelName: 'Verified', // TODO: generalize what label a check affects.
results: [],
links: [], // TODO: have a proper web uri
};
if (check.started_at !== null) {
checkrun.startedTimestamp = new Date(check.started_at * 1000);
}
if (check.completed_at !== null) {
checkrun.finishedTimestamp = new Date(check.completed_at * 1000);
}
if (check.results !== null) {
checkrun.results = [
{
category: "SUCCESS",
summary: check.summary
}
];
}
runs.push(checkrun);
}
return { responseCode: 'OK', runs, links };
}
};
plugin.checks().register(checksProvider);
});

View file

@ -85,6 +85,8 @@
pkg-config
rabbitmq-server
hivemind
diesel-cli
postgresql.dev
];
postHook = ''
@ -117,5 +119,6 @@
RUST_BACKTRACE = "1";
RUST_LOG = "ofborg=debug";
NIX_PATH = "nixpkgs=${pkgs.path}";
DATABASE_URL = "postgres:///ofborg";
};
}

View file

@ -11,7 +11,7 @@ edition = "2018"
[dependencies]
async-stream = "0.3.6"
async-trait = "0.1.83"
axum = "0.7.8"
axum = { version = "0.7.8", features = ["macros"] }
base64 = "0.22.1"
brace-expand = "0.1.0"
chrono = "0.4.38"
@ -51,3 +51,7 @@ tracing-opentelemetry = "0.28.0"
uuid = { version = "1.11", features = ["v4"] }
zstd = "0.13.2"
blake3 = { version = "1.5.5", features = ["digest"] }
diesel = { version = "2.2.6", features = ["chrono", "postgres"] }
diesel_migrations = { version = "2.2.0", features = ["postgres"] }
deadpool-diesel = { version = "0.6.1", features = ["postgres", "tracing"] }
diesel-derive-enum = { version = "2.1.0", features = ["postgres"] }

9
ofborg/diesel.toml Normal file
View file

@ -0,0 +1,9 @@
# For documentation on how to configure this file,
# see https://diesel.rs/guides/configuring-diesel-cli
[print_schema]
file = "src/models/schema.rs"
custom_type_derives = ["diesel::query_builder::QueryId", "Clone"]
[migrations_directory]
dir = "/home/raito/dev/git.lix.systems/the-distro/ofborg/ofborg/migrations"

View file

@ -0,0 +1,6 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.
DROP FUNCTION IF EXISTS diesel_manage_updated_at(_tbl regclass);
DROP FUNCTION IF EXISTS diesel_set_updated_at();

View file

@ -0,0 +1,36 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.
-- Sets up a trigger for the given table to automatically set a column called
-- `updated_at` whenever the row is modified (unless `updated_at` was included
-- in the modified columns)
--
-- # Example
--
-- ```sql
-- CREATE TABLE users (id SERIAL PRIMARY KEY, updated_at TIMESTAMP NOT NULL DEFAULT NOW());
--
-- SELECT diesel_manage_updated_at('users');
-- ```
CREATE OR REPLACE FUNCTION diesel_manage_updated_at(_tbl regclass) RETURNS VOID AS $$
BEGIN
EXECUTE format('CREATE TRIGGER set_updated_at BEFORE UPDATE ON %s
FOR EACH ROW EXECUTE PROCEDURE diesel_set_updated_at()', _tbl);
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE FUNCTION diesel_set_updated_at() RETURNS trigger AS $$
BEGIN
IF (
NEW IS DISTINCT FROM OLD AND
NEW.updated_at IS NOT DISTINCT FROM OLD.updated_at
) THEN
NEW.updated_at := current_timestamp;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;

View file

@ -0,0 +1,2 @@
-- This file should undo anything in `up.sql`
DROP TABLE IF EXISTS "checks";

View file

@ -0,0 +1,10 @@
-- Your SQL goes here
CREATE TABLE "checks"(
"id" INT4 NOT NULL PRIMARY KEY,
"name" VARCHAR NOT NULL,
"status" VARCHAR NOT NULL,
"started_at" TIMESTAMP,
"completed_at" TIMESTAMP,
"summary" TEXT NOT NULL
);

View file

@ -0,0 +1,133 @@
use axum::{
extract::State,
routing::{get, put},
Json, Router,
};
use diesel_migrations::{embed_migrations, EmbeddedMigrations, MigrationHarness};
use ofborg::config::Config;
use ofborg::web::statcheck;
use serde::Serialize;
use std::{env, net::SocketAddr, os::unix::io::FromRawFd, sync::Arc};
use tokio::net::TcpListener;
pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations/");
// --- Entry point ---
#[tokio::main]
async fn main() {
ofborg::setup_log();
let arg = env::args().nth(1).expect("usage: statcheck-web <config>");
let cfg = ofborg::config::load(arg.as_ref());
// Build the Diesel connection manager before moving the config into an Arc,
// otherwise `cfg` would be used after it has already been moved.
let manager = deadpool_diesel::postgres::Manager::new(
cfg.statcheck.database_url.clone(),
deadpool_diesel::Runtime::Tokio1,
);
let shared_config = Arc::new(cfg);
let pool = deadpool_diesel::postgres::Pool::builder(manager)
.build()
.expect("Failed to build a PostgreSQL connection pool");
// run the migrations on server startup
{
let conn = pool.get().await.unwrap();
conn.interact(|conn| conn.run_pending_migrations(MIGRATIONS).map(|_| ()))
.await
.unwrap()
.unwrap();
}
// Build the app router
let app = Router::new()
.route("/health", get(health_check))
.route("/config", get(config_check))
.route(
"/changes/:change_id/versions/:version_id/checks/:check_id",
get(statcheck::get_check).patch(statcheck::patch_check),
)
.route(
"/changes/:change_id/versions/:version_id/checks",
put(statcheck::put_checks),
)
.with_state(shared_config)
.with_state(pool);
// Check for systemd socket activation
if let Some(listener) = get_systemd_listener() {
tracing::info!("Running with systemd socket activation");
axum::serve(listener, app.into_make_service())
.await
.expect("Failed to serve");
} else {
// Fallback to manual address for testing
let host = env::var("HOST").unwrap_or_else(|_| "127.0.0.1".to_string());
let port = env::var("PORT")
.unwrap_or_else(|_| "8000".to_string())
.parse::<u16>()
.expect("Invalid port number");
let addr = SocketAddr::new(host.parse().expect("Invalid host"), port);
tracing::info!("Running on http://{}", addr);
let listener = TcpListener::bind(addr)
.await
.expect("Failed to bind on the provided socket address");
axum::serve(listener, app)
.await
.expect("Failed to bind server");
}
}
// --- Route Handlers ---
#[derive(Serialize)]
struct HealthStatus {
status: String,
}
/// Health check endpoint
async fn health_check() -> Json<HealthStatus> {
Json(HealthStatus {
status: "OK".to_string(),
})
}
#[derive(Serialize)]
struct ConfigStatus {
version: String,
environment: String,
gerrit_instance: Option<String>,
// TODO: add ongoing_statuses as a simple counter?
}
/// Config endpoint
async fn config_check(State(config): State<Arc<Config>>) -> Json<ConfigStatus> {
Json(ConfigStatus {
version: env!("CARGO_PKG_VERSION").to_string(),
environment: "production".to_string(),
gerrit_instance: config.gerrit.as_ref().map(|g| g.instance_uri.clone()),
})
}
/// Try to retrieve a listener from systemd socket activation
fn get_systemd_listener() -> Option<tokio::net::TcpListener> {
if let Ok(listen_fds) = env::var("LISTEN_FDS") {
let listen_fds: i32 = listen_fds.parse().ok()?;
let fd_offset = 3; // File descriptors start at 3 in systemd
if listen_fds > 0 {
// Use the first systemd-provided file descriptor
let fd = fd_offset;
println!("Using systemd file descriptor: {}", fd);
unsafe {
let std_listener = std::net::TcpListener::from_raw_fd(fd);
std_listener.set_nonblocking(true).ok()?;
let listener = TcpListener::from_std(std_listener).ok()?;
return Some(listener);
}
}
}
None
}

View file

@ -1,83 +0,0 @@
/// Statuses and checks worker
/// - will keep a database of changes
/// - their statuses
/// - their checks
/// - is VCS/CI agnostic
use std::env;
use std::error::Error;
use ofborg::config;
use ofborg::easyamqp;
use ofborg::easyamqp::ChannelExt;
use ofborg::easyamqp::ConsumerExt;
use ofborg::easylapin;
use ofborg::tasks;
use tracing::info;
#[tokio::main]
async fn main() -> Result<(), Box<dyn Error>> {
ofborg::setup_log();
let arg = env::args()
.nth(1)
.expect("usage: statcheck-worker <config>");
let cfg = config::load(arg.as_ref());
let conn = easylapin::from_config(&cfg.rabbitmq).await?;
let mut chan = conn.create_channel().await?;
// an RPC queue for verbs
let api_queue_name = "statcheck-api".to_owned();
// an event queue to be notified about statuses & checks changes.
let event_queue_name = "statcheck-events".to_owned();
chan.declare_exchange(easyamqp::ExchangeConfig {
exchange: api_queue_name.clone(),
exchange_type: easyamqp::ExchangeType::Topic,
passive: false,
durable: true,
auto_delete: false,
no_wait: false,
internal: false,
})
.await?;
chan.declare_queue(easyamqp::QueueConfig {
queue: api_queue_name.clone(),
passive: false,
durable: true,
exclusive: false,
auto_delete: false,
no_wait: false,
})
.await?;
chan.bind_queue(easyamqp::BindQueueConfig {
queue: api_queue_name.clone(),
exchange: api_queue_name.clone(),
routing_key: None,
no_wait: false,
})
.await?;
info!("Waiting for API calls on {}", api_queue_name);
info!("Notifying of new changes on {}", event_queue_name);
easylapin::WorkerChannel(chan)
.consume(
tasks::status_check_collector::StatusCheckCollector::new(cfg.statcheck.clone().db),
easyamqp::ConsumeConfig {
queue: api_queue_name.clone(),
consumer_tag: format!("{}-{}", cfg.whoami(), api_queue_name),
no_local: false,
no_ack: false,
no_wait: false,
exclusive: false,
},
)
.await?;
drop(conn); // Close connection.
info!("Closed the session... EOF");
Ok(())
}

View file

@ -7,6 +7,7 @@ use std::io::Read;
use std::marker::PhantomData;
use std::path::{Path, PathBuf};
use lapin::tcp::{OwnedIdentity, OwnedTLSConfig};
use serde::de::{self, Deserialize, Deserializer};
use tracing::error;
@ -38,8 +39,7 @@ pub struct FeedbackConfig {
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct StatusCheckConfig {
#[serde(deserialize_with = "deserialize_and_expand_pathbuf")]
pub db: PathBuf,
pub database_url: String,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
@ -59,6 +59,9 @@ pub struct RabbitMqConfig {
pub virtualhost: Option<String>,
pub username: String,
pub password_file: Option<PathBuf>,
pub ssl_cacert_file: Option<PathBuf>,
#[serde(deserialize_with = "optional_deserialize_and_expand_pathbuf")]
pub ssl_client_key_file: Option<PathBuf>,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
@ -187,6 +190,24 @@ impl RabbitMqConfig {
);
Ok(uri)
}
pub fn into_tls_config(&self) -> Option<OwnedTLSConfig> {
if self.ssl_client_key_file.is_none() {
return None;
}
Some(OwnedTLSConfig {
identity: Some(OwnedIdentity {
der: std::fs::read(self.ssl_client_key_file.as_ref().unwrap()).ok()?,
// Our certificates never have any password.
password: "".to_owned(),
}),
cert_chain: self
.ssl_cacert_file
.as_ref()
.and_then(|p| std::fs::read_to_string(p).ok()),
})
}
}
#[must_use]
@ -253,3 +274,15 @@ where
.into_owned(),
))
}
fn optional_deserialize_and_expand_pathbuf<'de, D>(
deserializer: D,
) -> Result<Option<PathBuf>, D::Error>
where
D: Deserializer<'de>,
{
let raw_literal: PathBuf = Deserialize::deserialize(deserializer)?;
Ok(shellexpand::env(&raw_literal.to_string_lossy())
.ok()
.map(|p| PathBuf::from(p.into_owned())))
}

View file

@ -31,7 +31,11 @@ pub async fn from_config(cfg: &RabbitMqConfig) -> Result<Connection, lapin::Erro
client_properties: props,
..Default::default()
};
Connection::connect(&cfg.as_uri()?, opts).await
if let Some(tls_config) = cfg.into_tls_config() {
Connection::connect_with_config(&cfg.as_uri()?, opts, tls_config).await
} else {
Connection::connect(&cfg.as_uri()?, opts).await
}
}
#[async_trait]

View file

@ -1,10 +1,12 @@
use crate::nix;
use crate::vcs::generic::{Category, CheckResult, Link};
use std::fs::File;
use std::path::Path;
pub struct EvalChecker {
name: String,
failure_category: Category,
op: nix::Operation,
args: Vec<String>,
nix: nix::Nix,
@ -12,9 +14,16 @@ pub struct EvalChecker {
impl EvalChecker {
#[must_use]
pub fn new(name: &str, op: nix::Operation, args: Vec<String>, nix: nix::Nix) -> EvalChecker {
pub fn new(
name: &str,
failure_category: Category,
op: nix::Operation,
args: Vec<String>,
nix: nix::Nix,
) -> EvalChecker {
EvalChecker {
name: name.to_owned(),
failure_category,
op,
args,
nix,
@ -26,6 +35,16 @@ impl EvalChecker {
&self.name
}
#[must_use]
pub fn success_category(&self) -> Category {
Category::Success
}
#[must_use]
pub fn failure_category(&self) -> Category {
self.failure_category
}
pub async fn execute(&self, path: &Path) -> Result<File, File> {
self.nix
.safely(&self.op, path, self.args.clone(), false)
@ -38,4 +57,28 @@ impl EvalChecker {
cli.append(&mut self.args.clone());
cli.join(" ")
}
pub fn into_successful_result(&self) -> CheckResult {
CheckResult {
external_id: None,
category: self.success_category(),
summary: self.cli_cmd(),
message: None,
tags: vec![],
links: vec![],
code_pointers: vec![],
}
}
pub fn into_failed_result(&self, links: Vec<Link>) -> CheckResult {
CheckResult {
external_id: None,
category: self.failure_category(),
summary: self.cli_cmd(),
message: None,
tags: vec![],
links,
code_pointers: vec![],
}
}
}

View file

@ -37,6 +37,7 @@ pub mod ghevent;
pub mod locks;
pub mod maintainers;
pub mod message;
pub mod models;
pub mod nix;
pub mod nixenv;
pub mod nixstats;
@ -49,6 +50,7 @@ pub mod tasks;
pub mod test_scratch;
pub mod utils;
pub mod vcs;
pub mod web;
pub mod worker;
pub mod writetoline;
@ -74,6 +76,7 @@ pub mod ofborg {
pub use crate::tasks;
pub use crate::test_scratch;
pub use crate::vcs;
pub use crate::web;
pub use crate::worker;
pub use crate::writetoline;

2
ofborg/src/models/mod.rs Normal file
View file

@ -0,0 +1,2 @@
pub mod schema;
pub mod statcheck;

View file

@ -0,0 +1,16 @@
// @generated automatically by Diesel CLI.
diesel::table! {
checks (change_id, id, version) {
id -> Int4,
change_id -> Int4,
version -> Int4,
head_sha -> Varchar,
name -> Varchar,
status -> Varchar,
conclusion -> Varchar,
started_at -> Nullable<Timestamp>,
completed_at -> Nullable<Timestamp>,
summary -> Text,
}
}

View file

@ -0,0 +1,17 @@
use chrono::NaiveDateTime;
use diesel::prelude::*;
#[derive(Queryable, Selectable)]
#[diesel(table_name = super::schema::checks)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct Check {
pub id: i32,
pub change_id: i32,
pub version: i32,
pub name: String,
pub status: String,
// In UTC.
pub started_at: Option<NaiveDateTime>,
pub completed_at: Option<NaiveDateTime>,
pub summary: String,
}

View file

@ -4,15 +4,14 @@ pub mod stdenvs;
pub use self::nixpkgs::NixpkgsStrategy;
pub use self::stdenvs::Stdenvs;
use crate::message::buildjob::BuildJob;
use crate::vcs::commit_status::CommitStatusError;
use crate::vcs::generic::CheckRunOptions;
use crate::vcs::generic::{CheckRun, CommitStatusError};
pub type StepResult<T> = Result<T, Error>;
#[derive(Default)]
pub struct EvaluationComplete {
pub builds: Vec<BuildJob>,
pub checks: Vec<CheckRunOptions>,
pub checks: Vec<CheckRun>,
}
#[derive(Debug)]

View file

@ -10,9 +10,8 @@ use crate::nixenv::HydraNixEnv;
use crate::outpathdiff::{OutPathDiff, PackageArch};
use crate::tagger::{MaintainerPrTagger, PkgsAddedRemovedTagger, RebuildTagger, StdenvTagger};
use crate::tasks::eval::{stdenvs::Stdenvs, Error, EvaluationComplete, StepResult};
use crate::vcs::commit_status::CommitStatus;
use crate::vcs::generic::{
CheckRunOptions, CheckRunState, Conclusion, State, VersionControlSystemAPI,
Category, ChangeStatus, CheckResult, CheckRun, CheckRunState, VersionControlSystemAPI,
};
use std::path::Path;
@ -196,28 +195,37 @@ impl<'a> NixpkgsStrategy<'a> {
}
}
fn performance_stats(&self) -> Vec<CheckRunOptions> {
fn performance_stats(&self) -> Vec<CheckRun> {
if let Some(ref rebuildsniff) = self.outpath_diff {
if let Some(_report) = rebuildsniff.performance_diff() {
return vec![CheckRunOptions {
name: "Evaluation Performance Report".to_owned(),
completed_at: Some(
if let Some(report) = rebuildsniff.performance_diff() {
return vec![CheckRun {
check_name: "Evaluation Performance Report".to_owned(),
finished_timestamp: Some(
Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true),
),
started_at: None,
conclusion: Some(Conclusion::Success),
status: Some(CheckRunState::Completed),
details_url: None,
started_timestamp: None,
scheduled_timestamp: None,
label_name: Some("Performance-Impact".to_owned()),
status_description: Some("Succeeded.".to_owned()),
status: CheckRunState::Completed,
external_id: None,
head_sha: self.job.change.head_sha.clone(),
// FIXME: before going into production, let's reintroduce this as a pastebin?
// output: Some(Output {
// title: "Evaluator Performance Report".to_string(),
// summary: "".to_string(),
// text: Some(report.markdown()),
// annotations: None,
// images: None,
// }),
results: vec![CheckResult {
external_id: None,
category: crate::vcs::generic::Category::Info,
summary: "".to_owned(),
message: Some(report.markdown()),
tags: vec![],
links: vec![],
code_pointers: vec![],
}],
change: None,
patchset: None,
attempt: None,
check_description: None,
check_link: None,
is_ai_powered: Some(false),
status_link: None,
// FIXME: head_sha: self.job.change.head_sha.clone(),
}];
}
}
@ -241,14 +249,16 @@ impl<'a> NixpkgsStrategy<'a> {
async fn update_rebuild_labels(
&mut self,
dir: &Path,
overall_status: &mut CommitStatus,
overall_status: &mut ChangeStatus,
) -> Result<(), Error> {
if let Some(ref rebuildsniff) = self.outpath_diff {
let mut rebuild_tags = RebuildTagger::new();
if let Some(attrs) = rebuildsniff.calculate_rebuild() {
if !attrs.is_empty() {
overall_status.set_url(self.gist_changed_paths(&attrs).await);
overall_status
.set_status_link(self.gist_changed_paths(&attrs).await)
.await;
self.record_impacted_maintainers(dir, &attrs).await?;
}
@ -287,6 +297,15 @@ impl<'a> NixpkgsStrategy<'a> {
.collect::<Vec<Vec<&str>>>();
if let Some(ref changed_paths) = self.changed_paths {
let mut status = ChangeStatus::builder(&self.job.change)
.name("Automatic maintainer request")
.description("This check will evaluate automatically impacted maintainers by this change and request them. In case of too large changes, this step is skipped.")
.build();
status
.create(self.vcs_api.clone(), CheckRunState::Running)
.await;
let m =
ImpactedMaintainers::calculate(&self.nix, dir, changed_paths, &changed_attributes)
.await;
@ -308,27 +327,24 @@ impl<'a> NixpkgsStrategy<'a> {
"pull request has {} changed paths, skipping review requests",
changed_paths.len()
);
let status = CommitStatus::new(
self.vcs_api.clone(),
self.repo.clone(),
self.job.change.head_sha.clone(),
"ofborg-eval-check-maintainers".to_owned(),
String::from("large change, skipping automatic review requests"),
gist_url,
);
status.set(State::Success).await?;
status
.update_status_with_description(
"large change, skipping automatic review requests",
CheckRunState::Completed,
gist_url,
)
.await;
return Ok(());
}
let status = CommitStatus::new(
self.vcs_api.clone(),
self.repo.clone(),
self.job.change.head_sha.clone(),
"ofborg-eval-check-maintainers".to_owned(),
String::from("matching changed paths to changed attrs..."),
gist_url,
);
status.set(State::Success).await?;
status
.update_status_with_description(
"matching changed paths to changed attrs...",
CheckRunState::Completed,
gist_url,
)
.await;
if let Ok(ref maint) = m {
self.request_reviews(maint).await;
@ -351,15 +367,13 @@ impl<'a> NixpkgsStrategy<'a> {
async fn check_meta_queue_builds(&mut self, dir: &Path) -> StepResult<Vec<BuildJob>> {
if let Some(ref possibly_touched_packages) = self.touched_packages {
let mut status = CommitStatus::new(
self.vcs_api.clone(),
self.repo.clone(),
self.job.change.head_sha.clone(),
"ci-eval-check-meta".to_owned(),
String::from("config.nix: checkMeta = true"),
None,
);
status.set(State::Pending).await?;
let status = ChangeStatus::builder(&self.job.change)
.name("Evaluate with meta attributes typing")
.description("This runs a nixpkgs evaluation with `config.checkMeta = true;` during nixpkgs instantiation")
.label_name("Verified")
.build();
status.update_status(CheckRunState::Running).await;
let nixenv = HydraNixEnv::new(self.nix.clone(), dir.to_path_buf(), true);
match nixenv.execute_with_stats().await {
@ -373,8 +387,7 @@ impl<'a> NixpkgsStrategy<'a> {
try_build.sort();
try_build.dedup();
status.set_url(None);
status.set(State::Success).await?;
status.update_status(CheckRunState::Completed).await;
if !try_build.is_empty() && try_build.len() <= 20 {
// In the case of trying to merge master in to
@ -395,17 +408,21 @@ impl<'a> NixpkgsStrategy<'a> {
}
}
Err(out) => {
status.set_url(
crate::utils::pastebin::make_pastebin(
&mut self.chan,
"Meta Check",
out.display(),
status
.update_status_with_description(
"Meta check failed",
CheckRunState::Completed,
crate::utils::pastebin::make_pastebin(
&mut self.chan,
"Meta Check",
out.display(),
)
.await
.ok()
.map(|pp| pp.uri),
)
.await
.ok()
.map(|pp| pp.uri),
);
status.set(State::Failure).await?;
.await;
// TODO: add a failed result with the details.
Err(Error::Fail(String::from(
"Failed to validate package metadata.",
)))
@ -421,19 +438,47 @@ impl<'a> NixpkgsStrategy<'a> {
Ok(())
}
pub(crate) async fn preflight_check(
&self,
target_branch: &str,
status: &mut ChangeStatus,
) -> StepResult<bool> {
if target_branch.starts_with("nixos-") || target_branch.starts_with("nixpkgs-") {
status
.update_description(
"The branch you have targeted is a read-only mirror for channels. \
Please target release-* or master.",
)
.await;
info!("PR targets a nixos-* or nixpkgs-* branch");
return Ok(false);
};
Ok(true)
}
pub(crate) async fn on_target_branch(
&mut self,
dir: &Path,
status: &mut CommitStatus,
status: &mut ChangeStatus,
) -> StepResult<()> {
status
.set_with_description("Checking original stdenvs", State::Pending)
.await?;
.update_status_with_description(
"Checking original stdenvs",
CheckRunState::Scheduled,
None,
)
.await;
self.check_stdenvs_before(dir).await;
status
.set_with_description("Checking original out paths", State::Pending)
.await?;
.update_status_with_description(
"Checking original out paths",
CheckRunState::Scheduled,
None,
)
.await;
self.check_outpaths_before(dir).await?;
Ok(())
@ -456,18 +501,22 @@ impl<'a> NixpkgsStrategy<'a> {
.await;
}
pub(crate) async fn after_merge(&mut self, status: &mut CommitStatus) -> StepResult<()> {
pub(crate) async fn after_merge(&mut self, status: &mut ChangeStatus) -> StepResult<()> {
self.update_labels(&[], &["2.status: merge conflict".to_owned()])
.await;
status
.set_with_description("Checking new stdenvs", State::Pending)
.await?;
.update_status_with_description("Checking new stdenvs", CheckRunState::Scheduled, None)
.await;
self.check_stdenvs_after().await;
status
.set_with_description("Checking new out paths", State::Pending)
.await?;
.update_status_with_description(
"Checking new out paths",
CheckRunState::Scheduled,
None,
)
.await;
self.check_outpaths_after().await?;
Ok(())
@ -484,12 +533,14 @@ impl<'a> NixpkgsStrategy<'a> {
vec![
EvalChecker::new(
"package-list",
Category::Error,
nix::Operation::QueryPackagesJson,
vec![String::from("--file"), String::from(".")],
self.nix.clone(),
),
EvalChecker::new(
"package-list-with-aliases",
Category::Error,
nix::Operation::QueryPackagesJson,
vec![
String::from("--file"),
@ -502,6 +553,7 @@ impl<'a> NixpkgsStrategy<'a> {
),
EvalChecker::new(
"lib-tests",
Category::Error,
nix::Operation::Build,
vec![
String::from("--arg"),
@ -513,6 +565,7 @@ impl<'a> NixpkgsStrategy<'a> {
),
EvalChecker::new(
"nixos",
Category::Error,
nix::Operation::Instantiate,
vec![
String::from("--arg"),
@ -526,6 +579,7 @@ impl<'a> NixpkgsStrategy<'a> {
),
EvalChecker::new(
"nixos-options",
Category::Error,
nix::Operation::Instantiate,
vec![
String::from("--arg"),
@ -539,6 +593,7 @@ impl<'a> NixpkgsStrategy<'a> {
),
EvalChecker::new(
"nixos-manual",
Category::Error,
nix::Operation::Instantiate,
vec![
String::from("--arg"),
@ -552,6 +607,7 @@ impl<'a> NixpkgsStrategy<'a> {
),
EvalChecker::new(
"nixpkgs-manual",
Category::Error,
nix::Operation::Instantiate,
vec![
String::from("--arg"),
@ -565,6 +621,7 @@ impl<'a> NixpkgsStrategy<'a> {
),
EvalChecker::new(
"nixpkgs-tarball",
Category::Error,
nix::Operation::Instantiate,
vec![
String::from("--arg"),
@ -578,6 +635,7 @@ impl<'a> NixpkgsStrategy<'a> {
),
EvalChecker::new(
"nixpkgs-unstable-jobset",
Category::Error,
nix::Operation::Instantiate,
vec![
String::from("--arg"),
@ -591,6 +649,7 @@ impl<'a> NixpkgsStrategy<'a> {
),
EvalChecker::new(
"darwin",
Category::Warning,
nix::Operation::Instantiate,
vec![
String::from("--arg"),
@ -608,13 +667,17 @@ impl<'a> NixpkgsStrategy<'a> {
pub(crate) async fn all_evaluations_passed(
&mut self,
dir: &Path,
status: &mut CommitStatus,
status: &mut ChangeStatus,
) -> StepResult<EvaluationComplete> {
self.update_stdenv_labels().await;
status
.set_with_description("Calculating Changed Outputs", State::Pending)
.await?;
.update_status_with_description(
"Calculating Changed Outputs",
CheckRunState::Scheduled,
None,
)
.await;
self.update_new_package_labels().await;
self.update_rebuild_labels(dir, status).await?;

View file

@ -9,8 +9,10 @@ use crate::stats::{self, Event};
use crate::systems;
use crate::tasks::eval;
use crate::utils::pastebin::PersistedPastebin;
use crate::vcs::commit_status::{CommitStatus, CommitStatusError};
use crate::vcs::generic::{Issue, IssueState, State, VersionControlSystemAPI};
use crate::vcs::generic::{
AugmentedVCSApi, ChangeStatus, CheckResult, CheckRunState, CommitStatusError, Issue,
IssueState, Link, VersionControlSystemAPI,
};
use crate::vcs::gerrit::http::GerritHTTPApi;
use crate::worker;
@ -93,7 +95,11 @@ impl<E: stats::SysEvents + 'static + Sync + Send> worker::SimpleWorker for Evalu
let _enter = span.enter();
let vcs_api: Arc<dyn VersionControlSystemAPI> = match self.vcs {
SupportedVCS::Gerrit => Arc::new(GerritHTTPApi),
// TODO: make it easier to build an augmented vcs api handle.
SupportedVCS::Gerrit => Arc::new(AugmentedVCSApi {
minimal_api: GerritHTTPApi,
statcheck_api: crate::vcs::generic::http::StatcheckHTTPApi,
}),
};
OneEval::new(
@ -118,6 +124,7 @@ struct OneEval<'a, E> {
identity: &'a str,
cloner: &'a checkout::CachedCloner,
job: &'a evaluationjob::EvaluationJob,
status: ChangeStatus,
}
impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
@ -139,6 +146,10 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
identity,
cloner,
job,
status: ChangeStatus::builder(&job.change)
.name("Nix evaluation")
.description("Run a Nix-based evaluation strategy on this change")
.build(),
}
}
@ -146,7 +157,7 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
&self,
description: String,
url: Option<String>,
state: State,
state: CheckRunState,
) -> Result<(), CommitStatusError> {
let description = if description.len() >= 140 {
warn!(
@ -192,15 +203,17 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
Ok(r) => Ok(r),
// Handle error cases which expect us to post statuses
// to GitHub. Convert Eval Errors into Result<_, CommitStatusWrite>
Err(EvalWorkerError::EvalError(eval::Error::Fail(msg))) => {
Err(self.update_status(msg, None, State::Failure).await)
}
Err(EvalWorkerError::EvalError(eval::Error::Fail(msg))) => Err(self
.update_status(msg, None, CheckRunState::Completed)
.await),
Err(EvalWorkerError::EvalError(eval::Error::FailWithPastebin(msg, title, content))) => {
let pastebin = self
.make_pastebin(chan, &title, content)
.await
.map(|pp| pp.uri);
Err(self.update_status(msg, pastebin, State::Failure).await)
Err(self
.update_status(msg, pastebin, CheckRunState::Completed)
.await)
}
Err(
EvalWorkerError::EvalError(eval::Error::CommitStatusWrite(e))
@ -297,18 +310,7 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
self.nix.clone(),
);
let mut overall_status = CommitStatus::new(
self.vcs_api.clone(),
job.repo.clone(),
job.change.head_sha.clone(),
"ofborg-eval".to_owned(),
"Starting".to_owned(),
None,
);
overall_status
.set_with_description("Starting", State::Pending)
.await?;
self.status.update_description("Pre-cloning").await;
evaluation_strategy.pre_clone().await?;
@ -316,9 +318,7 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
.cloner
.project(&job.repo.full_name, job.repo.clone_url.clone());
overall_status
.set_with_description("Cloning project", State::Pending)
.await?;
self.status.update_description("Cloning project").await;
info!("Working on {}", job.change.number);
let co = project
@ -331,31 +331,26 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
None => String::from("master"),
};
// TODO: this is a preflight check, encode it as such.
if target_branch.starts_with("nixos-") || target_branch.starts_with("nixpkgs-") {
overall_status
.set_with_description(
"The branch you have targeted is a read-only mirror for channels. \
Please target release-* or master.",
State::Error,
)
.await?;
self.status.update_description("Pre-flight checking").await;
info!("PR targets a nixos-* or nixpkgs-* branch");
if !evaluation_strategy
.preflight_check(&target_branch, &mut self.status)
.await?
{
self.status.update_status(CheckRunState::Completed).await;
info!("Pre-flight check failed, skipping this job");
return Ok(Actions::skip(job));
};
}
self.status
.update_description(format!("Checking out {}", &target_branch).as_ref())
.await;
overall_status
.set_with_description(
format!("Checking out {}", &target_branch).as_ref(),
State::Pending,
)
.await?;
info!("Checking out target branch {}", &target_branch);
let refpath = co.checkout_origin_ref(target_branch.as_ref()).unwrap();
evaluation_strategy
.on_target_branch(Path::new(&refpath), &mut overall_status)
.on_target_branch(Path::new(&refpath), &mut self.status)
.await?;
let target_branch_rebuild_sniff_start = Instant::now();
@ -367,19 +362,21 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
))
.await;
self.events
.notify(Event::EvaluationDurationCount(target_branch))
.notify(Event::EvaluationDurationCount(target_branch.clone()))
.await;
overall_status
.set_with_description("Fetching PR", State::Pending)
.await?;
self.status.update_description("Fetching change").await;
co.fetch_change(&job.change).unwrap();
if !co.commit_exists(job.change.head_sha.as_ref()) {
overall_status
.set_with_description("Commit not found", State::Error)
.await?;
self.status
.update_status_with_description(
"Change's commit not found",
CheckRunState::Completed,
None,
)
.await;
info!("Commit {} doesn't exist", job.change.head_sha);
return Ok(Actions::skip(job));
@ -387,14 +384,16 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
evaluation_strategy.after_fetch(&co);
overall_status
.set_with_description("Merging PR", State::Pending)
.await?;
self.status
.update_description(
format!("Merging change in target branch {}", target_branch).as_ref(),
)
.await;
if co.merge_commit(job.change.head_sha.as_ref()).is_err() {
overall_status
.set_with_description("Failed to merge", State::Failure)
.await?;
self.status
.update_description("Failed to merge; executing merge conflict strategy")
.await;
info!("Failed to merge {}", job.change.head_sha);
@ -403,58 +402,47 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
return Ok(Actions::skip(job));
}
evaluation_strategy.after_merge(&mut overall_status).await?;
evaluation_strategy.after_merge(&mut self.status).await?;
info!("Got path: {:?}, building", refpath);
overall_status
.set_with_description("Beginning Evaluations", State::Pending)
.await?;
self.status
.update_description("Beginning Evaluations")
.await;
let mut all_good = true;
// TODO: conduct all checks in parallel and send them as soon as they are available.
for check in evaluation_strategy.evaluation_checks() {
let mut status = CommitStatus::new(
self.vcs_api.clone(),
job.repo.clone(),
job.change.head_sha.clone(),
format!("ofborg-eval-{}", check.name()),
check.cli_cmd(),
None,
);
status
.set(State::Pending)
.await
.expect("Failed to set status on eval strategy");
let state: State;
let gist_url: Option<String>;
let result: CheckResult;
match check.execute(Path::new(&refpath)).await {
Ok(_) => {
state = State::Success;
gist_url = None;
result = check.into_successful_result();
}
Err(mut out) => {
state = State::Failure;
gist_url = self
let gist_link = self
.make_pastebin(
chan,
&format!("[ofborg] Evaluation of {}", check.name()),
file_to_str(&mut out),
)
.await
.map(|pp| pp.uri);
.map(|pp| pp.uri)
.map(|url| Link {
url,
tooltip: Some("Details of this evaluation check".to_owned()),
primary: true,
icon: crate::vcs::generic::LinkIcon::History,
});
result =
check.into_failed_result(vec![gist_link].into_iter().flatten().collect());
}
}
status.set_url(gist_url);
status
.set(state)
.await
.expect("Failed to set status on eval strategy");
if state != State::Success {
if !result.is_successful() {
all_good = false;
}
self.status.add_result(result).send_results().await;
}
info!("Finished evaluations");
@ -462,7 +450,7 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
if all_good {
let complete = evaluation_strategy
.all_evaluations_passed(Path::new(&refpath), &mut overall_status)
.all_evaluations_passed(Path::new(&refpath), &mut self.status)
.await?;
self.vcs_api
@ -470,13 +458,17 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
.await;
response.extend(schedule_builds(complete.builds, &auto_schedule_build_archs));
overall_status
.set_with_description("^.^!", State::Success)
.await?;
self.status
.update_status_with_description("^.^!", CheckRunState::Completed, None)
.await;
} else {
overall_status
.set_with_description("Complete, with errors", State::Failure)
.await?;
self.status
.update_status_with_description(
"Complete, with errors",
CheckRunState::Completed,
None,
)
.await;
}
self.events.notify(Event::TaskEvaluationCheckComplete).await;

View file

@ -1,87 +0,0 @@
use std::sync::Arc;
use tracing::warn;
use crate::vcs::generic::State;
use super::generic::VersionControlSystemAPI;
pub struct CommitStatus {
api: Arc<dyn VersionControlSystemAPI>,
repo: crate::message::Repo,
sha: String,
context: String,
description: String,
url: String,
}
impl CommitStatus {
pub fn new(
api: Arc<dyn VersionControlSystemAPI>,
repo: crate::message::Repo,
sha: String,
context: String,
description: String,
url: Option<String>,
) -> CommitStatus {
let mut stat = CommitStatus {
api,
repo,
sha,
context,
description,
url: String::new(),
};
stat.set_url(url);
stat
}
pub fn set_url(&mut self, url: Option<String>) {
self.url = url.unwrap_or_default();
}
pub async fn set_with_description(
&mut self,
description: &str,
state: State,
) -> Result<(), CommitStatusError> {
self.set_description(description.to_owned());
self.set(state).await
}
pub fn set_description(&mut self, description: String) {
self.description = description;
}
pub async fn set(&self, state: State) -> Result<(), CommitStatusError> {
let desc = if self.description.len() >= 140 {
warn!(
"description is over 140 char; truncating: {:?}",
&self.description
);
self.description.chars().take(140).collect()
} else {
self.description.clone()
};
self.api
.create_commit_statuses(
&self.repo,
self.sha.clone(),
state,
self.context.clone(),
desc,
self.url.clone(),
)
.await
}
}
#[derive(Debug)]
pub enum CommitStatusError {
ExpiredCreds(()),
MissingSha(()),
Error(()),
}

View file

@ -1,133 +0,0 @@
//! Set of generic structures to abstract over a VCS in a richful way.
//! Not all VCS can represent the full set of states, so implementations
//! will have to downgrade richer values to the closest representation.
//!
//! Gerrit is the first-class supported model.
use futures_util::future::BoxFuture;
use serde::{Deserialize, Serialize};
use crate::message::{Change, Repo};
use super::commit_status::CommitStatusError;
pub enum IssueState {
Open,
Closed,
}
pub struct Account {
pub username: String,
}
pub struct Issue {
pub title: String,
pub number: u64,
pub repo: Repo,
pub state: IssueState,
pub created_by: Account,
}
pub struct Repository {}
pub struct ChangeReviewers {
pub entity_reviewers: Vec<String>,
pub team_reviewers: Vec<String>,
}
impl Issue {
#[must_use]
pub fn is_wip(&self) -> bool {
false
}
}
pub trait VersionControlSystemAPI: Sync + Send {
fn get_repository(&self, repo: &crate::message::Repo) -> Repository;
fn get_changes(&self, repo: &crate::message::Repo) -> BoxFuture<Vec<Change>>;
fn get_change(&self, repo: &crate::message::Repo, number: u64) -> BoxFuture<Option<Change>>;
fn get_issue(
&self,
repo: &crate::message::Repo,
number: u64,
) -> BoxFuture<Result<Issue, String>>;
fn update_labels(
&self,
repo: &crate::message::Repo,
number: u64,
add: &[String],
remove: &[String],
) -> BoxFuture<()>;
fn get_existing_reviewers(
&self,
repo: &crate::message::Repo,
number: u64,
) -> BoxFuture<ChangeReviewers>;
fn request_reviewers(
&self,
repo: &crate::message::Repo,
number: u64,
entity_reviewers: Vec<String>,
team_reviewers: Vec<String>,
) -> BoxFuture<()>;
fn create_commit_statuses(
&self,
repo: &crate::message::Repo,
sha: String,
state: State,
context: String,
description: String,
target_url: String,
) -> BoxFuture<Result<(), CommitStatusError>>;
fn create_check_statuses(
&self,
repo: &crate::message::Repo,
checks: Vec<CheckRunOptions>,
) -> BoxFuture<()>;
}
#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum CheckRunState {
Runnable,
Running,
Scheduled,
Completed,
}
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone, Copy)]
#[serde(rename_all = "snake_case")]
pub enum State {
Pending,
Error,
Failure,
Success,
}
#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum Conclusion {
Skipped,
Success,
Failure,
Neutral,
Cancelled,
TimedOut,
ActionRequired,
}
#[derive(Debug, Serialize, PartialEq)]
pub struct CheckRunOptions {
pub name: String,
pub head_sha: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub details_url: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub external_id: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub status: Option<CheckRunState>,
#[serde(skip_serializing_if = "Option::is_none")]
pub started_at: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub conclusion: Option<Conclusion>,
#[serde(skip_serializing_if = "Option::is_none")]
pub completed_at: Option<String>,
}

View file

@ -0,0 +1,173 @@
//! Set of generic structures to abstract over a VCS in a rich way.
//! Not all VCSes can represent the full set of states, so implementations
//! will have to downgrade richer values to the closest representation.
//!
//! Gerrit is the first-class supported model.
use futures_util::future::BoxFuture;
use crate::message::{Change, Repo};
use super::{commit_status::CommitStatusError, http::StatcheckHTTPApi, CheckRun, CheckRunState};
pub enum IssueState {
Open,
Closed,
}
pub struct Account {
pub username: String,
}
pub struct Issue {
pub title: String,
pub number: u64,
pub repo: Repo,
pub state: IssueState,
pub created_by: Account,
}
pub struct Repository {}
pub struct ChangeReviewers {
pub entity_reviewers: Vec<String>,
pub team_reviewers: Vec<String>,
}
impl Issue {
#[must_use]
pub fn is_wip(&self) -> bool {
false
}
}
pub trait MinimalVersionControlSystemAPI: Sync + Send {
fn get_repository(&self, repo: &crate::message::Repo) -> Repository;
fn get_changes(&self, repo: &crate::message::Repo) -> BoxFuture<Vec<Change>>;
fn get_change(&self, repo: &crate::message::Repo, number: u64) -> BoxFuture<Option<Change>>;
fn get_issue(
&self,
repo: &crate::message::Repo,
number: u64,
) -> BoxFuture<Result<Issue, String>>;
fn update_labels(
&self,
repo: &crate::message::Repo,
number: u64,
add: &[String],
remove: &[String],
) -> BoxFuture<()>;
fn get_existing_reviewers(
&self,
repo: &crate::message::Repo,
number: u64,
) -> BoxFuture<ChangeReviewers>;
fn request_reviewers(
&self,
repo: &crate::message::Repo,
number: u64,
entity_reviewers: Vec<String>,
team_reviewers: Vec<String>,
) -> BoxFuture<()>;
}
pub trait VersionControlSystemAPI: Sync + Send + MinimalVersionControlSystemAPI {
fn create_commit_statuses(
&self,
repo: &crate::message::Repo,
sha: String,
state: CheckRunState,
context: String,
description: String,
target_url: String,
) -> BoxFuture<Result<(), CommitStatusError>>;
fn create_check_statuses(
&self,
repo: &crate::message::Repo,
checks: Vec<CheckRun>,
) -> BoxFuture<()>;
}
/// This is an augmented VCS API extended by our internal status & checks API server.
/// This means a VCS only needs to implement the basics to benefit automatically
/// from our own status & checks server.
/// You can still replicate our custom statuses & checks back to your original VCS if you want.
pub struct AugmentedVCSApi<A: MinimalVersionControlSystemAPI> {
pub minimal_api: A,
pub statcheck_api: StatcheckHTTPApi,
}
/// This is a forwarder implementation to `minimal_api`.
impl<A: MinimalVersionControlSystemAPI> MinimalVersionControlSystemAPI for AugmentedVCSApi<A> {
fn get_issue(
&self,
repo: &crate::message::Repo,
number: u64,
) -> BoxFuture<Result<Issue, String>> {
self.minimal_api.get_issue(repo, number)
}
fn get_change(&self, repo: &crate::message::Repo, number: u64) -> BoxFuture<Option<Change>> {
self.minimal_api.get_change(repo, number)
}
fn get_changes(&self, repo: &crate::message::Repo) -> BoxFuture<Vec<Change>> {
self.minimal_api.get_changes(repo)
}
fn get_repository(&self, repo: &crate::message::Repo) -> Repository {
self.minimal_api.get_repository(repo)
}
fn update_labels(
&self,
repo: &crate::message::Repo,
number: u64,
add: &[String],
remove: &[String],
) -> BoxFuture<()> {
self.minimal_api.update_labels(repo, number, add, remove)
}
fn get_existing_reviewers(
&self,
repo: &crate::message::Repo,
number: u64,
) -> BoxFuture<ChangeReviewers> {
self.minimal_api.get_existing_reviewers(repo, number)
}
fn request_reviewers(
&self,
repo: &crate::message::Repo,
number: u64,
entity_reviewers: Vec<String>,
team_reviewers: Vec<String>,
) -> BoxFuture<()> {
self.minimal_api
.request_reviewers(repo, number, entity_reviewers, team_reviewers)
}
}
impl<A: MinimalVersionControlSystemAPI> VersionControlSystemAPI for AugmentedVCSApi<A> {
fn create_check_statuses(
&self,
_repo: &crate::message::Repo,
_checks: Vec<CheckRun>,
) -> BoxFuture<()> {
// Create all checks in parallel.
todo!();
}
fn create_commit_statuses(
&self,
_repo: &crate::message::Repo,
_sha: String,
_state: CheckRunState,
_context: String,
_description: String,
_target_url: String,
) -> BoxFuture<Result<(), CommitStatusError>> {
// Create the commit status.
todo!();
}
}
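
As a rough usage sketch of the composition above, check creation can be fanned out through the statcheck bindings. This is only an illustration under the assumption that create_new_check keeps its current shape; the helper name itself is hypothetical.
// Hedged sketch: fan out check creation through the statcheck API.
// `AugmentedVCSApi`, `CheckRun` and `create_new_check` are the items defined
// above; `create_all_checks` is illustrative only.
async fn create_all_checks<A: MinimalVersionControlSystemAPI>(
    api: &AugmentedVCSApi<A>,
    checks: Vec<CheckRun>,
) {
    // Build one future per check and await them concurrently.
    futures_util::future::join_all(
        checks
            .into_iter()
            .map(|check| api.statcheck_api.create_new_check(check)),
    )
    .await;
}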

View file

@ -0,0 +1,176 @@
//! Change status is an evolution of the legacy CommitStatus control structure,
//! which was tailored only for simple use cases and GitHub.
use std::sync::Arc;
use chrono::NaiveDateTime;
use crate::message::Change;
use super::{CheckResult, CheckRunState, VersionControlSystemAPI};
/// This is a structure to control a specific check run and its results.
pub struct ChangeStatus {
// Internal information for the status server.
change: u64,
patchset: u64,
attempt: u64,
/// Global name of this check. Must be unique on a given change.
name: String,
/// Description of what this check does.
description: String,
/// A link to documentation about what this specific check does.
doc_link: Option<String>,
/// Which label does this check affect? Helps causality analysis on the frontend.
label_name: Option<String>,
/// Scheduling timestamp
scheduled_timestamp: Option<NaiveDateTime>,
/// Started timestamp
started_timestamp: Option<NaiveDateTime>,
/// Estimated finished timestamp or actual finished timestamp.
finished_timestamp: Option<NaiveDateTime>,
status: CheckRunState,
status_link: Option<String>,
results: Vec<CheckResult>,
}
impl ChangeStatus {}
/// Builder for ChangeStatus.
pub struct ChangeStatusBuilder {
change: u64,
patchset: u64,
attempt: u64,
name: String,
description: String,
doc_link: Option<String>,
status: CheckRunState,
status_link: Option<String>,
label_name: Option<String>,
scheduled_timestamp: Option<NaiveDateTime>,
finished_timestamp: Option<NaiveDateTime>,
started_timestamp: Option<NaiveDateTime>,
results: Vec<CheckResult>,
}
impl ChangeStatusBuilder {
pub fn name(mut self, name: &str) -> Self {
self.name = name.to_owned();
self
}
pub fn description(mut self, description: &str) -> Self {
self.description = description.to_owned();
self
}
pub fn doc_link(mut self, doc_link: Option<String>) -> Self {
self.doc_link = doc_link;
self
}
pub fn label_name(mut self, label_name: &str) -> Self {
self.label_name = Some(label_name.to_owned());
self
}
pub fn scheduled_timestamp(mut self, timestamp: Option<NaiveDateTime>) -> Self {
self.scheduled_timestamp = timestamp;
self
}
pub fn finished_timestamp(mut self, timestamp: Option<NaiveDateTime>) -> Self {
self.finished_timestamp = timestamp;
self
}
pub fn build(self) -> ChangeStatus {
ChangeStatus {
change: self.change,
patchset: self.patchset,
attempt: self.attempt,
name: self.name,
description: self.description,
doc_link: self.doc_link,
status: self.status,
status_link: self.status_link,
label_name: self.label_name,
scheduled_timestamp: self.scheduled_timestamp,
finished_timestamp: self.finished_timestamp,
started_timestamp: self.started_timestamp,
results: self.results,
}
}
}
impl ChangeStatus {
/// Create a new builder instance.
pub fn builder(change: &Change) -> ChangeStatusBuilder {
ChangeStatusBuilder {
change: change.number,
patchset: 0,
attempt: 0,
name: String::new(),
description: String::new(),
doc_link: None,
status: CheckRunState::Runnable,
status_link: None,
label_name: None,
scheduled_timestamp: None,
finished_timestamp: None,
started_timestamp: None,
results: Vec::new(),
}
}
/// This creates the change status over the API, making it visible to VCSes that support it.
pub async fn create(
&mut self,
api: Arc<dyn VersionControlSystemAPI>,
initial_state: CheckRunState,
) -> Self {
self.status = initial_state;
todo!();
}
pub async fn set_started(&self) {
// Update the started timestamp.
todo!();
}
pub async fn update_description(&self, description: &str) {
todo!();
}
/// This updates the current status of this check with a description.
pub async fn update_status_with_description(
&self,
description: &str,
status: CheckRunState,
link: Option<String>,
) {
todo!();
}
pub async fn set_status_link(&mut self, link: Option<String>) {
self.status_link = link;
todo!();
}
pub async fn update_status(&self, status: CheckRunState) {
todo!();
}
/// Add a result for this check; it is not sent immediately.
/// Call [`Self::send_results`] to send it.
pub fn add_result(&mut self, result: CheckResult) -> &mut Self {
self.results.push(result);
self
}
/// This sends the results via the API and makes them visible to servers.
pub async fn send_results(&mut self) -> Self {
todo!();
}
}
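
For orientation, a minimal sketch of how the builder above is meant to be used; the strings are placeholders and `change` is assumed to be a crate::message::Change obtained elsewhere.
// Hypothetical values; only setters defined on ChangeStatusBuilder are used.
let status = ChangeStatus::builder(&change)
    .name("nixpkgs-eval")
    .description("Evaluates the change with ofborg")
    .doc_link(Some("https://example.invalid/docs/eval".to_owned()))
    .label_name("Verified")
    .build();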

View file

@ -1,94 +1,80 @@
use crate::vcs::generic::CheckRunState;
use serde::{Deserialize, Serialize};
/// Gerrit is used as the generic model for CI checks.
/// Downgrading those to your VCS is possible.
/// If your VCS is richer than Gerrit, feel free to discuss how to extend this.
/// Port from <https://gerrit.googlesource.com/gerrit/+/master/polygerrit-ui/app/api/checks.ts>
#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone, Copy)]
#[serde(rename_all = "UPPERCASE")]
enum RunStatus {
pub enum CheckRunState {
Runnable,
Running,
Scheduled,
Completed,
}
impl From<RunStatus> for CheckRunState {
fn from(value: RunStatus) -> Self {
match value {
RunStatus::Runnable => CheckRunState::Runnable,
RunStatus::Running => CheckRunState::Running,
RunStatus::Scheduled => CheckRunState::Scheduled,
RunStatus::Completed => CheckRunState::Completed,
}
}
}
impl From<CheckRunState> for RunStatus {
fn from(value: CheckRunState) -> Self {
match value {
CheckRunState::Runnable => Self::Runnable,
CheckRunState::Running => Self::Running,
CheckRunState::Scheduled => Self::Scheduled,
CheckRunState::Completed => Self::Completed,
}
}
}
#[allow(dead_code)]
#[derive(Debug, Serialize, PartialEq)]
struct CheckRun {
pub struct CheckRun {
#[serde(skip_serializing_if = "Option::is_none")]
change: Option<u64>,
pub change: Option<u64>,
#[serde(skip_serializing_if = "Option::is_none")]
patchset: Option<u64>,
pub patchset: Option<u64>,
#[serde(skip_serializing_if = "Option::is_none")]
attempt: Option<u64>,
pub attempt: Option<u64>,
#[serde(skip_serializing_if = "Option::is_none")]
external_id: Option<String>,
check_name: String,
pub external_id: Option<String>,
pub check_name: String,
#[serde(skip_serializing_if = "Option::is_none")]
check_description: Option<String>,
pub check_description: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
check_link: Option<String>,
pub check_link: Option<String>,
// defaults to false
#[serde(skip_serializing_if = "Option::is_none")]
is_ai_powered: Option<bool>,
status: RunStatus,
pub is_ai_powered: Option<bool>,
pub status: CheckRunState,
#[serde(skip_serializing_if = "Option::is_none")]
status_description: Option<String>,
pub status_description: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
status_link: Option<String>,
pub status_link: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
label_name: Option<String>,
pub label_name: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
scheduled_timestamp: Option<String>,
pub scheduled_timestamp: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
started_timestamp: Option<String>,
pub started_timestamp: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
finished_timestamp: Option<String>,
pub finished_timestamp: Option<String>,
#[serde(skip_serializing_if = "Vec::is_empty")]
results: Vec<CheckResult>,
pub results: Vec<CheckResult>,
}
#[derive(Debug, Serialize, Deserialize, PartialEq)]
struct CheckResult {
pub struct CheckResult {
#[serde(skip_serializing_if = "Option::is_none")]
external_id: Option<String>,
category: Category,
summary: String,
pub external_id: Option<String>,
pub category: Category,
pub summary: String,
#[serde(skip_serializing_if = "Option::is_none")]
message: Option<String>,
pub message: Option<String>,
#[serde(skip_serializing_if = "Vec::is_empty")]
tags: Vec<Tag>,
pub tags: Vec<Tag>,
#[serde(skip_serializing_if = "Vec::is_empty")]
links: Vec<Link>,
pub links: Vec<Link>,
#[serde(skip_serializing_if = "Vec::is_empty")]
code_pointers: Vec<CodePointer>,
pub code_pointers: Vec<CodePointer>,
}
#[derive(Debug, Serialize, Deserialize, PartialEq)]
impl CheckResult {
pub fn is_successful(&self) -> bool {
self.category == Category::Success
}
}
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone, Copy)]
#[serde(rename_all = "UPPERCASE")]
enum Category {
pub enum Category {
Success,
Info,
Warning,
@ -97,7 +83,7 @@ enum Category {
#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "UPPERCASE")]
enum TagColor {
pub enum TagColor {
Gray,
Yellow,
Pink,
@ -107,7 +93,7 @@ enum TagColor {
}
#[derive(Debug, Serialize, Deserialize, PartialEq)]
struct Tag {
pub struct Tag {
name: String,
#[serde(skip_serializing_if = "Option::is_none")]
tooltip: Option<String>,
@ -116,23 +102,23 @@ struct Tag {
}
#[derive(Debug, Serialize, Deserialize, PartialEq)]
struct Link {
url: String,
pub struct Link {
pub url: String,
#[serde(skip_serializing_if = "Option::is_none")]
tooltip: Option<String>,
primary: bool,
icon: LinkIcon,
pub tooltip: Option<String>,
pub primary: bool,
pub icon: LinkIcon,
}
#[derive(Debug, Serialize, Deserialize, PartialEq)]
struct CodePointer {
pub struct CodePointer {
path: String,
range: CommentRange,
}
#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "UPPERCASE")]
enum LinkIcon {
pub enum LinkIcon {
External,
Image,
History,
@ -147,7 +133,7 @@ enum LinkIcon {
}
#[derive(Debug, Serialize, Deserialize, PartialEq)]
struct CommentRange {
pub struct CommentRange {
// 1-based
start_line: u64,
// 0-based
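
To make the serde attributes on CheckResult concrete, here is a minimal sketch of a successful result; serde_json is assumed to be available and the summary text is made up.
// All type and field names come from the checks module above.
let result = CheckResult {
    external_id: None,
    category: Category::Success,
    summary: "Evaluation succeeded".to_owned(),
    message: None,
    tags: Vec::new(),
    links: Vec::new(),
    code_pointers: Vec::new(),
};
assert!(result.is_successful());
// None options and empty collections are skipped on the wire, so this prints
// roughly: {"category":"SUCCESS","summary":"Evaluation succeeded"}
println!("{}", serde_json::to_string(&result).unwrap());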

View file

@ -0,0 +1,18 @@
//! REST API bindings for our custom status & checks API server
//! It is deliberately tailored for Gerrit, the most featureful VCS we support.
use super::{CheckResult, CheckRun};
pub struct StatcheckHTTPApi;
#[allow(dead_code)]
#[allow(clippy::unused_async)] // FIXME
impl StatcheckHTTPApi {
pub(crate) async fn create_new_check(&self, _check: CheckRun) -> Option<CheckRun> {
todo!();
}
pub(crate) async fn update_result(&self, _result: CheckResult) -> Option<()> {
todo!();
}
}
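
Assuming the target is the statcheck server's PUT /changes/:change_id/checks route (see the web module below), create_new_check could eventually look something like the sketch below. reqwest is used purely for illustration and is not a stated dependency; base_url and change_id are placeholders.
// Hypothetical sketch only. CheckRun currently derives Serialize but not
// Deserialize, so the input is echoed back instead of parsing the response.
async fn create_new_check_sketch(
    client: &reqwest::Client,
    base_url: &str,
    change_id: u64,
    check: CheckRun,
) -> Option<CheckRun> {
    let url = format!("{base_url}/changes/{change_id}/checks");
    client
        .put(url)
        .json(&check)
        .send()
        .await
        .ok()?
        .error_for_status()
        .ok()?;
    Some(check)
}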

View file

@ -0,0 +1,10 @@
pub mod api;
pub mod change_status;
pub mod checks;
pub mod commit_status;
pub mod http;
pub use api::*;
pub use change_status::*;
pub use checks::*;
pub use commit_status::*;

View file

@ -0,0 +1,6 @@
use super::{http::GerritHTTPApi, ssh::GerritSSHApi};
pub struct GerritClient {
http_api: GerritHTTPApi,
ssh_api: GerritSSHApi,
}

View file

@ -3,37 +3,16 @@
use futures_util::FutureExt;
use crate::vcs::generic::VersionControlSystemAPI;
use crate::vcs::generic::MinimalVersionControlSystemAPI;
use super::{data_structures::Account, http::GerritHTTPApi};
impl VersionControlSystemAPI for GerritHTTPApi {
// The next three APIs are todo!() because they cannot be implemented in Gerrit.
impl MinimalVersionControlSystemAPI for GerritHTTPApi {
// The next API is todo!() because it cannot be implemented in Gerrit.
// Gerrit does not offer any way to get this information out.
// GerritHTTPApi needs to return something like Unsupported
// and we need to compose a GerritHTTPApi with a GerritForge which contains an implementation
// of check statuses and commit statuses and an issue tracker.
fn create_check_statuses(
&self,
_repo: &crate::message::Repo,
_checks: Vec<crate::vcs::generic::CheckRunOptions>,
) -> futures_util::future::BoxFuture<()> {
todo!();
}
fn create_commit_statuses(
&self,
_repo: &crate::message::Repo,
_sha: String,
_state: crate::vcs::generic::State,
_context: String,
_description: String,
_target_url: String,
) -> futures_util::future::BoxFuture<Result<(), crate::vcs::commit_status::CommitStatusError>>
{
todo!();
}
fn get_issue(
&self,
_repo: &crate::message::Repo,

View file

@ -1,4 +1,4 @@
pub mod checks;
//pub mod client;
pub mod data_structures;
pub mod http;
pub mod r#impl;

View file

@ -1,3 +1,2 @@
pub mod commit_status;
pub mod generic;
pub mod gerrit;

1
ofborg/src/web/mod.rs Normal file
View file

@ -0,0 +1 @@
pub mod statcheck;

View file

@ -0,0 +1,103 @@
//! This contains the web code for the status & checks server.
use axum::{extract::Path, Json};
use serde::{Deserialize, Serialize};

use crate::{message::Repo, vcs::generic::CheckRun};

// TODO: how to do code reuse with the other structure that contains an API handle?
#[allow(dead_code)]
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct CommitStatus {
repo: Repo,
sha: String,
context: String,
description: String,
url: String,
}
/// Handler for GET /changes/:change_id/statuses
pub async fn get_statuses(Path(_change_id): Path<String>) -> Json<Vec<CommitStatus>> {
// TODO: Retrieve the statuses from the data store
Json(vec![]) // Return an empty list for now
}
/// Handler for GET /changes/:change_id/statuses/:status_id
pub async fn get_status(
Path((_change_id, _status_id)): Path<(String, String)>,
) -> Json<CommitStatus> {
// TODO: Retrieve a specific status from the data store
Json(CommitStatus {
repo: Repo {
owner: "example".to_string(),
name: "repo".to_string(),
full_name: "example/repo".to_string(),
clone_url: "https://example.com/repo.git".to_string(),
},
sha: "example_sha".to_string(),
context: "example_context".to_string(),
description: "example_description".to_string(),
url: "https://example.com/status".to_string(),
})
}
/// Handler for PUT /changes/:change_id/statuses
pub async fn put_statuses(
Path(change_id): Path<String>,
Json(_payload): Json<CommitStatus>,
) -> Json<String> {
// TODO: Add the status to the data store
Json(format!("Added status for change_id {}", change_id))
}
/// Handler for PATCH /changes/:change_id/statuses/:status_id
pub async fn patch_status(
Path((change_id, status_id)): Path<(String, String)>,
Json(_payload): Json<CommitStatus>,
) -> Json<String> {
// TODO: Update the status in the data store
Json(format!(
"Updated status_id {} for change_id {}",
status_id, change_id
))
}
/// Handler for GET /changes/:change_id/versions/:version_id/checks/:check_id
pub async fn get_check(
Path((_c_id, _vid, _check_id)): Path<(String, String, String)>,
) -> Json<Option<CheckRun>> {
// use crate::models::schema::checks::dsl::*;
// use crate::models::statcheck::Check;
// let check = checks
// .filter(id.eq(check_id))
// .filter(change_id.eq(c_id))
// .filter(version.eq(vid))
// .limit(1)
// .select(Check::as_select())
// .load(connection)
// .expect("Error loading a check");
//
// TODO: Retrieve a specific check from the data store
Json(None)
}
/// Handler for PUT /changes/:change_id/checks
pub async fn put_checks(
Path(change_id): Path<String>,
Json(_payload): Json<CheckRun>,
) -> Json<String> {
// TODO: Add the check to the data store
Json(format!("Added check for change_id {}", change_id))
}
/// Handler for PATCH /changes/:change_id/checks/:check_id
pub async fn patch_check(
Path((change_id, check_id)): Path<(String, String)>,
Json(_payload): Json<CheckRun>,
) -> Json<String> {
// TODO: Update the check in the data store
Json(format!(
"Updated check_id {} for change_id {}",
check_id, change_id
))
}
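
A hedged sketch of how these handlers might be wired into an axum Router; the paths follow the doc comments above, and the real server setup in the statcheck binary may differ. Note that the Json<CheckRun> extractors also require CheckRun to implement Deserialize.
// Assumes this lives next to the handlers above; only the handler names and
// their documented routes are taken from the diff.
use axum::routing::{get, patch, put};
use axum::Router;

pub fn statcheck_router() -> Router {
    Router::new()
        .route(
            "/changes/:change_id/statuses",
            get(get_statuses).put(put_statuses),
        )
        .route(
            "/changes/:change_id/statuses/:status_id",
            get(get_status).patch(patch_status),
        )
        .route("/changes/:change_id/checks", put(put_checks))
        .route(
            "/changes/:change_id/checks/:check_id",
            patch(patch_check),
        )
        .route(
            "/changes/:change_id/versions/:version_id/checks/:check_id",
            get(get_check),
        )
}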