Compare commits

...

4 commits

Author SHA1 Message Date
raito 00c55f40bf chore: init diesel in the web project
Signed-off-by: Raito Bezarius <masterancpp@gmail.com>
2024-12-24 01:37:41 +01:00
raito 042e62d3dd feat(contrib/frontend/gerrit): design a simple status & check frontend for Gerrit
It uses imaginary APIs for now, but it's OK.

This has surfaced a bunch of generalizations we will need in our own
API.

Signed-off-by: Raito Bezarius <masterancpp@gmail.com>
2024-12-24 01:37:41 +01:00
raito 4b0cf86ef7 feat(statcheck): introduce status & checks server
This is a basic server that returns mocked data.

Next steps are persistence, client support in the rest of the code, etc.

Signed-off-by: Raito Bezarius <masterancpp@gmail.com>
2024-12-24 01:37:41 +01:00
raito 14e79573d3 chore(devshell): add event streaming & VCS filter in the dev Procfile
Signed-off-by: Raito Bezarius <masterancpp@gmail.com>
2024-12-24 01:37:41 +01:00
18 changed files with 516 additions and 85 deletions

118
Cargo.lock generated
View file

@ -387,6 +387,7 @@ checksum = "edca88bc138befd0323b20752846e6587272d3b03b0343c8ea28a6f819e6e71f"
dependencies = [ dependencies = [
"async-trait", "async-trait",
"axum-core", "axum-core",
"axum-macros",
"bytes", "bytes",
"futures-util", "futures-util",
"http 1.2.0", "http 1.2.0",
@ -434,6 +435,17 @@ dependencies = [
"tracing", "tracing",
] ]
[[package]]
name = "axum-macros"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57d123550fa8d071b7255cb0cc04dc302baa6c8c4a79f55701552684d8399bce"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]] [[package]]
name = "backtrace" name = "backtrace"
version = "0.3.74" version = "0.3.74"
@ -734,6 +746,41 @@ dependencies = [
"typenum", "typenum",
] ]
[[package]]
name = "darling"
version = "0.20.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989"
dependencies = [
"darling_core",
"darling_macro",
]
[[package]]
name = "darling_core"
version = "0.20.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5"
dependencies = [
"fnv",
"ident_case",
"proc-macro2",
"quote",
"strsim",
"syn",
]
[[package]]
name = "darling_macro"
version = "0.20.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806"
dependencies = [
"darling_core",
"quote",
"syn",
]
[[package]] [[package]]
name = "data-encoding" name = "data-encoding"
version = "2.6.0" version = "2.6.0"
@ -796,6 +843,41 @@ dependencies = [
"cipher", "cipher",
] ]
[[package]]
name = "diesel"
version = "2.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ccf1bedf64cdb9643204a36dd15b19a6ce8e7aa7f7b105868e9f1fad5ffa7d12"
dependencies = [
"bitflags 2.6.0",
"byteorder",
"diesel_derives",
"itoa",
"pq-sys",
]
[[package]]
name = "diesel_derives"
version = "2.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7f2c3de51e2ba6bf2a648285696137aaf0f5f487bcbea93972fe8a364e131a4"
dependencies = [
"diesel_table_macro_syntax",
"dsl_auto_type",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "diesel_table_macro_syntax"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "209c735641a413bc68c4923a9d6ad4bcb3ca306b794edaa7eb0b3228a99ffb25"
dependencies = [
"syn",
]
[[package]] [[package]]
name = "digest" name = "digest"
version = "0.10.7" version = "0.10.7"
@ -845,6 +927,20 @@ version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10"
[[package]]
name = "dsl_auto_type"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c5d9abe6314103864cc2d8901b7ae224e0ab1a103a0a416661b4097b0779b607"
dependencies = [
"darling",
"either",
"heck",
"proc-macro2",
"quote",
"syn",
]
[[package]] [[package]]
name = "either" name = "either"
version = "1.13.0" version = "1.13.0"
@ -1514,6 +1610,12 @@ dependencies = [
"syn", "syn",
] ]
[[package]]
name = "ident_case"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"
[[package]] [[package]]
name = "idna" name = "idna"
version = "1.0.3" version = "1.0.3"
@ -1876,6 +1978,7 @@ dependencies = [
"brace-expand", "brace-expand",
"chrono", "chrono",
"clap", "clap",
"diesel",
"either", "either",
"fs2", "fs2",
"futures", "futures",
@ -2297,6 +2400,15 @@ dependencies = [
"zerocopy", "zerocopy",
] ]
[[package]]
name = "pq-sys"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6cc05d7ea95200187117196eee9edd0644424911821aeb28a18ce60ea0b8793"
dependencies = [
"vcpkg",
]
[[package]] [[package]]
name = "proc-macro2" name = "proc-macro2"
version = "1.0.92" version = "1.0.92"
@ -3440,6 +3552,12 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d"
[[package]]
name = "vcpkg"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
[[package]] [[package]]
name = "version_check" name = "version_check"
version = "0.1.5" version = "0.1.5"

View file

@ -1,3 +1,5 @@
amqp-server: rabbitmq-server amqp-server: rabbitmq-server
gerrit-event-streamer: cargo run --bin gerrit-event-streamer -- dev.config.json
gerrit-vcs-filter: cargo run --bin gerrit-generic-vcs-filter -- dev.config.json
pastebin-worker: cargo run --bin pastebin-worker -- dev.config.json pastebin-worker: cargo run --bin pastebin-worker -- dev.config.json
stats-worker: cargo run --bin stats -- dev.config.json stats-worker: cargo run --bin stats -- dev.config.json

88
contrib/checks-ofborg.js Normal file
View file

@ -0,0 +1,88 @@
/* Inspired from the Lix setup.
 * Inspired from the Buildbot setup.
 *
 * Designed for OfBorg custom checks & server API.
 * Original-Author: puckipedia
 */
Gerrit.install((plugin) => {
  // Plugin-provided configuration: the statcheck server base URI and the
  // list of projects that have checks enabled.
  const serverInfo = plugin.serverInfo();
  const { statcheck_base_uri, enabled_projects } = serverInfo.plugin;
  const configuration = {
    baseUri: statcheck_base_uri,
    // TODO: use directly ofborg API for this.
    supportedProjects: enabled_projects,
  };

  // Builds an absolute statcheck API URI from a path suffix.
  function makeStatcheckUri(suffix) {
    return `${configuration.baseUri}/${suffix}`;
  }

  let checksProvider;
  checksProvider = {
    // Gerrit Checks API fetch callback: returns the check runs for one
    // patchset. `runBefore` guards the single retry performed after an
    // 'invalid origin' error, so we never loop.
    async fetch({ repo, patchsetSha, changeNumber, patchsetNumber }, runBefore = false) {
      // Projects without checks enabled report "no runs" immediately.
      if (!configuration.supportedProjects.includes(repo)) {
        return { responseCode: 'OK' };
      }
      let num = changeNumber.toString(10);
      // Iterate over all check runs.
      let checksFetch = await fetch(makeStatcheckUri(`changes/${num}/versions/${patchsetNumber}/checks`), { credentials: 'include' });
      if (checksFetch.status === 400) {
        // Retry exactly once on 'invalid origin'; any other 400 is treated
        // as "no checks available".
        if ((await checksFetch.json()).error === 'invalid origin' && !runBefore) {
          return await checksProvider.fetch({ repo, patchsetSha, changeNumber, patchsetNumber }, true);
        }
        return { responseCode: 'OK' }
      } else if (checksFetch.status === 403) {
        console.warn(`Failed to fetch change '${changeNumber}' for authorization reasons, automatic login is still a WIP.`);
        return { responseCode: 'NOT_LOGGED_IN', loginCallback() {
        } };
      }
      let checks = await checksFetch.json();
      if (checks.length === 0) {
        return { responseCode: 'OK' };
      }
      let runs = [];
      let links = [];
      for (let check of checks) {
        // Map a statcheck server check onto a Gerrit CheckRun object.
        let checkrun = {
          attempt: check.id,
          checkName: check.name,
          externalId: check.id,
          status: check.status,
          checkLink: null, // TODO: have a proper and nice web URI
          labelName: 'Verified', // TODO: generalize what label a check affects.
          results: [],
          links: [], // TODO: have a proper web uri
        };
        // Timestamps arrive as Unix epoch seconds; Gerrit wants Date objects.
        if (check.started_at !== null) {
          checkrun.startedTimestamp = new Date(check.started_at * 1000);
        }
        if (check.completed_at !== null) {
          checkrun.finishedTimestamp = new Date(check.completed_at * 1000);
        }
        // NOTE(review): every result is reported with category SUCCESS
        // regardless of the check's actual status — confirm this is intended.
        if (check.results !== null) {
          checkrun.results = [
            {
              category: "SUCCESS",
              summary: check.summary
            }
          ];
        }
        runs.push(checkrun);
      }
      return { responseCode: 'OK', runs, links };
    }
  };
  plugin.checks().register(checksProvider);
});

View file

@ -85,6 +85,8 @@
pkg-config pkg-config
rabbitmq-server rabbitmq-server
hivemind hivemind
diesel-cli
postgresql.dev
]; ];
postHook = '' postHook = ''
@ -117,5 +119,6 @@
RUST_BACKTRACE = "1"; RUST_BACKTRACE = "1";
RUST_LOG = "ofborg=debug"; RUST_LOG = "ofborg=debug";
NIX_PATH = "nixpkgs=${pkgs.path}"; NIX_PATH = "nixpkgs=${pkgs.path}";
DATABASE_URL = "postgres:///ofborg";
}; };
} }

View file

@ -11,7 +11,7 @@ edition = "2018"
[dependencies] [dependencies]
async-stream = "0.3.6" async-stream = "0.3.6"
async-trait = "0.1.83" async-trait = "0.1.83"
axum = "0.7.8" axum = { version = "0.7.8", features = ["macros"] }
base64 = "0.22.1" base64 = "0.22.1"
brace-expand = "0.1.0" brace-expand = "0.1.0"
chrono = "0.4.38" chrono = "0.4.38"
@ -51,3 +51,4 @@ tracing-opentelemetry = "0.28.0"
uuid = { version = "1.11", features = ["v4"] } uuid = { version = "1.11", features = ["v4"] }
zstd = "0.13.2" zstd = "0.13.2"
blake3 = { version = "1.5.5", features = ["digest"] } blake3 = { version = "1.5.5", features = ["digest"] }
diesel = { version = "2.2.6", features = ["postgres"] }

9
ofborg/diesel.toml Normal file
View file

@ -0,0 +1,9 @@
# For documentation on how to configure this file,
# see https://diesel.rs/guides/configuring-diesel-cli

[print_schema]
# `diesel print-schema` / migrations write the generated Rust schema here.
file = "src/models/schema.rs"
custom_type_derives = ["diesel::query_builder::QueryId", "Clone"]

[migrations_directory]
# NOTE(review): absolute path into one developer's home directory — this will
# break for everyone else; prefer a path relative to this file (e.g. "migrations").
dir = "/home/raito/dev/git.lix.systems/the-distro/ofborg/ofborg/migrations"

View file

@ -0,0 +1,6 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.

-- Reverses `up.sql`: removes the `updated_at` helper functions.
DROP FUNCTION IF EXISTS diesel_manage_updated_at(_tbl regclass);
DROP FUNCTION IF EXISTS diesel_set_updated_at();

View file

@ -0,0 +1,36 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.

-- Sets up a trigger for the given table to automatically set a column called
-- `updated_at` whenever the row is modified (unless `updated_at` was included
-- in the modified columns)
--
-- # Example
--
-- ```sql
-- CREATE TABLE users (id SERIAL PRIMARY KEY, updated_at TIMESTAMP NOT NULL DEFAULT NOW());
--
-- SELECT diesel_manage_updated_at('users');
-- ```
CREATE OR REPLACE FUNCTION diesel_manage_updated_at(_tbl regclass) RETURNS VOID AS $$
BEGIN
    EXECUTE format('CREATE TRIGGER set_updated_at BEFORE UPDATE ON %s
                    FOR EACH ROW EXECUTE PROCEDURE diesel_set_updated_at()', _tbl);
END;
$$ LANGUAGE plpgsql;

-- Trigger function: bump `updated_at` to the current timestamp whenever a row
-- actually changed and the caller did not set `updated_at` themselves.
CREATE OR REPLACE FUNCTION diesel_set_updated_at() RETURNS trigger AS $$
BEGIN
    IF (
        NEW IS DISTINCT FROM OLD AND
        NEW.updated_at IS NOT DISTINCT FROM OLD.updated_at
    ) THEN
        NEW.updated_at := current_timestamp;
    END IF;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

View file

@ -0,0 +1,2 @@
-- This file should undo anything in `up.sql`

-- Reverses `up.sql`: drops the checks table created there.
DROP TABLE IF EXISTS "checks";

View file

@ -0,0 +1,10 @@
-- Your SQL goes here

-- Backing table for the statcheck server's check runs
-- (mirrored in Rust by ofborg/src/models/schema.rs).
CREATE TABLE "checks"(
	-- NOTE(review): no sequence/identity attached — ids must be assigned by
	-- the application; confirm this is intended.
	"id" INT4 NOT NULL PRIMARY KEY,
	"name" VARCHAR NOT NULL,
	"status" VARCHAR NOT NULL,
	"started_at" TIMESTAMP,
	"completed_at" TIMESTAMP,
	"summary" TEXT NOT NULL
);

View file

@ -0,0 +1,119 @@
use axum::{
extract::State,
routing::{get, put},
Json, Router,
};
use ofborg::config::Config;
use ofborg::web::statcheck;
use serde::Serialize;
use std::{env, net::SocketAddr, os::unix::io::FromRawFd, sync::Arc};
use tokio::net::TcpListener;
// --- Entry point ---

/// Status & check server entry point.
///
/// Loads the configuration passed as the sole CLI argument, builds the axum
/// router for the statcheck API, then serves either on a systemd-activated
/// socket or on `$HOST:$PORT` (default `127.0.0.1:8000`).
#[tokio::main]
async fn main() {
    ofborg::setup_log();

    // Fixed: the usage string previously said `evaluation-filter <config>`,
    // copy-pasted from another binary — this is the statcheck server.
    let arg = env::args()
        .nth(1)
        .expect("usage: statcheck <config>");
    let cfg = ofborg::config::load(arg.as_ref());
    let shared_config = Arc::new(cfg);

    // Build the app router.
    let app = Router::new()
        .route("/health", get(health_check))
        .route("/config", get(config_check))
        .route(
            "/changes/:change_id/statuses",
            get(statcheck::get_statuses).put(statcheck::put_statuses),
        )
        .route(
            "/changes/:change_id/statuses/:status_id",
            get(statcheck::get_status).patch(statcheck::patch_status),
        )
        .route(
            "/changes/:change_id/checks/:check_id",
            get(statcheck::get_check).patch(statcheck::patch_check),
        )
        .route("/changes/:change_id/checks", put(statcheck::put_checks))
        .with_state(shared_config);

    // Check for systemd socket activation first.
    if let Some(listener) = get_systemd_listener() {
        tracing::info!("Running with systemd socket activation");
        axum::serve(listener, app.into_make_service())
            .await
            .expect("Failed to serve");
    } else {
        // Fallback to a manually-configured address for testing.
        let host = env::var("HOST").unwrap_or_else(|_| "127.0.0.1".to_string());
        let port = env::var("PORT")
            .unwrap_or_else(|_| "8000".to_string())
            .parse::<u16>()
            .expect("Invalid port number");
        let addr = SocketAddr::new(host.parse().expect("Invalid host"), port);
        tracing::info!("Running on http://{}", addr);
        let listener = TcpListener::bind(addr)
            .await
            .expect("Failed to bind on the provided socket address");
        // Fixed: this expect previously claimed a bind failure, but binding
        // already succeeded above — the failing operation here is serving.
        axum::serve(listener, app)
            .await
            .expect("Failed to serve");
    }
}
// --- Route Handlers ---

/// Serialized response body for `GET /health`.
#[derive(Serialize)]
struct HealthStatus {
    status: String,
}

/// Health check endpoint: always reports `"OK"`.
async fn health_check() -> Json<HealthStatus> {
    let body = HealthStatus {
        status: String::from("OK"),
    };
    Json(body)
}
/// Serialized response body for `GET /config`.
#[derive(Serialize)]
struct ConfigStatus {
    version: String,
    environment: String,
    gerrit_instance: Option<String>,
    // TODO: add ongoing_statuses as a simple counter?
}

/// Config endpoint: reports the crate version, the environment label and the
/// configured Gerrit instance URI, if any.
async fn config_check(State(config): State<Arc<Config>>) -> Json<ConfigStatus> {
    let gerrit_instance = config.gerrit.as_ref().map(|g| g.instance_uri.clone());
    let response = ConfigStatus {
        version: env!("CARGO_PKG_VERSION").to_string(),
        environment: "production".to_string(),
        gerrit_instance,
    };
    Json(response)
}
/// Try to retrieve a listener from systemd socket activation
fn get_systemd_listener() -> Option<tokio::net::TcpListener> {
if let Ok(listen_fds) = env::var("LISTEN_FDS") {
let listen_fds: i32 = listen_fds.parse().ok()?;
let fd_offset = 3; // File descriptors start at 3 in systemd
if listen_fds > 0 {
// Use the first systemd-provided file descriptor
let fd = fd_offset;
println!("Using systemd file descriptor: {}", fd);
unsafe {
let std_listener = std::net::TcpListener::from_raw_fd(fd);
std_listener.set_nonblocking(true).ok()?;
let listener = TcpListener::from_std(std_listener).ok()?;
return Some(listener);
}
}
}
None
}

View file

@ -1,83 +0,0 @@
/// Statuses and checks worker
/// - will keep a database of changes
/// - their statuses
/// - their checks
/// - is VCS/CI agnostic
use std::env;
use std::error::Error;

use ofborg::config;
use ofborg::easyamqp;
use ofborg::easyamqp::ChannelExt;
use ofborg::easyamqp::ConsumerExt;
use ofborg::easylapin;
use ofborg::tasks;
use tracing::info;

/// Worker entry point: connects to RabbitMQ using the config file passed as
/// the sole CLI argument, declares the statcheck API exchange/queue pair and
/// consumes API calls with `StatusCheckCollector` until the consumer stops.
#[tokio::main]
async fn main() -> Result<(), Box<dyn Error>> {
    ofborg::setup_log();

    let arg = env::args()
        .nth(1)
        .expect("usage: statcheck-worker <config>");
    let cfg = config::load(arg.as_ref());

    let conn = easylapin::from_config(&cfg.rabbitmq).await?;
    let mut chan = conn.create_channel().await?;

    // an RPC queue for verbs
    let api_queue_name = "statcheck-api".to_owned();
    // an event queue to be notified about statuses & checks changes.
    // NOTE(review): `event_queue_name` is only logged below, never declared
    // or bound — presumably future work; confirm.
    let event_queue_name = "statcheck-events".to_owned();

    // Durable topic exchange named after the API queue; survives broker restarts.
    chan.declare_exchange(easyamqp::ExchangeConfig {
        exchange: api_queue_name.clone(),
        exchange_type: easyamqp::ExchangeType::Topic,
        passive: false,
        durable: true,
        auto_delete: false,
        no_wait: false,
        internal: false,
    })
    .await?;

    chan.declare_queue(easyamqp::QueueConfig {
        queue: api_queue_name.clone(),
        passive: false,
        durable: true,
        exclusive: false,
        auto_delete: false,
        no_wait: false,
    })
    .await?;

    // Bind with no routing key: every message published to the exchange
    // reaches the API queue.
    chan.bind_queue(easyamqp::BindQueueConfig {
        queue: api_queue_name.clone(),
        exchange: api_queue_name.clone(),
        routing_key: None,
        no_wait: false,
    })
    .await?;

    info!("Waiting for API calls on {}", api_queue_name);
    info!("Notifying of new changes on {}", event_queue_name);

    // Blocks until the consumer finishes; acknowledgements are manual (no_ack: false).
    easylapin::WorkerChannel(chan)
        .consume(
            tasks::status_check_collector::StatusCheckCollector::new(cfg.statcheck.clone().db),
            easyamqp::ConsumeConfig {
                queue: api_queue_name.clone(),
                consumer_tag: format!("{}-{}", cfg.whoami(), api_queue_name),
                no_local: false,
                no_ack: false,
                no_wait: false,
                exclusive: false,
            },
        )
        .await?;

    drop(conn); // Close connection.
    info!("Closed the session... EOF");
    Ok(())
}

View file

@ -37,6 +37,7 @@ pub mod ghevent;
pub mod locks; pub mod locks;
pub mod maintainers; pub mod maintainers;
pub mod message; pub mod message;
pub mod models;
pub mod nix; pub mod nix;
pub mod nixenv; pub mod nixenv;
pub mod nixstats; pub mod nixstats;
@ -49,6 +50,7 @@ pub mod tasks;
pub mod test_scratch; pub mod test_scratch;
pub mod utils; pub mod utils;
pub mod vcs; pub mod vcs;
pub mod web;
pub mod worker; pub mod worker;
pub mod writetoline; pub mod writetoline;
@ -74,6 +76,7 @@ pub mod ofborg {
pub use crate::tasks; pub use crate::tasks;
pub use crate::test_scratch; pub use crate::test_scratch;
pub use crate::vcs; pub use crate::vcs;
pub use crate::web;
pub use crate::worker; pub use crate::worker;
pub use crate::writetoline; pub use crate::writetoline;

1
ofborg/src/models/mod.rs Normal file
View file

@ -0,0 +1 @@
// Diesel-generated database schema definitions.
pub mod schema;

View file

@ -0,0 +1,12 @@
// @generated automatically by Diesel CLI.

// Table mapping for the `checks` table (see the create_checks migration).
// Regenerate with `diesel print-schema` rather than editing by hand.
diesel::table! {
    checks (id) {
        id -> Int4,
        name -> Varchar,
        status -> Varchar,
        started_at -> Nullable<Timestamp>,
        completed_at -> Nullable<Timestamp>,
        summary -> Text,
    }
}

View file

@ -114,7 +114,7 @@ pub enum Conclusion {
ActionRequired, ActionRequired,
} }
#[derive(Debug, Serialize, PartialEq)] #[derive(Debug, Serialize, Deserialize, PartialEq)]
pub struct CheckRunOptions { pub struct CheckRunOptions {
pub name: String, pub name: String,
pub head_sha: String, pub head_sha: String,

1
ofborg/src/web/mod.rs Normal file
View file

@ -0,0 +1 @@
// HTTP handlers for the status & check (statcheck) server API.
pub mod statcheck;

View file

@ -0,0 +1,103 @@
use axum::{extract::Path, Json};
use serde::{Deserialize, Serialize};

use crate::{
    message::Repo,
    vcs::generic::{CheckRunOptions, CheckRunState},
};
// This contains the web code for the status & checks server.
// (Converted from `///` — it documented the module, not this struct.)
// TODO: how to do code reuse with the other structure that contains an API handle?

/// A commit status as exposed over the statcheck HTTP API.
#[allow(dead_code)]
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct CommitStatus {
    repo: Repo,          // repository the status belongs to
    sha: String,         // commit the status applies to
    context: String,     // status context/name
    description: String, // human-readable description
    url: String,         // details link
}
/// Handler for GET /changes/:change_id/statuses
pub async fn get_statuses(Path(_change_id): Path<String>) -> Json<Vec<CommitStatus>> {
    // TODO: Retrieve the statuses from the data store
    let statuses: Vec<CommitStatus> = Vec::new(); // empty list placeholder for now
    Json(statuses)
}
/// Handler for GET /changes/:change_id/statuses/:status_id
pub async fn get_status(
    Path((_change_id, _status_id)): Path<(String, String)>,
) -> Json<CommitStatus> {
    // TODO: Retrieve a specific status from the data store
    let repo = Repo {
        owner: "example".to_string(),
        name: "repo".to_string(),
        full_name: "example/repo".to_string(),
        clone_url: "https://example.com/repo.git".to_string(),
    };
    // Mocked status until persistence lands.
    let status = CommitStatus {
        repo,
        sha: "example_sha".to_string(),
        context: "example_context".to_string(),
        description: "example_description".to_string(),
        url: "https://example.com/status".to_string(),
    };
    Json(status)
}
/// Handler for PUT /changes/:change_id/statuses
pub async fn put_statuses(
    Path(change_id): Path<String>,
    Json(_payload): Json<CommitStatus>,
) -> Json<String> {
    // TODO: Add the status to the data store
    let message = format!("Added status for change_id {}", change_id);
    Json(message)
}
/// Handler for PATCH /changes/:change_id/statuses/:status_id
pub async fn patch_status(
    Path((change_id, status_id)): Path<(String, String)>,
    Json(_payload): Json<CommitStatus>,
) -> Json<String> {
    // TODO: Update the status in the data store
    let message = format!(
        "Updated status_id {} for change_id {}",
        status_id, change_id
    );
    Json(message)
}
/// Handler for GET /changes/:change_id/checks/:check_id
pub async fn get_check(
    Path((_change_id, _check_id)): Path<(String, String)>,
) -> Json<CheckRunOptions> {
    // TODO: Retrieve a specific check from the data store
    // Mocked, in-flight check until persistence lands.
    let check = CheckRunOptions {
        name: "example_check".to_string(),
        head_sha: "example_sha".to_string(),
        details_url: Some("https://example.com/details".to_string()),
        external_id: Some("external_id".to_string()),
        status: Some(CheckRunState::Running),
        started_at: Some("2024-12-17T00:00:00Z".to_string()),
        conclusion: None,
        completed_at: None,
    };
    Json(check)
}
/// Handler for PUT /changes/:change_id/checks
pub async fn put_checks(
    Path(change_id): Path<String>,
    Json(_payload): Json<CheckRunOptions>,
) -> Json<String> {
    // TODO: Add the check to the data store
    let message = format!("Added check for change_id {}", change_id);
    Json(message)
}
/// Handler for PATCH /changes/:change_id/checks/:check_id
pub async fn patch_check(
    Path((change_id, check_id)): Path<(String, String)>,
    Json(_payload): Json<CheckRunOptions>,
) -> Json<String> {
    // TODO: Update the check in the data store
    let message = format!(
        "Updated check_id {} for change_id {}",
        check_id, change_id
    );
    Json(message)
}