Initial scaffold: web API (Bun/Elysia) + monitor (Rust/Tokio)
This commit is contained in:
commit
570222c7a9
|
|
@ -0,0 +1,8 @@
|
|||
# Web app + coordinator
|
||||
DATABASE_URL=postgres://pingql:pingql@localhost:5432/pingql
|
||||
MONITOR_TOKEN=changeme-use-a-random-secret
|
||||
|
||||
# Rust monitor
|
||||
COORDINATOR_URL=http://localhost:3000
|
||||
MONITOR_TOKEN=changeme-use-a-random-secret
|
||||
RUST_LOG=info
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
.env
|
||||
node_modules/
|
||||
dist/
|
||||
target/
|
||||
*.lock
|
||||
|
|
@ -0,0 +1,20 @@
|
|||
# PingQL
|
||||
|
||||
Developer-friendly uptime monitoring. Works like a query — filter status, parse content, write custom checks.
|
||||
|
||||
## Apps
|
||||
|
||||
- **apps/web** — API, dashboard, and job coordinator (Bun + Elysia)
|
||||
- **apps/monitor** — Check runner (Rust + Tokio)
|
||||
- **cli** — CLI tool (Bun)
|
||||
|
||||
## Quick start
|
||||
|
||||
```bash
|
||||
bun install
|
||||
bun run dev # starts web app
|
||||
```
|
||||
|
||||
## Docs
|
||||
|
||||
Coming soon at pingql.com
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
[package]
|
||||
name = "pingql-monitor"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
tokio = { version = "1", features = ["full"] }
|
||||
reqwest = { version = "0.12", features = ["json", "rustls-tls"], default-features = false }
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
serde_json = "1"
|
||||
scraper = "0.21" # CSS selector / HTML parsing
|
||||
futures = "0.3"
|
||||
regex = "1"
|
||||
anyhow = "1"
|
||||
tracing = "0.1"
|
||||
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
|
||||
|
|
@ -0,0 +1,35 @@
|
|||
mod query;
|
||||
mod runner;
|
||||
mod types;
|
||||
|
||||
use anyhow::Result;
|
||||
use std::env;
|
||||
use tokio::time::{sleep, Duration};
|
||||
use tracing::{error, info};
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<()> {
|
||||
tracing_subscriber::fmt()
|
||||
.with_env_filter(env::var("RUST_LOG").unwrap_or_else(|_| "info".into()))
|
||||
.init();
|
||||
|
||||
let coordinator_url = env::var("COORDINATOR_URL")
|
||||
.unwrap_or_else(|_| "http://localhost:3000".into());
|
||||
let monitor_token = env::var("MONITOR_TOKEN")
|
||||
.expect("MONITOR_TOKEN must be set");
|
||||
|
||||
info!("PingQL monitor starting, coordinator: {coordinator_url}");
|
||||
|
||||
let client = reqwest::Client::builder()
|
||||
.timeout(Duration::from_secs(30))
|
||||
.user_agent("PingQL-Monitor/0.1")
|
||||
.build()?;
|
||||
|
||||
loop {
|
||||
match runner::fetch_and_run(&client, &coordinator_url, &monitor_token).await {
|
||||
Ok(n) => info!("Ran {n} checks"),
|
||||
Err(e) => error!("Check cycle failed: {e}"),
|
||||
}
|
||||
sleep(Duration::from_secs(10)).await;
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,141 @@
|
|||
/// PingQL query evaluation against a check response.
|
||||
///
|
||||
/// Query shape (MongoDB-inspired):
|
||||
///
|
||||
/// Simple equality:
|
||||
/// { "status": 200 }
|
||||
///
|
||||
/// Operators:
|
||||
/// { "status": { "$eq": 200 } }
|
||||
/// { "status": { "$ne": 500 } }
|
||||
/// { "status": { "$gte": 200, "$lt": 300 } }
|
||||
/// { "body": { "$contains": "healthy" } }
|
||||
/// { "body": { "$regex": "ok|healthy" } }
|
||||
///
|
||||
/// CSS selector (HTML parsing):
|
||||
/// { "$select": "span.status", "$eq": "operational" }
|
||||
///
|
||||
/// Logical:
|
||||
/// { "$and": [ { "status": 200 }, { "body": { "$contains": "ok" } } ] }
|
||||
/// { "$or": [ { "status": 200 }, { "status": 204 } ] }
|
||||
|
||||
use anyhow::{bail, Result};
|
||||
use regex::Regex;
|
||||
use scraper::{Html, Selector};
|
||||
use serde_json::Value;
|
||||
|
||||
/// Snapshot of one HTTP response that a PingQL query is evaluated against.
pub struct Response {
    /// HTTP status code (e.g. 200).
    pub status: u16,
    /// Full response body as text.
    pub body: String,
    /// Response headers. `resolve_field` lowercases the queried name before
    /// lookup, so keys stored here are assumed to already be lowercase —
    /// TODO confirm against the code that builds this map.
    pub headers: std::collections::HashMap<String, String>,
}
|
||||
|
||||
/// Returns true if `query` matches `response`. No query = always up.
|
||||
pub fn evaluate(query: &Value, response: &Response) -> Result<bool> {
|
||||
match query {
|
||||
Value::Object(map) => {
|
||||
// $and / $or
|
||||
if let Some(and) = map.get("$and") {
|
||||
let Value::Array(clauses) = and else { bail!("$and expects array") };
|
||||
return Ok(clauses.iter().all(|c| evaluate(c, response).unwrap_or(false)));
|
||||
}
|
||||
if let Some(or) = map.get("$or") {
|
||||
let Value::Array(clauses) = or else { bail!("$or expects array") };
|
||||
return Ok(clauses.iter().any(|c| evaluate(c, response).unwrap_or(false)));
|
||||
}
|
||||
// CSS selector shorthand: { "$select": "...", "$eq": "..." }
|
||||
if let Some(sel) = map.get("$select") {
|
||||
let sel_str = sel.as_str().unwrap_or("");
|
||||
let selected = css_select(&response.body, sel_str);
|
||||
if let Some(op_val) = map.get("$eq") {
|
||||
return Ok(selected.as_deref() == op_val.as_str());
|
||||
}
|
||||
if let Some(op_val) = map.get("$contains") {
|
||||
let needle = op_val.as_str().unwrap_or("");
|
||||
return Ok(selected.map(|s| s.contains(needle)).unwrap_or(false));
|
||||
}
|
||||
return Ok(selected.is_some());
|
||||
}
|
||||
// Field-level checks
|
||||
for (field, condition) in map {
|
||||
let field_val = resolve_field(field, response);
|
||||
if !eval_condition(condition, &field_val, response)? {
|
||||
return Ok(false);
|
||||
}
|
||||
}
|
||||
Ok(true)
|
||||
}
|
||||
_ => bail!("Query must be an object"),
|
||||
}
|
||||
}
|
||||
|
||||
fn resolve_field(field: &str, r: &Response) -> Value {
|
||||
match field {
|
||||
"status" | "status_code" => Value::Number(r.status.into()),
|
||||
"body" => Value::String(r.body.clone()),
|
||||
f if f.starts_with("headers.") => {
|
||||
let key = f.trim_start_matches("headers.").to_lowercase();
|
||||
r.headers.get(&key)
|
||||
.map(|v| Value::String(v.clone()))
|
||||
.unwrap_or(Value::Null)
|
||||
}
|
||||
_ => Value::Null,
|
||||
}
|
||||
}
|
||||
|
||||
fn eval_condition(condition: &Value, field_val: &Value, response: &Response) -> Result<bool> {
|
||||
match condition {
|
||||
// Shorthand: { "status": 200 }
|
||||
Value::Number(n) => Ok(field_val.as_f64() == n.as_f64()),
|
||||
Value::String(s) => Ok(field_val.as_str() == Some(s.as_str())),
|
||||
Value::Bool(b) => Ok(field_val.as_bool() == Some(*b)),
|
||||
Value::Object(ops) => {
|
||||
for (op, val) in ops {
|
||||
let ok = match op.as_str() {
|
||||
"$eq" => field_val == val,
|
||||
"$ne" => field_val != val,
|
||||
"$gt" => cmp_num(field_val, val, |a,b| a > b),
|
||||
"$gte" => cmp_num(field_val, val, |a,b| a >= b),
|
||||
"$lt" => cmp_num(field_val, val, |a,b| a < b),
|
||||
"$lte" => cmp_num(field_val, val, |a,b| a <= b),
|
||||
"$contains" => {
|
||||
let needle = val.as_str().unwrap_or("");
|
||||
field_val.as_str().map(|s| s.contains(needle)).unwrap_or(false)
|
||||
}
|
||||
"$regex" => {
|
||||
let pattern = val.as_str().unwrap_or("");
|
||||
let re = Regex::new(pattern).unwrap_or_else(|_| Regex::new("$^").unwrap());
|
||||
field_val.as_str().map(|s| re.is_match(s)).unwrap_or(false)
|
||||
}
|
||||
"$select" => {
|
||||
// Nested: { "body": { "$select": "css", "$eq": "val" } }
|
||||
let sel_str = val.as_str().unwrap_or("");
|
||||
let selected = css_select(&response.body, sel_str);
|
||||
if let Some(eq_val) = ops.get("$eq") {
|
||||
selected.as_deref() == eq_val.as_str()
|
||||
} else {
|
||||
selected.is_some()
|
||||
}
|
||||
}
|
||||
_ => true, // unknown op — skip
|
||||
};
|
||||
if !ok { return Ok(false); }
|
||||
}
|
||||
Ok(true)
|
||||
}
|
||||
_ => Ok(true),
|
||||
}
|
||||
}
|
||||
|
||||
/// Numeric comparison helper: false unless BOTH values coerce to f64.
fn cmp_num(a: &Value, b: &Value, f: impl Fn(f64, f64) -> bool) -> bool {
    let (Some(x), Some(y)) = (a.as_f64(), b.as_f64()) else {
        return false;
    };
    f(x, y)
}
|
||||
|
||||
fn css_select(html: &str, selector: &str) -> Option<String> {
|
||||
let doc = Html::parse_document(html);
|
||||
let sel = Selector::parse(selector).ok()?;
|
||||
doc.select(&sel).next().map(|el| el.text().collect::<String>().trim().to_string())
|
||||
}
|
||||
|
|
@ -0,0 +1,115 @@
|
|||
use crate::query::{self, Response};
|
||||
use crate::types::{CheckResult, Monitor};
|
||||
use anyhow::Result;
|
||||
use serde_json::{json, Value};
|
||||
use std::collections::HashMap;
|
||||
use std::time::Instant;
|
||||
use tracing::{debug, warn};
|
||||
|
||||
/// Fetch due monitors from coordinator, run them, post results back.
|
||||
pub async fn fetch_and_run(
|
||||
client: &reqwest::Client,
|
||||
coordinator_url: &str,
|
||||
token: &str,
|
||||
) -> Result<usize> {
|
||||
// Fetch due monitors
|
||||
let monitors: Vec<Monitor> = client
|
||||
.get(format!("{coordinator_url}/internal/due"))
|
||||
.header("x-monitor-token", token)
|
||||
.send()
|
||||
.await?
|
||||
.json()
|
||||
.await?;
|
||||
|
||||
let n = monitors.len();
|
||||
if n == 0 { return Ok(0); }
|
||||
|
||||
// Run all checks concurrently
|
||||
let tasks: Vec<_> = monitors.into_iter().map(|monitor| {
|
||||
let client = client.clone();
|
||||
let coordinator_url = coordinator_url.to_string();
|
||||
let token = token.to_string();
|
||||
tokio::spawn(async move {
|
||||
let result = run_check(&client, &monitor).await;
|
||||
if let Err(e) = post_result(&client, &coordinator_url, &token, result).await {
|
||||
warn!("Failed to post result for {}: {e}", monitor.id);
|
||||
}
|
||||
})
|
||||
}).collect();
|
||||
|
||||
futures::future::join_all(tasks).await;
|
||||
Ok(n)
|
||||
}
|
||||
|
||||
async fn run_check(client: &reqwest::Client, monitor: &Monitor) -> CheckResult {
|
||||
let start = Instant::now();
|
||||
|
||||
let result = client.get(&monitor.url).send().await;
|
||||
let latency_ms = start.elapsed().as_millis() as u64;
|
||||
|
||||
match result {
|
||||
Err(e) => CheckResult {
|
||||
monitor_id: monitor.id.clone(),
|
||||
status_code: None,
|
||||
latency_ms: Some(latency_ms),
|
||||
up: false,
|
||||
error: Some(e.to_string()),
|
||||
meta: None,
|
||||
},
|
||||
Ok(resp) => {
|
||||
let status = resp.status().as_u16();
|
||||
let headers: HashMap<String, String> = resp.headers().iter()
|
||||
.filter_map(|(k, v)| Some((k.to_string(), v.to_str().ok()?.to_string())))
|
||||
.collect();
|
||||
|
||||
let body = resp.text().await.unwrap_or_default();
|
||||
|
||||
// Evaluate query if present
|
||||
let (up, query_error) = if let Some(q) = &monitor.query {
|
||||
let response = Response { status, body: body.clone(), headers: headers.clone() };
|
||||
match query::evaluate(q, &response) {
|
||||
Ok(result) => (result, None),
|
||||
Err(e) => {
|
||||
warn!("Query error for {}: {e}", monitor.id);
|
||||
// Fall back to status-based up/down
|
||||
(status < 400, Some(e.to_string()))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Default: up if 2xx/3xx
|
||||
(status < 400, None)
|
||||
};
|
||||
|
||||
let meta = json!({
|
||||
"headers": headers,
|
||||
"body_preview": &body[..body.len().min(500)],
|
||||
});
|
||||
|
||||
debug!("{} → {status} {latency_ms}ms up={up}", monitor.url);
|
||||
|
||||
CheckResult {
|
||||
monitor_id: monitor.id.clone(),
|
||||
status_code: Some(status),
|
||||
latency_ms: Some(latency_ms),
|
||||
up,
|
||||
error: query_error,
|
||||
meta: Some(meta),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn post_result(
|
||||
client: &reqwest::Client,
|
||||
coordinator_url: &str,
|
||||
token: &str,
|
||||
result: CheckResult,
|
||||
) -> Result<()> {
|
||||
client
|
||||
.post(format!("{coordinator_url}/checks/ingest"))
|
||||
.header("x-monitor-token", token)
|
||||
.json(&result)
|
||||
.send()
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
|
@ -0,0 +1,20 @@
|
|||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
|
||||
/// A monitor definition as fetched from the coordinator's /internal/due endpoint.
#[derive(Debug, Deserialize)]
pub struct Monitor {
    /// Opaque monitor id; echoed back in `CheckResult::monitor_id`.
    pub id: String,
    /// Target URL the runner GETs.
    pub url: String,
    /// Desired check interval in seconds (scheduling happens coordinator-side).
    pub interval_s: i64,
    /// Optional PingQL query deciding up/down; None means "up on 2xx/3xx".
    pub query: Option<Value>,
}
|
||||
|
||||
/// Outcome of one check, posted back to the coordinator's /checks/ingest endpoint.
#[derive(Debug, Serialize)]
pub struct CheckResult {
    /// Id of the monitor this result belongs to.
    pub monitor_id: String,
    /// HTTP status, or None when the request failed at transport level.
    pub status_code: Option<u16>,
    /// Request latency in milliseconds.
    pub latency_ms: Option<u64>,
    /// Final verdict (query evaluation result, or status-based default).
    pub up: bool,
    /// Transport error, or query-evaluation error text when evaluation failed.
    pub error: Option<String>,
    /// Extra context as JSON (headers, body preview); None on transport error.
    pub meta: Option<Value>,
}
|
||||
|
|
@ -0,0 +1,34 @@
|
|||
# dependencies (bun install)
|
||||
node_modules
|
||||
|
||||
# output
|
||||
out
|
||||
dist
|
||||
*.tgz
|
||||
|
||||
# code coverage
|
||||
coverage
|
||||
*.lcov
|
||||
|
||||
# logs
|
||||
logs
|
||||
*.log
|
||||
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
|
||||
|
||||
# dotenv environment variable files
|
||||
.env
|
||||
.env.development.local
|
||||
.env.test.local
|
||||
.env.production.local
|
||||
.env.local
|
||||
|
||||
# caches
|
||||
.eslintcache
|
||||
.cache
|
||||
*.tsbuildinfo
|
||||
|
||||
# IntelliJ based IDEs
|
||||
.idea
|
||||
|
||||
# Finder (MacOS) folder config
|
||||
.DS_Store
|
||||
|
|
@ -0,0 +1,106 @@
|
|||
|
||||
Default to using Bun instead of Node.js.
|
||||
|
||||
- Use `bun <file>` instead of `node <file>` or `ts-node <file>`
|
||||
- Use `bun test` instead of `jest` or `vitest`
|
||||
- Use `bun build <file.html|file.ts|file.css>` instead of `webpack` or `esbuild`
|
||||
- Use `bun install` instead of `npm install` or `yarn install` or `pnpm install`
|
||||
- Use `bun run <script>` instead of `npm run <script>` or `yarn run <script>` or `pnpm run <script>`
|
||||
- Use `bunx <package> <command>` instead of `npx <package> <command>`
|
||||
- Bun automatically loads .env, so don't use dotenv.
|
||||
|
||||
## APIs
|
||||
|
||||
- `Bun.serve()` supports WebSockets, HTTPS, and routes. Don't use `express`.
|
||||
- `bun:sqlite` for SQLite. Don't use `better-sqlite3`.
|
||||
- `Bun.redis` for Redis. Don't use `ioredis`.
|
||||
- `Bun.sql` for Postgres. Don't use `pg` or `postgres.js`.
|
||||
- `WebSocket` is built-in. Don't use `ws`.
|
||||
- Prefer `Bun.file` over `node:fs`'s readFile/writeFile
|
||||
- Bun.$`ls` instead of execa.
|
||||
|
||||
## Testing
|
||||
|
||||
Use `bun test` to run tests.
|
||||
|
||||
```ts#index.test.ts
|
||||
import { test, expect } from "bun:test";
|
||||
|
||||
test("hello world", () => {
|
||||
expect(1).toBe(1);
|
||||
});
|
||||
```
|
||||
|
||||
## Frontend
|
||||
|
||||
Use HTML imports with `Bun.serve()`. Don't use `vite`. HTML imports fully support React, CSS, Tailwind.
|
||||
|
||||
Server:
|
||||
|
||||
```ts#index.ts
|
||||
import index from "./index.html"
|
||||
|
||||
Bun.serve({
|
||||
routes: {
|
||||
"/": index,
|
||||
"/api/users/:id": {
|
||||
GET: (req) => {
|
||||
return new Response(JSON.stringify({ id: req.params.id }));
|
||||
},
|
||||
},
|
||||
},
|
||||
// optional websocket support
|
||||
websocket: {
|
||||
open: (ws) => {
|
||||
ws.send("Hello, world!");
|
||||
},
|
||||
message: (ws, message) => {
|
||||
ws.send(message);
|
||||
},
|
||||
close: (ws) => {
|
||||
// handle close
|
||||
}
|
||||
},
|
||||
development: {
|
||||
hmr: true,
|
||||
console: true,
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
HTML files can import .tsx, .jsx or .js files directly and Bun's bundler will transpile & bundle automatically. `<link>` tags can point to stylesheets and Bun's CSS bundler will bundle.
|
||||
|
||||
```html#index.html
|
||||
<html>
|
||||
<body>
|
||||
<h1>Hello, world!</h1>
|
||||
<script type="module" src="./frontend.tsx"></script>
|
||||
</body>
|
||||
</html>
|
||||
```
|
||||
|
||||
With the following `frontend.tsx`:
|
||||
|
||||
```tsx#frontend.tsx
|
||||
import React from "react";
|
||||
import { createRoot } from "react-dom/client";
|
||||
|
||||
// import .css files directly and it works
|
||||
import './index.css';
|
||||
|
||||
const root = createRoot(document.body);
|
||||
|
||||
export default function Frontend() {
|
||||
return <h1>Hello, world!</h1>;
|
||||
}
|
||||
|
||||
root.render(<Frontend />);
|
||||
```
|
||||
|
||||
Then, run index.ts
|
||||
|
||||
```sh
|
||||
bun --hot ./index.ts
|
||||
```
|
||||
|
||||
For more information, read the Bun API docs in `node_modules/bun-types/docs/**.mdx`.
|
||||
|
|
@ -0,0 +1,15 @@
|
|||
# web
|
||||
|
||||
To install dependencies:
|
||||
|
||||
```bash
|
||||
bun install
|
||||
```
|
||||
|
||||
To run:
|
||||
|
||||
```bash
|
||||
bun run index.ts
|
||||
```
|
||||
|
||||
This project was created using `bun init` in bun v1.3.10. [Bun](https://bun.com) is a fast all-in-one JavaScript runtime.
|
||||
|
|
@ -0,0 +1 @@
|
|||
// Scaffold entry point left over from `bun init` (see README); prints a greeting when run.
console.log("Hello via Bun!");
|
||||
|
|
@ -0,0 +1,19 @@
|
|||
{
|
||||
"name": "@pingql/web",
|
||||
"version": "0.1.0",
|
||||
"scripts": {
|
||||
"dev": "bun run --hot src/index.ts",
|
||||
"start": "bun run src/index.ts",
|
||||
"build": "bun build src/index.ts --outdir dist"
|
||||
},
|
||||
"dependencies": {
|
||||
"elysia": "^1.4.27",
|
||||
"@elysiajs/cors": "^1.4.1",
|
||||
"@elysiajs/swagger": "^1.3.1",
|
||||
"postgres": "^3.4.8"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/bun": "^1.3.10",
|
||||
"typescript": "^5.9.3"
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,46 @@
|
|||
import postgres from "postgres";

// Single shared client for the whole app; connection string comes from
// DATABASE_URL with a local-dev default (matching the sample .env).
const sql = postgres(process.env.DATABASE_URL ?? "postgres://pingql:pingql@localhost:5432/pingql");

export default sql;
|
||||
|
||||
// Run migrations on startup.
// Idempotent: every statement is CREATE ... IF NOT EXISTS, so calling this on
// each boot is safe. NOTE(review): IF NOT EXISTS will not ALTER columns on
// existing tables — future schema changes need a real migration step.
export async function migrate() {
  // Accounts are keyed by a random key (see routes/auth.ts); no passwords stored.
  await sql`
    CREATE TABLE IF NOT EXISTS accounts (
      id TEXT PRIMARY KEY, -- random 16-digit key
      email_hash TEXT, -- optional, for recovery only
      created_at TIMESTAMPTZ DEFAULT now()
    )
  `;

  // One row per monitored URL; deleting an account cascades to its monitors.
  await sql`
    CREATE TABLE IF NOT EXISTS monitors (
      id TEXT PRIMARY KEY DEFAULT gen_random_uuid()::text,
      account_id TEXT NOT NULL REFERENCES accounts(id) ON DELETE CASCADE,
      name TEXT NOT NULL,
      url TEXT NOT NULL,
      interval_s INTEGER NOT NULL DEFAULT 60, -- check interval in seconds
      query JSONB, -- pingql query filter
      enabled BOOLEAN NOT NULL DEFAULT true,
      created_at TIMESTAMPTZ DEFAULT now()
    )
  `;

  // Append-only check history; rows are written by the Rust runner via /checks/ingest.
  await sql`
    CREATE TABLE IF NOT EXISTS check_results (
      id BIGSERIAL PRIMARY KEY,
      monitor_id TEXT NOT NULL REFERENCES monitors(id) ON DELETE CASCADE,
      checked_at TIMESTAMPTZ NOT NULL DEFAULT now(),
      status_code INTEGER,
      latency_ms INTEGER,
      up BOOLEAN NOT NULL,
      error TEXT,
      meta JSONB -- headers, body snippet, etc.
    )
  `;

  // Serves both history reads and the "latest result per monitor" lateral join
  // used by /internal/due.
  await sql`CREATE INDEX IF NOT EXISTS idx_results_monitor ON check_results(monitor_id, checked_at DESC)`;

  console.log("DB ready");
}
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
import { Elysia } from "elysia";
|
||||
import { cors } from "@elysiajs/cors";
|
||||
import { swagger } from "@elysiajs/swagger";
|
||||
import { checks } from "./routes/checks";
|
||||
import { monitors } from "./routes/monitors";
|
||||
import { auth } from "./routes/auth";
|
||||
import { internal } from "./routes/internal";
|
||||
import { migrate } from "./db";
|
||||
|
||||
await migrate();
|
||||
|
||||
const app = new Elysia()
|
||||
.use(cors())
|
||||
.use(swagger({ path: "/docs", documentation: { info: { title: "PingQL API", version: "0.1.0" } } }))
|
||||
.get("/", () => ({ name: "PingQL", version: "0.1.0", docs: "/docs" }))
|
||||
.use(auth)
|
||||
.use(monitors)
|
||||
.use(checks)
|
||||
.use(internal)
|
||||
.listen(3000);
|
||||
|
||||
console.log(`PingQL running at http://localhost:${app.server?.port}`);
|
||||
|
|
@ -0,0 +1,45 @@
|
|||
import { Elysia, t } from "elysia";
|
||||
import { randomBytes, createHash } from "crypto";
|
||||
import sql from "../db";
|
||||
|
||||
// Generate a memorable 16-digit account key: XXXX-XXXX-XXXX-XXXX
|
||||
function generateAccountKey(): string {
|
||||
const bytes = randomBytes(8);
|
||||
const hex = bytes.toString("hex").toUpperCase();
|
||||
return `${hex.slice(0, 4)}-${hex.slice(4, 8)}-${hex.slice(8, 12)}-${hex.slice(12, 16)}`;
|
||||
}
|
||||
|
||||
// Middleware: validate account key from Authorization header.
// Extends the app with a derive step that either injects `accountId` into the
// handler context or short-circuits with 401.
// NOTE(review): the key doubles as the accounts primary key, and validation is
// a plain equality lookup (not constant-time) — presumably acceptable for
// 64-bit random keys, but worth confirming.
export function requireAuth(app: Elysia) {
  return app.derive(async ({ headers, error }) => {
    // Accept "Bearer <key>"; a bare key without the prefix also passes through.
    const key = headers["authorization"]?.replace("Bearer ", "").trim();
    if (!key) return error(401, { error: "Missing account key. Use: Authorization: Bearer <key>" });

    // Existence of the row IS the authentication.
    const [account] = await sql`SELECT id FROM accounts WHERE id = ${key}`;
    if (!account) return error(401, { error: "Invalid account key" });

    return { accountId: account.id };
  });
}
|
||||
|
||||
export const auth = new Elysia({ prefix: "/auth" })
|
||||
// Create a new account — no email required
|
||||
.post("/register", async ({ body }) => {
|
||||
const key = generateAccountKey();
|
||||
const emailHash = body.email
|
||||
? createHash("sha256").update(body.email.toLowerCase().trim()).digest("hex")
|
||||
: null;
|
||||
|
||||
await sql`INSERT INTO accounts (id, email_hash) VALUES (${key}, ${emailHash})`;
|
||||
|
||||
return {
|
||||
key,
|
||||
message: "Save this key — it's your only credential. We don't store it.",
|
||||
...(body.email ? { email_registered: true } : { email_registered: false }),
|
||||
};
|
||||
}, {
|
||||
body: t.Object({
|
||||
email: t.Optional(t.String({ format: "email", description: "Optional. Only used for account recovery." })),
|
||||
}),
|
||||
detail: { summary: "Create account", tags: ["auth"] },
|
||||
});
|
||||
|
|
@ -0,0 +1,44 @@
|
|||
import { Elysia } from "elysia";
|
||||
import { requireAuth } from "./auth";
|
||||
import sql from "../db";
|
||||
|
||||
export const checks = new Elysia({ prefix: "/checks" })
|
||||
.use(requireAuth)
|
||||
|
||||
// Get recent results for a monitor
|
||||
.get("/:monitorId", async ({ accountId, params, query, error }) => {
|
||||
// Verify ownership
|
||||
const [monitor] = await sql`
|
||||
SELECT id FROM monitors WHERE id = ${params.monitorId} AND account_id = ${accountId}
|
||||
`;
|
||||
if (!monitor) return error(404, { error: "Not found" });
|
||||
|
||||
const limit = Math.min(Number(query.limit ?? 100), 1000);
|
||||
return sql`
|
||||
SELECT * FROM check_results
|
||||
WHERE monitor_id = ${params.monitorId}
|
||||
ORDER BY checked_at DESC
|
||||
LIMIT ${limit}
|
||||
`;
|
||||
}, { detail: { summary: "Get check history", tags: ["checks"] } })
|
||||
|
||||
// Internal endpoint: monitor runner posts results here
|
||||
.post("/ingest", async ({ body, headers, error }) => {
|
||||
const token = headers["x-monitor-token"];
|
||||
if (token !== process.env.MONITOR_TOKEN) return error(401, { error: "Unauthorized" });
|
||||
|
||||
await sql`
|
||||
INSERT INTO check_results (monitor_id, status_code, latency_ms, up, error, meta)
|
||||
VALUES (
|
||||
${body.monitor_id},
|
||||
${body.status_code ?? null},
|
||||
${body.latency_ms ?? null},
|
||||
${body.up},
|
||||
${body.error ?? null},
|
||||
${body.meta ? sql.json(body.meta) : null}
|
||||
)
|
||||
`;
|
||||
return { ok: true };
|
||||
}, {
|
||||
detail: { summary: "Ingest check result (monitor runner only)", tags: ["internal"] },
|
||||
});
|
||||
|
|
@ -0,0 +1,28 @@
|
|||
/// Internal endpoints used by the Rust monitor runner.
|
||||
/// Protected by MONITOR_TOKEN — not exposed to users.
|
||||
|
||||
import { Elysia } from "elysia";
|
||||
import sql from "../db";
|
||||
|
||||
export const internal = new Elysia({ prefix: "/internal" })
  // Token gate: every /internal route requires the shared MONITOR_TOKEN header.
  .derive(({ headers, error }) => {
    if (headers["x-monitor-token"] !== process.env.MONITOR_TOKEN)
      return error(401, { error: "Unauthorized" });
    return {};
  })

  // Returns monitors that are due for a check.
  // "Due" = enabled AND (never checked OR last result older than interval_s).
  // The LATERAL subquery fetches only the newest check_results row per monitor
  // (served by the idx_results_monitor index).
  // NOTE(review): there is no claim/lock step — two runner instances polling
  // concurrently would both receive (and check) the same due monitors.
  .get("/due", async () => {
    return sql`
      SELECT m.id, m.url, m.interval_s, m.query
      FROM monitors m
      LEFT JOIN LATERAL (
        SELECT checked_at FROM check_results
        WHERE monitor_id = m.id
        ORDER BY checked_at DESC LIMIT 1
      ) last ON true
      WHERE m.enabled = true
        AND (last.checked_at IS NULL
          OR last.checked_at < now() - (m.interval_s || ' seconds')::interval)
    `;
  });
|
||||
|
|
@ -0,0 +1,77 @@
|
|||
import { Elysia, t } from "elysia";
|
||||
import { requireAuth } from "./auth";
|
||||
import sql from "../db";
|
||||
|
||||
// Request schema for monitor bodies: used as-is for create (POST) and
// partialized via t.Partial for update (PATCH).
const MonitorBody = t.Object({
  name: t.String({ description: "Human-readable name" }),
  url: t.String({ format: "uri", description: "URL to check" }),
  interval_s: t.Optional(t.Number({ minimum: 10, default: 60, description: "Check interval in seconds" })),
  query: t.Optional(t.Any({ description: "PingQL query — filter conditions for up/down" })),
});
|
||||
|
||||
// Monitor CRUD. Every route is account-scoped via requireAuth, and every SQL
// statement filters on account_id, so a key can only touch its own rows
// (wrong-account access surfaces as 404 rather than 403).
export const monitors = new Elysia({ prefix: "/monitors" })
  .use(requireAuth)

  // List monitors
  .get("/", async ({ accountId }) => {
    return sql`SELECT * FROM monitors WHERE account_id = ${accountId} ORDER BY created_at DESC`;
  }, { detail: { summary: "List monitors", tags: ["monitors"] } })

  // Create monitor (interval defaults to 60s; query is optional)
  .post("/", async ({ accountId, body }) => {
    const [monitor] = await sql`
      INSERT INTO monitors (account_id, name, url, interval_s, query)
      VALUES (${accountId}, ${body.name}, ${body.url}, ${body.interval_s ?? 60}, ${body.query ? sql.json(body.query) : null})
      RETURNING *
    `;
    return monitor;
  }, { body: MonitorBody, detail: { summary: "Create monitor", tags: ["monitors"] } })

  // Get monitor + recent status (last 100 results, newest first)
  .get("/:id", async ({ accountId, params, error }) => {
    const [monitor] = await sql`
      SELECT * FROM monitors WHERE id = ${params.id} AND account_id = ${accountId}
    `;
    if (!monitor) return error(404, { error: "Not found" });

    const results = await sql`
      SELECT * FROM check_results WHERE monitor_id = ${params.id}
      ORDER BY checked_at DESC LIMIT 100
    `;
    return { ...monitor, results };
  }, { detail: { summary: "Get monitor with results", tags: ["monitors"] } })

  // Update monitor — omitted fields keep their current values via COALESCE.
  // NOTE(review): COALESCE also makes it impossible to reset `query` back to
  // NULL through PATCH — confirm whether that is intended.
  .patch("/:id", async ({ accountId, params, body, error }) => {
    const [monitor] = await sql`
      UPDATE monitors SET
        name = COALESCE(${body.name ?? null}, name),
        url = COALESCE(${body.url ?? null}, url),
        interval_s = COALESCE(${body.interval_s ?? null}, interval_s),
        query = COALESCE(${body.query ? sql.json(body.query) : null}, query)
      WHERE id = ${params.id} AND account_id = ${accountId}
      RETURNING *
    `;
    if (!monitor) return error(404, { error: "Not found" });
    return monitor;
  }, { body: t.Partial(MonitorBody), detail: { summary: "Update monitor", tags: ["monitors"] } })

  // Delete monitor (check_results rows cascade via the FK in db.ts)
  .delete("/:id", async ({ accountId, params, error }) => {
    const [deleted] = await sql`
      DELETE FROM monitors WHERE id = ${params.id} AND account_id = ${accountId} RETURNING id
    `;
    if (!deleted) return error(404, { error: "Not found" });
    return { deleted: true };
  }, { detail: { summary: "Delete monitor", tags: ["monitors"] } })

  // Toggle enabled — disabled monitors are excluded by /internal/due
  .post("/:id/toggle", async ({ accountId, params, error }) => {
    const [monitor] = await sql`
      UPDATE monitors SET enabled = NOT enabled
      WHERE id = ${params.id} AND account_id = ${accountId}
      RETURNING id, enabled
    `;
    if (!monitor) return error(404, { error: "Not found" });
    return monitor;
  }, { detail: { summary: "Toggle monitor on/off", tags: ["monitors"] } });
|
||||
|
|
@ -0,0 +1,29 @@
|
|||
{
|
||||
"compilerOptions": {
|
||||
// Environment setup & latest features
|
||||
"lib": ["ESNext"],
|
||||
"target": "ESNext",
|
||||
"module": "Preserve",
|
||||
"moduleDetection": "force",
|
||||
"jsx": "react-jsx",
|
||||
"allowJs": true,
|
||||
|
||||
// Bundler mode
|
||||
"moduleResolution": "bundler",
|
||||
"allowImportingTsExtensions": true,
|
||||
"verbatimModuleSyntax": true,
|
||||
"noEmit": true,
|
||||
|
||||
// Best practices
|
||||
"strict": true,
|
||||
"skipLibCheck": true,
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"noUncheckedIndexedAccess": true,
|
||||
"noImplicitOverride": true,
|
||||
|
||||
// Some stricter flags (disabled by default)
|
||||
"noUnusedLocals": false,
|
||||
"noUnusedParameters": false,
|
||||
"noPropertyAccessFromIndexSignature": false
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,11 @@
|
|||
{
|
||||
"name": "pingql",
|
||||
"version": "0.1.0",
|
||||
"private": true,
|
||||
"workspaces": ["apps/web", "cli"],
|
||||
"scripts": {
|
||||
"dev": "bun run --cwd apps/web dev",
|
||||
"build": "bun run --cwd apps/web build",
|
||||
"monitor": "cargo run --manifest-path apps/monitor/Cargo.toml"
|
||||
}
|
||||
}
|
||||
Loading…
Reference in New Issue