Examples

These examples show how to integrate MetricFlow into common web frameworks. Each one adds request counting and latency tracking through middleware or the framework's equivalent hooks.

Flask (Python)

Add metrics collection to a Flask application using before_request and after_request hooks. This tracks request count and latency per endpoint.

from flask import Flask, request, g
import time
import metricflow

app = Flask(__name__)
mf = metricflow.init(exporter="prometheus", prometheus={"port": 9090})

http_requests = mf.counter(
    "http_requests_total",
    description="Total HTTP requests",
    labels=["method", "endpoint", "status"]
)
http_duration = mf.histogram(
    "http_request_duration_seconds",
    description="HTTP request latency",
    labels=["method", "endpoint"],
    buckets=[0.01, 0.05, 0.1, 0.25, 0.5, 1.0]
)

@app.before_request
def start_timer():
    # Stash the start time on Flask's per-request context object.
    g.start_time = time.monotonic()

@app.after_request
def record_metrics(response):
    # Record count and latency once the view has produced a response.
    duration = time.monotonic() - g.start_time
    http_requests.inc(
        method=request.method,
        endpoint=request.path,
        status=str(response.status_code)
    )
    http_duration.observe(
        duration,
        method=request.method,
        endpoint=request.path
    )
    return response

@app.route("/api/users")
def get_users():
    return {"users": ["alice", "bob"]}

@app.route("/api/health")
def health():
    return {"status": "ok"}

net/http (Go)

Wrap your HTTP handlers with a metrics middleware function. The middleware times each request and uses a small http.ResponseWriter wrapper to capture the status code the handler writes.

package main

import (
    "log"
    "net/http"
    "strconv"
    "time"

    "github.com/metricflow/metricflow-go"
)

// statusRecorder wraps http.ResponseWriter so the middleware can report
// the status code the wrapped handler actually wrote.
type statusRecorder struct {
    http.ResponseWriter
    status int
}

func (r *statusRecorder) WriteHeader(code int) {
    r.status = code
    r.ResponseWriter.WriteHeader(code)
}

func main() {
    mf := metricflow.Init(metricflow.Config{
        Exporter: "prometheus",
        Prometheus: metricflow.PrometheusConfig{
            Port: 9090,
            Path: "/metrics",
        },
    })
    defer mf.Shutdown()

    httpRequests := mf.Counter("http_requests_total",
        metricflow.Labels("method", "path", "status"),
    )
    httpDuration := mf.Histogram("http_request_duration_seconds",
        metricflow.Labels("method", "path"),
        metricflow.Buckets(0.01, 0.05, 0.1, 0.5, 1.0),
    )

    // metrics wraps any handler with request counting and latency tracking.
    metrics := func(next http.Handler) http.Handler {
        return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
            start := time.Now()
            rec := &statusRecorder{ResponseWriter: w, status: http.StatusOK}
            next.ServeHTTP(rec, r)
            duration := time.Since(start).Seconds()

            httpRequests.Inc(r.Method, r.URL.Path, strconv.Itoa(rec.status))
            httpDuration.Observe(duration, r.Method, r.URL.Path)
        })
    }

    hello := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
        w.Write([]byte("Hello, World!"))
    })

    http.Handle("/", metrics(hello))
    log.Fatal(http.ListenAndServe(":8080", nil))
}
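
The statusRecorder wrapper is needed because net/http gives middleware no direct way to see the status code a wrapped handler writes; embedding http.ResponseWriter and overriding WriteHeader is the usual workaround. The recorder defaults to 200 because handlers that never call WriteHeader implicitly return that status.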

Express (Node.js)

Add metrics collection to an Express application using middleware.

const express = require("express");
const metricflow = require("@metricflow/sdk");

const app = express();
const mf = metricflow.init({
    exporter: "prometheus",
    prometheus: { port: 9090 }
});

const httpRequests = mf.counter("http_requests_total", {
    description: "Total HTTP requests",
    labels: ["method", "route", "status"]
});
const httpDuration = mf.histogram("http_request_duration_seconds", {
    description: "HTTP request latency",
    labels: ["method", "route"],
    buckets: [0.01, 0.05, 0.1, 0.25, 0.5, 1.0]
});

app.use((req, res, next) => {
    const start = Date.now();
    // "finish" fires after the response has been sent, so the matched
    // route (if any) is available on req.route by then.
    res.on("finish", () => {
        const duration = (Date.now() - start) / 1000;
        httpRequests.inc({
            method: req.method,
            route: req.route?.path || req.path,
            status: String(res.statusCode)
        });
        httpDuration.observe(duration, {
            method: req.method,
            route: req.route?.path || req.path
        });
    });
    next();
});

app.get("/api/users", (req, res) => {
    res.json({ users: ["alice", "bob"] });
});

app.listen(3000, () => {
    console.log("Server running on port 3000");
});
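
Labelling by req.route?.path rather than the raw URL keeps the route label limited to the route templates you define (a parameterized route such as /api/users/:id, for example, reports one label value instead of one per user ID). This matters because Prometheus stores a separate time series for every distinct label combination; the fallback to req.path only applies to requests that match no route, such as 404s.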