commit b06823e03e3bdd212fff28d23050bb7f1aaae37e Author: Matthew Knight Date: Mon Mar 2 13:39:10 2026 -0800 Initial commit: crash artifact aggregator and regression detection system Cairn is a web service for collecting, fingerprinting, and analyzing crash artifacts across repositories. Includes a CLI client, REST API, web dashboard, PostgreSQL storage with migrations, S3-compatible blob storage, multi-format crash parsers (ASan, GDB, Zig, generic), regression detection between commits, campaign tracking, and optional Forgejo integration for issue syncing. Co-Authored-By: Claude Opus 4.6 diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..ae5a6b9 --- /dev/null +++ b/.gitignore @@ -0,0 +1,6 @@ +cairn-server +cairn +*.exe +*.test +*.out +.env diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..e55fdf4 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,12 @@ +FROM golang:1.25-alpine AS builder + +WORKDIR /src +COPY go.mod go.sum ./ +RUN go mod download +COPY . . 
+RUN CGO_ENABLED=0 go build -o /cairn-server ./cmd/cairn-server + +FROM alpine:3.21 +RUN apk add --no-cache ca-certificates tzdata +COPY --from=builder /cairn-server /usr/local/bin/cairn-server +ENTRYPOINT ["cairn-server"] diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..0645417 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,38 @@ +services: + postgres: + image: postgres:16-alpine + environment: + POSTGRES_DB: cairn + POSTGRES_USER: cairn + POSTGRES_PASSWORD: cairn + ports: + - "5432:5432" + volumes: + - pgdata:/var/lib/postgresql/data + + minio: + image: minio/minio:latest + command: server /data --console-address ":9001" + environment: + MINIO_ROOT_USER: minioadmin + MINIO_ROOT_PASSWORD: minioadmin + ports: + - "9000:9000" + - "9001:9001" + volumes: + - miniodata:/data + + minio-init: + image: minio/mc:latest + depends_on: + - minio + entrypoint: > + /bin/sh -c " + sleep 2; + mc alias set local http://minio:9000 minioadmin minioadmin; + mc mb --ignore-existing local/cairn-artifacts; + " + +volumes: + pgdata: + miniodata: diff --git a/go.mod b/go.mod new file mode 100644 index 0000000..b8a7975 --- /dev/null +++ b/go.mod @@ -0,0 +1,57 @@ +module github.com/mattnite/cairn + +go 1.25.3 + +require ( + github.com/gin-gonic/gin v1.12.0 + github.com/jackc/pgx/v5 v5.8.0 + github.com/minio/minio-go/v7 v7.0.98 +) + +require ( + github.com/bytedance/gopkg v0.1.3 // indirect + github.com/bytedance/sonic v1.15.0 // indirect + github.com/bytedance/sonic/loader v0.5.0 // indirect + github.com/cloudwego/base64x v0.1.6 // indirect + github.com/dustin/go-humanize v1.0.1 // indirect + github.com/gabriel-vasile/mimetype v1.4.12 // indirect + github.com/gin-contrib/sse v1.1.0 // indirect + github.com/go-ini/ini v1.67.0 // indirect + github.com/go-playground/locales v0.14.1 // indirect + github.com/go-playground/universal-translator v0.18.1 // indirect + github.com/go-playground/validator/v10 v10.30.1 // indirect + github.com/goccy/go-json 
v0.10.5 // indirect + github.com/goccy/go-yaml v1.19.2 // indirect + github.com/google/uuid v1.6.0 // indirect + github.com/jackc/pgpassfile v1.0.0 // indirect + github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect + github.com/jackc/puddle/v2 v2.2.2 // indirect + github.com/json-iterator/go v1.1.12 // indirect + github.com/klauspost/compress v1.18.2 // indirect + github.com/klauspost/cpuid/v2 v2.3.0 // indirect + github.com/klauspost/crc32 v1.3.0 // indirect + github.com/leodido/go-urn v1.4.0 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/minio/crc64nvme v1.1.1 // indirect + github.com/minio/md5-simd v1.1.2 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/pelletier/go-toml/v2 v2.2.4 // indirect + github.com/philhofer/fwd v1.2.0 // indirect + github.com/quic-go/qpack v0.6.0 // indirect + github.com/quic-go/quic-go v0.59.0 // indirect + github.com/rogpeppe/go-internal v1.14.1 // indirect + github.com/rs/xid v1.6.0 // indirect + github.com/tinylib/msgp v1.6.1 // indirect + github.com/twitchyliquid64/golang-asm v0.15.1 // indirect + github.com/ugorji/go/codec v1.3.1 // indirect + go.mongodb.org/mongo-driver/v2 v2.5.0 // indirect + go.yaml.in/yaml/v3 v3.0.4 // indirect + golang.org/x/arch v0.22.0 // indirect + golang.org/x/crypto v0.48.0 // indirect + golang.org/x/net v0.51.0 // indirect + golang.org/x/sync v0.19.0 // indirect + golang.org/x/sys v0.41.0 // indirect + golang.org/x/text v0.34.0 // indirect + google.golang.org/protobuf v1.36.10 // indirect +) diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..af5a7b8 --- /dev/null +++ b/go.sum @@ -0,0 +1,133 @@ +github.com/bytedance/gopkg v0.1.3 h1:TPBSwH8RsouGCBcMBktLt1AymVo2TVsBVCY4b6TnZ/M= +github.com/bytedance/gopkg v0.1.3/go.mod h1:576VvJ+eJgyCzdjS+c4+77QF3p7ubbtiKARP3TxducM= +github.com/bytedance/sonic v1.15.0 
h1:/PXeWFaR5ElNcVE84U0dOHjiMHQOwNIx3K4ymzh/uSE= +github.com/bytedance/sonic v1.15.0/go.mod h1:tFkWrPz0/CUCLEF4ri4UkHekCIcdnkqXw9VduqpJh0k= +github.com/bytedance/sonic/loader v0.5.0 h1:gXH3KVnatgY7loH5/TkeVyXPfESoqSBSBEiDd5VjlgE= +github.com/bytedance/sonic/loader v0.5.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= +github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M= +github.com/cloudwego/base64x v0.1.6/go.mod h1:OFcloc187FXDaYHvrNIjxSe8ncn0OOM8gEHfghB2IPU= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= +github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= +github.com/gabriel-vasile/mimetype v1.4.12 h1:e9hWvmLYvtp846tLHam2o++qitpguFiYCKbn0w9jyqw= +github.com/gabriel-vasile/mimetype v1.4.12/go.mod h1:d+9Oxyo1wTzWdyVUPMmXFvp4F9tea18J8ufA774AB3s= +github.com/gin-contrib/sse v1.1.0 h1:n0w2GMuUpWDVp7qSpvze6fAu9iRxJY4Hmj6AmBOU05w= +github.com/gin-contrib/sse v1.1.0/go.mod h1:hxRZ5gVpWMT7Z0B0gSNYqqsSCNIJMjzvm6fqCz9vjwM= +github.com/gin-gonic/gin v1.12.0 h1:b3YAbrZtnf8N//yjKeU2+MQsh2mY5htkZidOM7O0wG8= +github.com/gin-gonic/gin v1.12.0/go.mod h1:VxccKfsSllpKshkBWgVgRniFFAzFb9csfngsqANjnLc= +github.com/go-ini/ini v1.67.0 h1:z6ZrTEZqSWOTyH2FlglNbNgARyHG8oLW9gMELqKr06A= +github.com/go-ini/ini v1.67.0/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8= +github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= +github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= +github.com/go-playground/locales v0.14.1/go.mod 
h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= +github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= +github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= +github.com/go-playground/validator/v10 v10.30.1 h1:f3zDSN/zOma+w6+1Wswgd9fLkdwy06ntQJp0BBvFG0w= +github.com/go-playground/validator/v10 v10.30.1/go.mod h1:oSuBIQzuJxL//3MelwSLD5hc2Tu889bF0Idm9Dg26cM= +github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4= +github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= +github.com/goccy/go-yaml v1.19.2 h1:PmFC1S6h8ljIz6gMRBopkjP1TVT7xuwrButHID66PoM= +github.com/goccy/go-yaml v1.19.2/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= +github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= +github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo= +github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= +github.com/jackc/pgx/v5 v5.8.0 h1:TYPDoleBBme0xGSAX3/+NujXXtpZn9HBONkQC7IEZSo= +github.com/jackc/pgx/v5 v5.8.0/go.mod h1:QVeDInX2m9VyzvNeiCJVjCkNFqzsNb43204HshNSZKw= +github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo= +github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= 
+github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= +github.com/klauspost/compress v1.18.2 h1:iiPHWW0YrcFgpBYhsA6D1+fqHssJscY/Tm/y2Uqnapk= +github.com/klauspost/compress v1.18.2/go.mod h1:R0h/fSBs8DE4ENlcrlib3PsXS61voFxhIs2DeRhCvJ4= +github.com/klauspost/cpuid/v2 v2.0.1/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= +github.com/klauspost/cpuid/v2 v2.3.0 h1:S4CRMLnYUhGeDFDqkGriYKdfoFlDnMtqTiI/sFzhA9Y= +github.com/klauspost/cpuid/v2 v2.3.0/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0= +github.com/klauspost/crc32 v1.3.0 h1:sSmTt3gUt81RP655XGZPElI0PelVTZ6YwCRnPSupoFM= +github.com/klauspost/crc32 v1.3.0/go.mod h1:D7kQaZhnkX/Y0tstFGf8VUzv2UofNGqCjnC3zdHB0Hw= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= +github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/minio/crc64nvme v1.1.1 h1:8dwx/Pz49suywbO+auHCBpCtlW1OfpcLN7wYgVR6wAI= +github.com/minio/crc64nvme v1.1.1/go.mod h1:eVfm2fAzLlxMdUGc0EEBGSMmPwmXD5XiNRpnu9J3bvg= +github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34= +github.com/minio/md5-simd v1.1.2/go.mod h1:MzdKDxYpY2BT9XQFocsiZf/NKVtR7nkE4RoEpN+20RM= +github.com/minio/minio-go/v7 v7.0.98 h1:MeAVKjLVz+XJ28zFcuYyImNSAh8Mq725uNW4beRisi0= +github.com/minio/minio-go/v7 v7.0.98/go.mod h1:cY0Y+W7yozf0mdIclrttzo1Iiu7mEf9y7nk2uXqMOvM= 
+github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4= +github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY= +github.com/philhofer/fwd v1.2.0 h1:e6DnBTl7vGY+Gz322/ASL4Gyp1FspeMvx1RNDoToZuM= +github.com/philhofer/fwd v1.2.0/go.mod h1:RqIHx9QI14HlwKwm98g9Re5prTQ6LdeRQn+gXJFxsJM= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/quic-go/qpack v0.6.0 h1:g7W+BMYynC1LbYLSqRt8PBg5Tgwxn214ZZR34VIOjz8= +github.com/quic-go/qpack v0.6.0/go.mod h1:lUpLKChi8njB4ty2bFLX2x4gzDqXwUpaO1DP9qMDZII= +github.com/quic-go/quic-go v0.59.0 h1:OLJkp1Mlm/aS7dpKgTc6cnpynnD2Xg7C1pwL6vy/SAw= +github.com/quic-go/quic-go v0.59.0/go.mod h1:upnsH4Ju1YkqpLXC305eW3yDZ4NfnNbmQRCMWS58IKU= +github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= +github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= +github.com/rs/xid v1.6.0 h1:fV591PaemRlL6JfRxGDEPl69wICngIQ3shQtzfy2gxU= +github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod 
h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= +github.com/tinylib/msgp v1.6.1 h1:ESRv8eL3u+DNHUoSAAQRE50Hm162zqAnBoGv9PzScPY= +github.com/tinylib/msgp v1.6.1/go.mod h1:RSp0LW9oSxFut3KzESt5Voq4GVWyS+PSulT77roAqEA= +github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= +github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= +github.com/ugorji/go/codec v1.3.1 h1:waO7eEiFDwidsBN6agj1vJQ4AG7lh2yqXyOXqhgQuyY= +github.com/ugorji/go/codec v1.3.1/go.mod h1:pRBVtBSKl77K30Bv8R2P+cLSGaTtex6fsA2Wjqmfxj4= +go.mongodb.org/mongo-driver/v2 v2.5.0 h1:yXUhImUjjAInNcpTcAlPHiT7bIXhshCTL3jVBkF3xaE= +go.mongodb.org/mongo-driver/v2 v2.5.0/go.mod h1:yOI9kBsufol30iFsl1slpdq1I0eHPzybRWdyYUs8K/0= +go.uber.org/mock v0.6.0 h1:hyF9dfmbgIX5EfOdasqLsWD6xqpNZlXblLB/Dbnwv3Y= +go.uber.org/mock v0.6.0/go.mod h1:KiVJ4BqZJaMj4svdfmHM0AUx4NJYO8ZNpPnZn1Z+BBU= +go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc= +go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg= +golang.org/x/arch v0.22.0 h1:c/Zle32i5ttqRXjdLyyHZESLD/bB90DCU1g9l/0YBDI= +golang.org/x/arch v0.22.0/go.mod 
h1:dNHoOeKiyja7GTvF9NJS1l3Z2yntpQNzgrjh1cU103A= +golang.org/x/crypto v0.48.0 h1:/VRzVqiRSggnhY7gNRxPauEQ5Drw9haKdM0jqfcCFts= +golang.org/x/crypto v0.48.0/go.mod h1:r0kV5h3qnFPlQnBSrULhlsRfryS2pmewsg+XfMgkVos= +golang.org/x/net v0.51.0 h1:94R/GTO7mt3/4wIKpcR5gkGmRLOuE/2hNGeWq/GBIFo= +golang.org/x/net v0.51.0/go.mod h1:aamm+2QF5ogm02fjy5Bb7CQ0WMt1/WVM7FtyaTLlA9Y= +golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4= +golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k= +golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/text v0.34.0 h1:oL/Qq0Kdaqxa1KbNeMKwQq0reLCCaFtqu2eNuSeNHbk= +golang.org/x/text v0.34.0/go.mod h1:homfLqTYRFyVYemLBFl5GgL/DWEiH5wcsQ5gSh1yziA= +google.golang.org/protobuf v1.36.10 h1:AYd7cD/uASjIL6Q9LiTjz8JLcrh/88q5UObnmY3aOOE= +google.golang.org/protobuf v1.36.10/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/internal/blob/blob.go b/internal/blob/blob.go new file mode 100644 index 0000000..355785f --- /dev/null +++ b/internal/blob/blob.go @@ -0,0 +1,12 @@ +package blob + +import ( + "context" + "io" +) + +type Store interface { + Put(ctx context.Context, key string, reader io.Reader, size int64) error + Get(ctx 
context.Context, key string) (io.ReadCloser, error) + Delete(ctx context.Context, key string) error +} diff --git a/internal/blob/s3.go b/internal/blob/s3.go new file mode 100644 index 0000000..6ca8017 --- /dev/null +++ b/internal/blob/s3.go @@ -0,0 +1,63 @@ +package blob + +import ( + "context" + "fmt" + "io" + + "github.com/minio/minio-go/v7" + "github.com/minio/minio-go/v7/pkg/credentials" +) + +type S3Store struct { + client *minio.Client + bucket string +} + +func NewS3Store(endpoint, accessKey, secretKey, bucket string, useSSL bool) (*S3Store, error) { + client, err := minio.New(endpoint, &minio.Options{ + Creds: credentials.NewStaticV4(accessKey, secretKey, ""), + Secure: useSSL, + }) + if err != nil { + return nil, fmt.Errorf("creating S3 client: %w", err) + } + + return &S3Store{client: client, bucket: bucket}, nil +} + +func (s *S3Store) EnsureBucket(ctx context.Context) error { + exists, err := s.client.BucketExists(ctx, s.bucket) + if err != nil { + return fmt.Errorf("checking bucket: %w", err) + } + if !exists { + if err := s.client.MakeBucket(ctx, s.bucket, minio.MakeBucketOptions{}); err != nil { + return fmt.Errorf("creating bucket: %w", err) + } + } + return nil +} + +func (s *S3Store) Put(ctx context.Context, key string, reader io.Reader, size int64) error { + _, err := s.client.PutObject(ctx, s.bucket, key, reader, size, minio.PutObjectOptions{}) + if err != nil { + return fmt.Errorf("uploading object %s: %w", key, err) + } + return nil +} + +func (s *S3Store) Get(ctx context.Context, key string) (io.ReadCloser, error) { + obj, err := s.client.GetObject(ctx, s.bucket, key, minio.GetObjectOptions{}) + if err != nil { + return nil, fmt.Errorf("getting object %s: %w", key, err) + } + return obj, nil +} + +func (s *S3Store) Delete(ctx context.Context, key string) error { + if err := s.client.RemoveObject(ctx, s.bucket, key, minio.RemoveObjectOptions{}); err != nil { + return fmt.Errorf("deleting object %s: %w", key, err) + } + return nil +} diff 
--git a/internal/config/config.go b/internal/config/config.go new file mode 100644 index 0000000..6ad1931 --- /dev/null +++ b/internal/config/config.go @@ -0,0 +1,63 @@ +package config + +import ( + "fmt" + "os" + "strconv" +) + +type Config struct { + ListenAddr string + + DatabaseURL string + + S3Endpoint string + S3Bucket string + S3AccessKey string + S3SecretKey string + S3UseSSL bool + + ForgejoURL string + ForgejoToken string + ForgejoWebhookSecret string +} + +func Load() (*Config, error) { + c := &Config{ + ListenAddr: envOr("CAIRN_LISTEN_ADDR", ":8080"), + DatabaseURL: envOr("CAIRN_DATABASE_URL", "postgres://cairn:cairn@localhost:5432/cairn?sslmode=disable"), + S3Endpoint: envOr("CAIRN_S3_ENDPOINT", "localhost:9000"), + S3Bucket: envOr("CAIRN_S3_BUCKET", "cairn-artifacts"), + S3AccessKey: envOr("CAIRN_S3_ACCESS_KEY", "minioadmin"), + S3SecretKey: envOr("CAIRN_S3_SECRET_KEY", "minioadmin"), + S3UseSSL: envBool("CAIRN_S3_USE_SSL", false), + ForgejoURL: envOr("CAIRN_FORGEJO_URL", ""), + ForgejoToken: envOr("CAIRN_FORGEJO_TOKEN", ""), + ForgejoWebhookSecret: envOr("CAIRN_FORGEJO_WEBHOOK_SECRET", ""), + } + + if c.DatabaseURL == "" { + return nil, fmt.Errorf("CAIRN_DATABASE_URL is required") + } + + return c, nil +} + +func envOr(key, fallback string) string { + if v := os.Getenv(key); v != "" { + return v + } + return fallback +} + +func envBool(key string, fallback bool) bool { + v := os.Getenv(key) + if v == "" { + return fallback + } + b, err := strconv.ParseBool(v) + if err != nil { + return fallback + } + return b +} diff --git a/internal/database/database.go b/internal/database/database.go new file mode 100644 index 0000000..953948b --- /dev/null +++ b/internal/database/database.go @@ -0,0 +1,27 @@ +package database + +import ( + "context" + "fmt" + + "github.com/jackc/pgx/v5/pgxpool" +) + +func Connect(ctx context.Context, databaseURL string) (*pgxpool.Pool, error) { + config, err := pgxpool.ParseConfig(databaseURL) + if err != nil { + return nil, 
fmt.Errorf("parsing database URL: %w", err) + } + + pool, err := pgxpool.NewWithConfig(ctx, config) + if err != nil { + return nil, fmt.Errorf("creating connection pool: %w", err) + } + + if err := pool.Ping(ctx); err != nil { + pool.Close() + return nil, fmt.Errorf("pinging database: %w", err) + } + + return pool, nil +} diff --git a/internal/database/migrate.go b/internal/database/migrate.go new file mode 100644 index 0000000..f64ac8e --- /dev/null +++ b/internal/database/migrate.go @@ -0,0 +1,83 @@ +package database + +import ( + "context" + "embed" + "fmt" + "io/fs" + "log" + "sort" + "strings" + + "github.com/jackc/pgx/v5/pgxpool" +) + +//go:embed migrations/*.sql +var migrationsFS embed.FS + +func Migrate(ctx context.Context, pool *pgxpool.Pool) error { + _, err := pool.Exec(ctx, ` + CREATE TABLE IF NOT EXISTS schema_migrations ( + version TEXT PRIMARY KEY, + applied_at TIMESTAMPTZ NOT NULL DEFAULT NOW() + ) + `) + if err != nil { + return fmt.Errorf("creating migrations table: %w", err) + } + + entries, err := fs.ReadDir(migrationsFS, "migrations") + if err != nil { + return fmt.Errorf("reading migrations directory: %w", err) + } + + // Sort by filename to ensure order. 
+ sort.Slice(entries, func(i, j int) bool { + return entries[i].Name() < entries[j].Name() + }) + + for _, entry := range entries { + if entry.IsDir() || !strings.HasSuffix(entry.Name(), ".sql") { + continue + } + + version := strings.TrimSuffix(entry.Name(), ".sql") + + var exists bool + err := pool.QueryRow(ctx, "SELECT EXISTS(SELECT 1 FROM schema_migrations WHERE version = $1)", version).Scan(&exists) + if err != nil { + return fmt.Errorf("checking migration %s: %w", version, err) + } + if exists { + continue + } + + sql, err := migrationsFS.ReadFile("migrations/" + entry.Name()) + if err != nil { + return fmt.Errorf("reading migration %s: %w", version, err) + } + + tx, err := pool.Begin(ctx) + if err != nil { + return fmt.Errorf("beginning transaction for %s: %w", version, err) + } + + if _, err := tx.Exec(ctx, string(sql)); err != nil { + tx.Rollback(ctx) + return fmt.Errorf("executing migration %s: %w", version, err) + } + + if _, err := tx.Exec(ctx, "INSERT INTO schema_migrations (version) VALUES ($1)", version); err != nil { + tx.Rollback(ctx) + return fmt.Errorf("recording migration %s: %w", version, err) + } + + if err := tx.Commit(ctx); err != nil { + return fmt.Errorf("committing migration %s: %w", version, err) + } + + log.Printf("Applied migration: %s", version) + } + + return nil +} diff --git a/internal/database/migrations/001_initial.sql b/internal/database/migrations/001_initial.sql new file mode 100644 index 0000000..4d6c60b --- /dev/null +++ b/internal/database/migrations/001_initial.sql @@ -0,0 +1,56 @@ +CREATE EXTENSION IF NOT EXISTS pgcrypto; + +CREATE TABLE repositories ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + name TEXT NOT NULL UNIQUE, + owner TEXT NOT NULL, + forgejo_url TEXT, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); + +CREATE TABLE commits ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + repository_id UUID NOT NULL REFERENCES repositories(id), + sha TEXT NOT NULL, 
+ author TEXT, + message TEXT, + branch TEXT, + committed_at TIMESTAMPTZ, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + UNIQUE (repository_id, sha) +); + +CREATE INDEX idx_commits_repo_sha ON commits (repository_id, sha); + +CREATE TABLE builds ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + repository_id UUID NOT NULL REFERENCES repositories(id), + commit_id UUID NOT NULL REFERENCES commits(id), + builder TEXT, + build_flags TEXT, + tags JSONB DEFAULT '{}', + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); + +CREATE INDEX idx_builds_commit ON builds (commit_id); + +CREATE TABLE artifacts ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + repository_id UUID NOT NULL REFERENCES repositories(id), + commit_id UUID NOT NULL REFERENCES commits(id), + build_id UUID REFERENCES builds(id), + type TEXT NOT NULL, + blob_key TEXT NOT NULL, + blob_size BIGINT NOT NULL, + crash_message TEXT, + stack_trace TEXT, + tags JSONB DEFAULT '{}', + metadata JSONB DEFAULT '{}', + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); + +CREATE INDEX idx_artifacts_repo ON artifacts (repository_id); +CREATE INDEX idx_artifacts_commit ON artifacts (commit_id); +CREATE INDEX idx_artifacts_type ON artifacts (type); +CREATE INDEX idx_artifacts_created ON artifacts (created_at DESC); diff --git a/internal/database/migrations/002_crash_groups.sql b/internal/database/migrations/002_crash_groups.sql new file mode 100644 index 0000000..2a05088 --- /dev/null +++ b/internal/database/migrations/002_crash_groups.sql @@ -0,0 +1,56 @@ +CREATE TABLE crash_signatures ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + repository_id UUID NOT NULL REFERENCES repositories(id), + fingerprint TEXT NOT NULL, + sample_trace TEXT, + first_seen_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + last_seen_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + occurrence_count INT NOT NULL DEFAULT 1, + UNIQUE (repository_id, fingerprint) +); + +CREATE INDEX idx_crash_signatures_repo ON crash_signatures (repository_id); +CREATE 
INDEX idx_crash_signatures_last_seen ON crash_signatures (last_seen_at DESC); + +CREATE TABLE crash_groups ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + crash_signature_id UUID NOT NULL UNIQUE REFERENCES crash_signatures(id), + repository_id UUID NOT NULL REFERENCES repositories(id), + title TEXT NOT NULL, + status TEXT NOT NULL DEFAULT 'open', + forgejo_issue_id INT, + forgejo_issue_url TEXT, + first_seen_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + last_seen_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); + +CREATE INDEX idx_crash_groups_repo ON crash_groups (repository_id); +CREATE INDEX idx_crash_groups_status ON crash_groups (status); +CREATE INDEX idx_crash_groups_sig ON crash_groups (crash_signature_id); + +ALTER TABLE artifacts ADD COLUMN signature_id UUID REFERENCES crash_signatures(id); +ALTER TABLE artifacts ADD COLUMN fingerprint TEXT; + +CREATE INDEX idx_artifacts_signature ON artifacts (signature_id); +CREATE INDEX idx_artifacts_fingerprint ON artifacts (fingerprint); + +-- Full-text search support +ALTER TABLE artifacts ADD COLUMN search_vector tsvector; + +CREATE INDEX idx_artifacts_search ON artifacts USING GIN (search_vector); + +CREATE OR REPLACE FUNCTION artifacts_search_update() RETURNS trigger AS $$ +BEGIN + NEW.search_vector := + setweight(to_tsvector('english', COALESCE(NEW.crash_message, '')), 'A') || + setweight(to_tsvector('english', COALESCE(NEW.stack_trace, '')), 'B') || + setweight(to_tsvector('english', COALESCE(NEW.type, '')), 'C'); + RETURN NEW; +END; +$$ LANGUAGE plpgsql; + +CREATE TRIGGER artifacts_search_trigger + BEFORE INSERT OR UPDATE ON artifacts + FOR EACH ROW EXECUTE FUNCTION artifacts_search_update(); diff --git a/internal/database/migrations/003_campaigns.sql b/internal/database/migrations/003_campaigns.sql new file mode 100644 index 0000000..1d7c763 --- /dev/null +++ b/internal/database/migrations/003_campaigns.sql @@ -0,0 
+1,18 @@ +CREATE TABLE campaigns ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + repository_id UUID NOT NULL REFERENCES repositories(id), + name TEXT NOT NULL, + type TEXT NOT NULL, + status TEXT NOT NULL DEFAULT 'running', + started_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + finished_at TIMESTAMPTZ, + tags JSONB DEFAULT '{}', + metadata JSONB DEFAULT '{}', + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); + +CREATE INDEX idx_campaigns_repo ON campaigns (repository_id); +CREATE INDEX idx_campaigns_status ON campaigns (status); + +ALTER TABLE artifacts ADD COLUMN campaign_id UUID REFERENCES campaigns(id); +CREATE INDEX idx_artifacts_campaign ON artifacts (campaign_id); diff --git a/internal/fingerprint/fingerprint.go b/internal/fingerprint/fingerprint.go new file mode 100644 index 0000000..32d3cc7 --- /dev/null +++ b/internal/fingerprint/fingerprint.go @@ -0,0 +1,70 @@ +package fingerprint + +// Frame represents a single stack frame parsed from a crash report. +type Frame struct { + Index int + Address string + Function string + File string + Line int + Module string + Inline bool +} + +// NormalizedFrame is a frame after normalization for stable fingerprinting. +type NormalizedFrame struct { + Function string + File string +} + +// Result contains the output of the fingerprinting pipeline. +type Result struct { + Fingerprint string + Frames []Frame + Normalized []NormalizedFrame + Parser string +} + +// Compute runs the full fingerprinting pipeline on raw crash text: +// parse -> normalize -> hash. +func Compute(raw string) *Result { + frames, parser := Parse(raw) + if len(frames) == 0 { + return nil + } + + normalized := Normalize(frames) + if len(normalized) == 0 { + return nil + } + + fp := Hash(normalized) + + return &Result{ + Fingerprint: fp, + Frames: frames, + Normalized: normalized, + Parser: parser, + } +} + +// Parse tries each parser in priority order and returns the first successful result. 
+func Parse(raw string) ([]Frame, string) { + parsers := []struct { + name string + fn func(string) []Frame + }{ + {"asan", ParseASan}, + {"gdb", ParseGDB}, + {"zig", ParseZig}, + {"generic", ParseGeneric}, + } + + for _, p := range parsers { + frames := p.fn(raw) + if len(frames) > 0 { + return frames, p.name + } + } + return nil, "" +} diff --git a/internal/fingerprint/fingerprint_test.go b/internal/fingerprint/fingerprint_test.go new file mode 100644 index 0000000..5990745 --- /dev/null +++ b/internal/fingerprint/fingerprint_test.go @@ -0,0 +1,170 @@ +package fingerprint + +import ( + "testing" +) + +const asanTrace = `==12345==ERROR: AddressSanitizer: heap-buffer-overflow on address 0x602000000014 +READ of size 4 at 0x602000000014 thread T0 + #0 0x55a3b4c2d123 in vulnerable_func /home/user/project/src/parser.c:42:13 + #1 0x55a3b4c2e456 in process_input /home/user/project/src/main.c:108:5 + #2 0x55a3b4c2f789 in main /home/user/project/src/main.c:210:12 + #3 0x7f1234567890 in __libc_start_main /build/glibc/csu/../csu/libc-start.c:308:16 + #4 0x55a3b4c2a000 in _start (/home/user/project/build/app+0x2a000) +` + +const asanTrace2 = `==99999==ERROR: AddressSanitizer: heap-buffer-overflow on address 0xbeefcafe0014 +READ of size 4 at 0xbeefcafe0014 thread T0 + #0 0xdeadbeef1234 in vulnerable_func /different/path/to/parser.c:99:13 + #1 0xdeadbeef5678 in process_input /different/path/to/main.c:200:5 + #2 0xdeadbeef9abc in main /different/path/to/main.c:300:12 + #3 0x7fabcdef0000 in __libc_start_main /build/glibc/csu/../csu/libc-start.c:308:16 +` + +func TestASanParser(t *testing.T) { + frames := ParseASan(asanTrace) + if len(frames) == 0 { + t.Fatal("expected frames from ASan trace") + } + if frames[0].Function != "vulnerable_func" { + t.Errorf("expected function 'vulnerable_func', got %q", frames[0].Function) + } + if frames[0].Line != 42 { + t.Errorf("expected line 42, got %d", frames[0].Line) + } +} + +func TestASanFingerprint_StableAcrossAddressesAndPaths(t 
*testing.T) { + r1 := Compute(asanTrace) + r2 := Compute(asanTrace2) + + if r1 == nil || r2 == nil { + t.Fatal("expected non-nil results") + } + + if r1.Fingerprint != r2.Fingerprint { + t.Errorf("fingerprints should match across ASLR/path changes:\n %s\n %s", r1.Fingerprint, r2.Fingerprint) + } +} + +func TestASanFingerprint_DifferentFunctions(t *testing.T) { + different := `==12345==ERROR: AddressSanitizer: heap-use-after-free + #0 0x55a3b4c2d123 in other_function /home/user/project/src/parser.c:42:13 + #1 0x55a3b4c2e456 in process_input /home/user/project/src/main.c:108:5 +` + r1 := Compute(asanTrace) + r2 := Compute(different) + + if r1 == nil || r2 == nil { + t.Fatal("expected non-nil results") + } + + if r1.Fingerprint == r2.Fingerprint { + t.Error("fingerprints should differ for different stack traces") + } +} + +const gdbTrace = `#0 crash_here (ptr=0x0) at /home/user/src/crash.c:15 +#1 0x00005555555551a0 in process_data (buf=0x7fffffffe000, len=1024) at /home/user/src/process.c:89 +#2 0x0000555555555300 in main (argc=2, argv=0x7fffffffe1a8) at /home/user/src/main.c:42 +#3 0x00007ffff7c29d90 in __libc_start_call_main (main=0x555555555280, argc=2, argv=0x7fffffffe1a8) at ../sysdeps/nptl/libc_start_call_main.h:58 +` + +func TestGDBParser(t *testing.T) { + frames := ParseGDB(gdbTrace) + if len(frames) == 0 { + t.Fatal("expected frames from GDB trace") + } + if frames[0].Function != "crash_here" { + t.Errorf("expected function 'crash_here', got %q", frames[0].Function) + } +} + +const zigTrace = `thread 1 panic: index out of bounds +/home/user/src/parser.zig:42:13: 0x20da40 in parse (parser) +/home/user/src/main.zig:108:5: 0x20e100 in main (main) +/usr/lib/zig/std/start.zig:614:22: 0x20f000 in std.start.callMain (main) +` + +func TestZigParser(t *testing.T) { + frames := ParseZig(zigTrace) + if len(frames) == 0 { + t.Fatal("expected frames from Zig trace") + } + if frames[0].Function != "parse" { + t.Errorf("expected function 'parse', got %q", 
// itoa converts a non-negative int to its decimal string without
// pulling strconv into this test file.
//
// The previous implementation always emitted exactly two runes
// ('0'+i/10 and '0'+i%10), which produced a leading zero for i < 10
// (e.g. "05") and non-digit garbage runes for i >= 100.
func itoa(i int) string {
	if i == 0 {
		return "0"
	}
	var buf [20]byte // enough for any 64-bit int
	pos := len(buf)
	for i > 0 {
		pos--
		buf[pos] = byte('0' + i%10)
		i /= 10
	}
	return string(buf[pos:])
}
t.Errorf("expected 'do_something', got %q", frames[0].Function) + } +} diff --git a/internal/fingerprint/hash.go b/internal/fingerprint/hash.go new file mode 100644 index 0000000..480775a --- /dev/null +++ b/internal/fingerprint/hash.go @@ -0,0 +1,18 @@ +package fingerprint + +import ( + "crypto/sha256" + "fmt" + "strings" +) + +// Hash computes a stable SHA-256 fingerprint from normalized frames. +func Hash(frames []NormalizedFrame) string { + var parts []string + for _, f := range frames { + parts = append(parts, f.Function+"\x00"+f.File) + } + data := strings.Join(parts, "\n") + sum := sha256.Sum256([]byte(data)) + return fmt.Sprintf("%x", sum) +} diff --git a/internal/fingerprint/normalize.go b/internal/fingerprint/normalize.go new file mode 100644 index 0000000..a593a5a --- /dev/null +++ b/internal/fingerprint/normalize.go @@ -0,0 +1,106 @@ +package fingerprint + +import ( + "path/filepath" + "regexp" + "strings" +) + +const maxFrames = 8 + +var ( + hexAddrRe = regexp.MustCompile(`0x[0-9a-fA-F]+`) + templateParamRe = regexp.MustCompile(`<[^>]*>`) + abiTagRe = regexp.MustCompile(`\[abi:[^\]]*\]`) +) + +// runtimePrefixes are function prefixes for runtime/library frames to filter out. +var runtimePrefixes = []string{ + "__libc_", + "__GI_", + "_start", + "__clone", + "start_thread", + "__pthread_", + "__sigaction", + "_dl_", + "__tls_", + // glibc allocator internals + "__libc_malloc", + "__libc_free", + "malloc", + "free", + "realloc", + "calloc", + // ASan runtime + "__asan_", + "__sanitizer_", + "__interceptor_", + "__interception::", + // Zig std runtime + "std.debug.", + "std.start.", + "std.os.linux.", + "posixCallNative", +} + +// Normalize applies stability-oriented transformations to parsed frames. +func Normalize(frames []Frame) []NormalizedFrame { + var result []NormalizedFrame + + for _, f := range frames { + // Skip inline frames. + if f.Inline { + continue + } + + fn := f.Function + + // Skip runtime/library frames. 
+ if isRuntimeFrame(fn) { + continue + } + + // Strip hex addresses. + fn = hexAddrRe.ReplaceAllString(fn, "") + + // Strip C++ template parameters. + fn = templateParamRe.ReplaceAllString(fn, "<>") + + // Strip ABI tags. + fn = abiTagRe.ReplaceAllString(fn, "") + + // Clean up whitespace. + fn = strings.TrimSpace(fn) + + // Strip paths to just filename. + file := f.File + if file != "" { + file = filepath.Base(file) + } + + if fn == "" { + continue + } + + result = append(result, NormalizedFrame{ + Function: fn, + File: file, + }) + + if len(result) >= maxFrames { + break + } + } + + return result +} + +func isRuntimeFrame(fn string) bool { + for _, prefix := range runtimePrefixes { + if strings.HasPrefix(fn, prefix) { + return true + } + } + return false +} diff --git a/internal/fingerprint/parser_asan.go b/internal/fingerprint/parser_asan.go new file mode 100644 index 0000000..3a25a49 --- /dev/null +++ b/internal/fingerprint/parser_asan.go @@ -0,0 +1,66 @@ +package fingerprint + +import ( + "regexp" + "strconv" + "strings" +) + +// ASan/MSan/TSan/UBSan frame patterns: +// #0 0x55a3b4 in function_name /path/to/file.c:42:13 +// #0 0x55a3b4 in function_name (/path/to/binary+0x1234) +// #1 0x55a3b4 (/path/to/binary+0x1234) +var asanFrameRe = regexp.MustCompile( + `^\s*#(\d+)\s+(0x[0-9a-fA-F]+)\s+(?:in\s+(\S+)\s+)?(.*)$`, +) + +// ASan error header line, e.g.: +// ==12345==ERROR: AddressSanitizer: heap-buffer-overflow +var asanHeaderRe = regexp.MustCompile( + `==\d+==ERROR:\s+(Address|Memory|Thread|Undefined)Sanitizer`, +) + +// ParseASan parses AddressSanitizer, MemorySanitizer, ThreadSanitizer, +// and UndefinedBehaviorSanitizer stack traces. 
+func ParseASan(raw string) []Frame { + if !asanHeaderRe.MatchString(raw) { + return nil + } + + var frames []Frame + for _, line := range strings.Split(raw, "\n") { + m := asanFrameRe.FindStringSubmatch(line) + if m == nil { + continue + } + + idx, _ := strconv.Atoi(m[1]) + addr := m[2] + fn := m[3] + location := m[4] + + var file string + var lineNo int + + // Try to extract file:line from location. + if parts := strings.SplitN(location, ":", 3); len(parts) >= 2 { + // Could be /path/to/file.c:42 or (/binary+0x1234) + if !strings.HasPrefix(parts[0], "(") { + file = parts[0] + if len(parts) >= 2 { + lineNo, _ = strconv.Atoi(parts[1]) + } + } + } + + frames = append(frames, Frame{ + Index: idx, + Address: addr, + Function: fn, + File: strings.TrimSpace(file), + Line: lineNo, + }) + } + + return frames +} diff --git a/internal/fingerprint/parser_gdb.go b/internal/fingerprint/parser_gdb.go new file mode 100644 index 0000000..eeb0054 --- /dev/null +++ b/internal/fingerprint/parser_gdb.go @@ -0,0 +1,58 @@ +package fingerprint + +import ( + "regexp" + "strconv" + "strings" +) + +// GDB backtrace frame patterns: +// #0 function_name (args) at /path/to/file.c:42 +// #0 0x00007fff in function_name () from /lib/libfoo.so +// #0 0x00007fff in ?? () +var gdbFrameRe = regexp.MustCompile( + `^\s*#(\d+)\s+(?:(0x[0-9a-fA-F]+)\s+in\s+)?(\S+)\s*\(([^)]*)\)\s*(?:at\s+(\S+?)(?::(\d+))?)?(?:\s+from\s+(\S+))?`, +) + +// ParseGDB parses GDB/LLDB backtrace format. +func ParseGDB(raw string) []Frame { + if !strings.Contains(raw, "#0") { + return nil + } + + var frames []Frame + for _, line := range strings.Split(raw, "\n") { + m := gdbFrameRe.FindStringSubmatch(line) + if m == nil { + continue + } + + idx, _ := strconv.Atoi(m[1]) + addr := m[2] + fn := m[3] + // args := m[4] // ignored + file := m[5] + lineNo, _ := strconv.Atoi(m[6]) + module := m[7] + + // Skip unknown frames. + if fn == "??" 
{ + continue + } + + frames = append(frames, Frame{ + Index: idx, + Address: addr, + Function: fn, + File: file, + Line: lineNo, + Module: module, + }) + } + + if len(frames) < 2 { + return nil // Probably not a real GDB backtrace + } + + return frames +} diff --git a/internal/fingerprint/parser_generic.go b/internal/fingerprint/parser_generic.go new file mode 100644 index 0000000..df2b563 --- /dev/null +++ b/internal/fingerprint/parser_generic.go @@ -0,0 +1,57 @@ +package fingerprint + +import ( + "regexp" + "strconv" + "strings" +) + +// Generic fallback patterns for common crash formats. +var ( + // "at function_name (file.c:42)" or "in function_name at file.c:42" + genericAtRe = regexp.MustCompile( + `(?:at|in)\s+(\S+)\s+(?:\()?([^:)\s]+):(\d+)`, + ) + // "function_name+0x1234" (Linux kernel style, perf, etc.) + genericOffsetRe = regexp.MustCompile( + `^\s*(?:#\d+\s+)?(\S+)\+(0x[0-9a-fA-F]+)`, + ) +) + +// ParseGeneric is a fallback parser that tries heuristic patterns. +func ParseGeneric(raw string) []Frame { + var frames []Frame + + for _, line := range strings.Split(raw, "\n") { + line = strings.TrimSpace(line) + if line == "" { + continue + } + + if m := genericAtRe.FindStringSubmatch(line); m != nil { + lineNo, _ := strconv.Atoi(m[3]) + frames = append(frames, Frame{ + Function: m[1], + File: m[2], + Line: lineNo, + Index: len(frames), + }) + continue + } + + if m := genericOffsetRe.FindStringSubmatch(line); m != nil { + frames = append(frames, Frame{ + Function: m[1], + Address: m[2], + Index: len(frames), + }) + continue + } + } + + if len(frames) < 2 { + return nil + } + + return frames +} diff --git a/internal/fingerprint/parser_zig.go b/internal/fingerprint/parser_zig.go new file mode 100644 index 0000000..bec60b0 --- /dev/null +++ b/internal/fingerprint/parser_zig.go @@ -0,0 +1,58 @@ +package fingerprint + +import ( + "regexp" + "strconv" + "strings" +) + +// Zig panic/stack trace patterns: +// /path/to/file.zig:42:13: 0x1234 in function_name 
(module) +// ???:?:?: 0x1234 in ??? (???) +var zigFrameRe = regexp.MustCompile( + `^\s*(.+?):(\d+):\d+:\s+(0x[0-9a-fA-F]+)\s+in\s+(\S+)\s+\(([^)]*)\)`, +) + +// Zig panic header: "panic: ..." or "thread N panic: ..." +var zigPanicRe = regexp.MustCompile(`(?:thread \d+ )?panic: `) + +// ParseZig parses Zig panic stack traces. +func ParseZig(raw string) []Frame { + if !zigPanicRe.MatchString(raw) && !strings.Contains(raw, " in ") { + return nil + } + + var frames []Frame + for _, line := range strings.Split(raw, "\n") { + m := zigFrameRe.FindStringSubmatch(line) + if m == nil { + continue + } + + file := m[1] + lineNo, _ := strconv.Atoi(m[2]) + addr := m[3] + fn := m[4] + module := m[5] + + // Skip unknown frames. + if fn == "???" { + continue + } + + frames = append(frames, Frame{ + Address: addr, + Function: fn, + File: file, + Line: lineNo, + Module: module, + Index: len(frames), + }) + } + + if len(frames) < 1 { + return nil + } + + return frames +} diff --git a/internal/forgejo/client.go b/internal/forgejo/client.go new file mode 100644 index 0000000..3892eef --- /dev/null +++ b/internal/forgejo/client.go @@ -0,0 +1,154 @@ +package forgejo + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "time" +) + +type Client struct { + baseURL string + token string + httpClient *http.Client +} + +func NewClient(baseURL, token string) *Client { + return &Client{ + baseURL: baseURL, + token: token, + httpClient: &http.Client{ + Timeout: 30 * time.Second, + }, + } +} + +// Issue represents a Forgejo issue. +type Issue struct { + ID int64 `json:"id"` + Number int `json:"number"` + Title string `json:"title"` + Body string `json:"body"` + State string `json:"state"` + HTMLURL string `json:"html_url"` +} + +// CreateIssueRequest is the body for creating a Forgejo issue. 
+type CreateIssueRequest struct { + Title string `json:"title"` + Body string `json:"body"` + Labels []int64 `json:"labels,omitempty"` +} + +// CommitStatus represents a Forgejo commit status. +type CommitStatus struct { + State string `json:"state"` + TargetURL string `json:"target_url,omitempty"` + Description string `json:"description"` + Context string `json:"context"` +} + +// CreateIssue creates a new issue on a Forgejo repository. +func (c *Client) CreateIssue(ctx context.Context, owner, repo string, req CreateIssueRequest) (*Issue, error) { + path := fmt.Sprintf("/api/v1/repos/%s/%s/issues", owner, repo) + var issue Issue + if err := c.post(ctx, path, req, &issue); err != nil { + return nil, fmt.Errorf("creating issue: %w", err) + } + return &issue, nil +} + +// UpdateIssueState changes the state of an issue (open/closed). +func (c *Client) UpdateIssueState(ctx context.Context, owner, repo string, number int, state string) error { + path := fmt.Sprintf("/api/v1/repos/%s/%s/issues/%d", owner, repo, number) + body := map[string]string{"state": state} + return c.patch(ctx, path, body) +} + +// CreateCommitStatus posts a commit status (success/failure/pending). +func (c *Client) CreateCommitStatus(ctx context.Context, owner, repo, sha string, status CommitStatus) error { + path := fmt.Sprintf("/api/v1/repos/%s/%s/statuses/%s", owner, repo, sha) + return c.post(ctx, path, status, nil) +} + +// CommentOnIssue adds a comment to an issue. +func (c *Client) CommentOnIssue(ctx context.Context, owner, repo string, number int, body string) error { + path := fmt.Sprintf("/api/v1/repos/%s/%s/issues/%d/comments", owner, repo, number) + return c.post(ctx, path, map[string]string{"body": body}, nil) +} + +// CreateWebhook registers a webhook on a repository. 
+func (c *Client) CreateWebhook(ctx context.Context, owner, repo, targetURL, secret string) error { + path := fmt.Sprintf("/api/v1/repos/%s/%s/hooks", owner, repo) + body := map[string]any{ + "type": "forgejo", + "active": true, + "config": map[string]string{ + "url": targetURL, + "content_type": "json", + "secret": secret, + }, + "events": []string{"push", "issues", "pull_request"}, + } + return c.post(ctx, path, body, nil) +} + +func (c *Client) do(ctx context.Context, method, path string, body any) (*http.Response, error) { + var bodyReader io.Reader + if body != nil { + data, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(data) + } + + req, err := http.NewRequestWithContext(ctx, method, c.baseURL+path, bodyReader) + if err != nil { + return nil, err + } + + req.Header.Set("Content-Type", "application/json") + req.Header.Set("Accept", "application/json") + if c.token != "" { + req.Header.Set("Authorization", "token "+c.token) + } + + resp, err := c.httpClient.Do(req) + if err != nil { + return nil, err + } + + if resp.StatusCode >= 400 { + respBody, _ := io.ReadAll(resp.Body) + resp.Body.Close() + return nil, fmt.Errorf("forgejo API %s %s: %d %s", method, path, resp.StatusCode, string(respBody)) + } + + return resp, nil +} + +func (c *Client) post(ctx context.Context, path string, body any, result any) error { + resp, err := c.do(ctx, http.MethodPost, path, body) + if err != nil { + return err + } + defer resp.Body.Close() + + if result != nil { + return json.NewDecoder(resp.Body).Decode(result) + } + return nil +} + +func (c *Client) patch(ctx context.Context, path string, body any) error { + resp, err := c.do(ctx, http.MethodPatch, path, body) + if err != nil { + return err + } + resp.Body.Close() + return nil +} diff --git a/internal/forgejo/sync.go b/internal/forgejo/sync.go new file mode 100644 index 0000000..014c573 --- /dev/null +++ b/internal/forgejo/sync.go @@ -0,0 +1,84 @@ +package forgejo + +import ( + 
"context" + "fmt" + "log" + "strings" + + "github.com/jackc/pgx/v5/pgxpool" + "github.com/mattnite/cairn/internal/models" +) + +// Sync handles bidirectional state synchronization between Cairn and Forgejo. +type Sync struct { + Client *Client + Pool *pgxpool.Pool +} + +// CreateIssueForCrashGroup creates a Forgejo issue for a new crash group. +func (s *Sync) CreateIssueForCrashGroup(ctx context.Context, group *models.CrashGroup, sampleTrace string) error { + if s.Client == nil { + return nil + } + + repo, err := models.GetRepositoryByID(ctx, s.Pool, group.RepositoryID) + if err != nil { + return fmt.Errorf("getting repository: %w", err) + } + + body := fmt.Sprintf(`## Crash Group + +**Fingerprint:** `+"`%s`"+` +**First seen:** %s +**Type:** %s + +### Sample Stack Trace + +`+"```"+` +%s +`+"```"+` + +--- +*Auto-created by [Cairn](/) — crash artifact aggregator* +`, group.Fingerprint, group.FirstSeenAt.Format("2006-01-02 15:04:05"), group.Title, sampleTrace) + + issue, err := s.Client.CreateIssue(ctx, repo.Owner, repo.Name, CreateIssueRequest{ + Title: "[Cairn] " + group.Title, + Body: body, + }) + if err != nil { + return fmt.Errorf("creating issue: %w", err) + } + + return models.UpdateCrashGroupIssue(ctx, s.Pool, group.ID, issue.Number, issue.HTMLURL) +} + +// HandleIssueEvent processes a Forgejo issue webhook event for state sync. +func (s *Sync) HandleIssueEvent(ctx context.Context, event *WebhookEvent) error { + if event.Issue == nil { + return nil + } + + // Only handle issues that start with [Cairn] prefix. + if !strings.HasPrefix(event.Issue.Title, "[Cairn] ") { + return nil + } + + switch event.Action { + case "closed": + return models.ResolveCrashGroupByIssue(ctx, s.Pool, event.Issue.Number) + case "reopened": + return models.ReopenCrashGroupByIssue(ctx, s.Pool, event.Issue.Number) + } + + return nil +} + +// HandlePushEvent processes a push webhook event for commit enrichment. 
+func (s *Sync) HandlePushEvent(ctx context.Context, event *WebhookEvent) { + if event.Repo == nil || event.After == "" { + return + } + log.Printf("Push event: %s -> %s", event.Repo.FullName, event.After[:8]) +} diff --git a/internal/forgejo/webhooks.go b/internal/forgejo/webhooks.go new file mode 100644 index 0000000..08d2638 --- /dev/null +++ b/internal/forgejo/webhooks.go @@ -0,0 +1,80 @@ +package forgejo + +import ( + "crypto/hmac" + "crypto/sha256" + "encoding/hex" + "encoding/json" + "fmt" + "io" + "net/http" +) + +// WebhookEvent is the parsed payload from a Forgejo webhook. +type WebhookEvent struct { + Action string `json:"action"` + Issue *WebhookIssue `json:"issue,omitempty"` + Repo *WebhookRepo `json:"repository,omitempty"` + Sender *WebhookUser `json:"sender,omitempty"` + Ref string `json:"ref,omitempty"` + After string `json:"after,omitempty"` + Before string `json:"before,omitempty"` +} + +type WebhookIssue struct { + ID int64 `json:"id"` + Number int `json:"number"` + Title string `json:"title"` + State string `json:"state"` + HTMLURL string `json:"html_url"` +} + +type WebhookRepo struct { + ID int64 `json:"id"` + Name string `json:"name"` + FullName string `json:"full_name"` +} + +type WebhookUser struct { + Login string `json:"login"` +} + +// VerifyAndParse reads the webhook body, verifies the HMAC signature, and parses the event. 
// verifyHMAC reports whether signature is the hex-encoded HMAC-SHA256
// of body under secret. An empty signature never verifies; comparison
// uses hmac.Equal, which is constant-time.
func verifyHMAC(body []byte, signature, secret string) bool {
	if len(signature) == 0 {
		return false
	}
	h := hmac.New(sha256.New, []byte(secret))
	h.Write(body) // hash.Hash.Write never returns an error
	want := hex.EncodeToString(h.Sum(nil))
	return hmac.Equal([]byte(want), []byte(signature))
}
+ }) +} + +func (h *CampaignHandler) Detail(c *gin.Context) { + id := c.Param("id") + + campaign, err := models.GetCampaign(c.Request.Context(), h.Pool, id) + if err != nil { + c.JSON(http.StatusNotFound, gin.H{"error": "campaign not found"}) + return + } + + c.JSON(http.StatusOK, campaign) +} + +type CreateCampaignRequest struct { + Repository string `json:"repository" binding:"required"` + Owner string `json:"owner" binding:"required"` + Name string `json:"name" binding:"required"` + Type string `json:"type" binding:"required"` +} + +func (h *CampaignHandler) Create(c *gin.Context) { + var req CreateCampaignRequest + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + ctx := c.Request.Context() + + repo, err := models.GetOrCreateRepository(ctx, h.Pool, req.Owner, req.Repository) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()}) + return + } + + campaign, err := models.CreateCampaign(ctx, h.Pool, models.CreateCampaignParams{ + RepositoryID: repo.ID, + Name: req.Name, + Type: req.Type, + }) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()}) + return + } + + c.JSON(http.StatusCreated, campaign) +} + +func (h *CampaignHandler) Finish(c *gin.Context) { + id := c.Param("id") + if err := models.FinishCampaign(c.Request.Context(), h.Pool, id); err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()}) + return + } + c.JSON(http.StatusOK, gin.H{"status": "finished"}) +} diff --git a/internal/handler/crashgroups.go b/internal/handler/crashgroups.go new file mode 100644 index 0000000..7dabaea --- /dev/null +++ b/internal/handler/crashgroups.go @@ -0,0 +1,97 @@ +package handler + +import ( + "net/http" + "strconv" + + "github.com/gin-gonic/gin" + "github.com/jackc/pgx/v5/pgxpool" + "github.com/mattnite/cairn/internal/models" +) + +type CrashGroupHandler struct { + Pool *pgxpool.Pool +} + +type 
CrashGroupListResponse struct { + CrashGroups []models.CrashGroup `json:"crash_groups"` + Total int `json:"total"` + Limit int `json:"limit"` + Offset int `json:"offset"` +} + +func (h *CrashGroupHandler) List(c *gin.Context) { + limit, _ := strconv.Atoi(c.Query("limit")) + offset, _ := strconv.Atoi(c.Query("offset")) + if limit <= 0 { + limit = 50 + } + + groups, total, err := models.ListCrashGroups( + c.Request.Context(), h.Pool, + c.Query("repository_id"), c.Query("status"), + limit, offset, + ) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()}) + return + } + + if groups == nil { + groups = []models.CrashGroup{} + } + + c.JSON(http.StatusOK, CrashGroupListResponse{ + CrashGroups: groups, + Total: total, + Limit: limit, + Offset: offset, + }) +} + +func (h *CrashGroupHandler) Detail(c *gin.Context) { + id := c.Param("id") + + group, err := models.GetCrashGroup(c.Request.Context(), h.Pool, id) + if err != nil { + c.JSON(http.StatusNotFound, gin.H{"error": "crash group not found"}) + return + } + + c.JSON(http.StatusOK, group) +} + +type SearchHandler struct { + Pool *pgxpool.Pool +} + +func (h *SearchHandler) Search(c *gin.Context) { + q := c.Query("q") + if q == "" { + c.JSON(http.StatusBadRequest, gin.H{"error": "missing 'q' parameter"}) + return + } + + limit, _ := strconv.Atoi(c.Query("limit")) + offset, _ := strconv.Atoi(c.Query("offset")) + if limit <= 0 { + limit = 50 + } + + artifacts, total, err := models.SearchArtifacts(c.Request.Context(), h.Pool, q, limit, offset) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()}) + return + } + + if artifacts == nil { + artifacts = []models.Artifact{} + } + + c.JSON(http.StatusOK, gin.H{ + "artifacts": artifacts, + "total": total, + "limit": limit, + "offset": offset, + }) +} diff --git a/internal/handler/dashboard.go b/internal/handler/dashboard.go new file mode 100644 index 0000000..860ee46 --- /dev/null +++ b/internal/handler/dashboard.go @@ 
-0,0 +1,98 @@ +package handler + +import ( + "net/http" + "time" + + "github.com/gin-gonic/gin" + "github.com/jackc/pgx/v5/pgxpool" +) + +type DashboardHandler struct { + Pool *pgxpool.Pool +} + +type DashboardStats struct { + TotalArtifacts int `json:"total_artifacts"` + TotalRepos int `json:"total_repos"` + TotalCrashGroups int `json:"total_crash_groups"` + OpenCrashGroups int `json:"open_crash_groups"` + ActiveCampaigns int `json:"active_campaigns"` +} + +type TrendPoint struct { + Date string `json:"date"` + Count int `json:"count"` +} + +type TopCrasher struct { + Title string `json:"title"` + OccurrenceCount int `json:"occurrence_count"` + RepoName string `json:"repo_name"` + CrashGroupID string `json:"crash_group_id"` +} + +type DashboardResponse struct { + Stats DashboardStats `json:"stats"` + Trend []TrendPoint `json:"trend"` + TopCrashers []TopCrasher `json:"top_crashers"` +} + +func (h *DashboardHandler) Stats(c *gin.Context) { + ctx := c.Request.Context() + var stats DashboardStats + + h.Pool.QueryRow(ctx, "SELECT COUNT(*) FROM artifacts").Scan(&stats.TotalArtifacts) + h.Pool.QueryRow(ctx, "SELECT COUNT(*) FROM repositories").Scan(&stats.TotalRepos) + h.Pool.QueryRow(ctx, "SELECT COUNT(*) FROM crash_groups").Scan(&stats.TotalCrashGroups) + h.Pool.QueryRow(ctx, "SELECT COUNT(*) FROM crash_groups WHERE status = 'open'").Scan(&stats.OpenCrashGroups) + h.Pool.QueryRow(ctx, "SELECT COUNT(*) FROM campaigns WHERE status = 'running'").Scan(&stats.ActiveCampaigns) + + // Artifact trend for the last 30 days. 
+ var trend []TrendPoint + rows, err := h.Pool.Query(ctx, ` + SELECT DATE(created_at) as day, COUNT(*) + FROM artifacts + WHERE created_at >= $1 + GROUP BY day + ORDER BY day + `, time.Now().AddDate(0, 0, -30)) + if err == nil { + defer rows.Close() + for rows.Next() { + var tp TrendPoint + var d time.Time + if rows.Scan(&d, &tp.Count) == nil { + tp.Date = d.Format("2006-01-02") + trend = append(trend, tp) + } + } + } + + // Top crashers (most frequent open crash groups). + var topCrashers []TopCrasher + rows2, err := h.Pool.Query(ctx, ` + SELECT cg.id, cg.title, cs.occurrence_count, r.name + FROM crash_groups cg + JOIN crash_signatures cs ON cs.id = cg.crash_signature_id + JOIN repositories r ON r.id = cg.repository_id + WHERE cg.status = 'open' + ORDER BY cs.occurrence_count DESC + LIMIT 10 + `) + if err == nil { + defer rows2.Close() + for rows2.Next() { + var tc TopCrasher + if rows2.Scan(&tc.CrashGroupID, &tc.Title, &tc.OccurrenceCount, &tc.RepoName) == nil { + topCrashers = append(topCrashers, tc) + } + } + } + + c.JSON(http.StatusOK, DashboardResponse{ + Stats: stats, + Trend: trend, + TopCrashers: topCrashers, + }) +} diff --git a/internal/handler/ingest.go b/internal/handler/ingest.go new file mode 100644 index 0000000..62e5b9e --- /dev/null +++ b/internal/handler/ingest.go @@ -0,0 +1,152 @@ +package handler + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + + "github.com/gin-gonic/gin" + "github.com/jackc/pgx/v5/pgxpool" + "github.com/mattnite/cairn/internal/blob" + "github.com/mattnite/cairn/internal/fingerprint" + "github.com/mattnite/cairn/internal/forgejo" + "github.com/mattnite/cairn/internal/models" +) + +type IngestHandler struct { + Pool *pgxpool.Pool + Store blob.Store + ForgejoSync *forgejo.Sync +} + +type IngestRequest struct { + Repository string `json:"repository"` + Owner string `json:"owner"` + CommitSHA string `json:"commit_sha"` + Type string `json:"type"` + CrashMessage string `json:"crash_message,omitempty"` + StackTrace string 
`json:"stack_trace,omitempty"` + Tags json.RawMessage `json:"tags,omitempty"` + Metadata json.RawMessage `json:"metadata,omitempty"` +} + +func (h *IngestHandler) Create(c *gin.Context) { + metaJSON := c.PostForm("meta") + if metaJSON == "" { + c.JSON(http.StatusBadRequest, gin.H{"error": "missing 'meta' form field"}) + return + } + + var req IngestRequest + if err := json.Unmarshal([]byte(metaJSON), &req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": "invalid meta JSON: " + err.Error()}) + return + } + + if req.Repository == "" || req.Owner == "" || req.CommitSHA == "" || req.Type == "" { + c.JSON(http.StatusBadRequest, gin.H{"error": "repository, owner, commit_sha, and type are required"}) + return + } + + file, header, err := c.Request.FormFile("file") + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": "missing 'file' form field: " + err.Error()}) + return + } + defer file.Close() + + ctx := c.Request.Context() + + repo, err := models.GetOrCreateRepository(ctx, h.Pool, req.Owner, req.Repository) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()}) + return + } + + commit, err := models.GetOrCreateCommit(ctx, h.Pool, repo.ID, req.CommitSHA) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()}) + return + } + + blobKey := fmt.Sprintf("%s/%s/%s/%s", repo.Name, commit.SHA[:8], req.Type, header.Filename) + + if err := h.Store.Put(ctx, blobKey, file, header.Size); err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "storing blob: " + err.Error()}) + return + } + + var crashMsg, stackTrace *string + if req.CrashMessage != "" { + crashMsg = &req.CrashMessage + } + if req.StackTrace != "" { + stackTrace = &req.StackTrace + } + + artifact, err := models.CreateArtifact(ctx, h.Pool, models.CreateArtifactParams{ + RepositoryID: repo.ID, + CommitID: commit.ID, + Type: req.Type, + BlobKey: blobKey, + BlobSize: header.Size, + CrashMessage: crashMsg, + StackTrace: 
stackTrace, + Tags: req.Tags, + Metadata: req.Metadata, + }) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()}) + return + } + + // Run fingerprinting pipeline if we have a stack trace. + if req.StackTrace != "" { + if result := fingerprint.Compute(req.StackTrace); result != nil { + sig, isNew, err := models.GetOrCreateSignature(ctx, h.Pool, repo.ID, result.Fingerprint, stackTrace) + if err == nil { + models.UpdateArtifactSignature(ctx, h.Pool, artifact.ID, sig.ID, result.Fingerprint) + + if isNew { + title := req.Type + " crash in " + req.Repository + if len(result.Frames) > 0 { + title = req.Type + ": " + result.Frames[0].Function + } + group, groupErr := models.CreateCrashGroup(ctx, h.Pool, sig.ID, repo.ID, title) + if groupErr == nil && h.ForgejoSync != nil { + h.ForgejoSync.CreateIssueForCrashGroup(ctx, group, req.StackTrace) + } + } + } + } + } + + c.JSON(http.StatusCreated, artifact) +} + +type DownloadHandler struct { + Pool *pgxpool.Pool + Store blob.Store +} + +func (h *DownloadHandler) Download(c *gin.Context) { + id := c.Param("id") + + artifact, err := models.GetArtifact(c.Request.Context(), h.Pool, id) + if err != nil { + c.JSON(http.StatusNotFound, gin.H{"error": "artifact not found"}) + return + } + + reader, err := h.Store.Get(c.Request.Context(), artifact.BlobKey) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "reading blob: " + err.Error()}) + return + } + defer reader.Close() + + c.Header("Content-Disposition", fmt.Sprintf("attachment; filename=%q", artifact.BlobKey)) + c.Header("Content-Type", "application/octet-stream") + io.Copy(c.Writer, reader) +} diff --git a/internal/handler/regression.go b/internal/handler/regression.go new file mode 100644 index 0000000..3b93401 --- /dev/null +++ b/internal/handler/regression.go @@ -0,0 +1,64 @@ +package handler + +import ( + "fmt" + "net/http" + + "github.com/gin-gonic/gin" + "github.com/jackc/pgx/v5/pgxpool" + 
"github.com/mattnite/cairn/internal/forgejo" + "github.com/mattnite/cairn/internal/models" + "github.com/mattnite/cairn/internal/regression" +) + +type RegressionHandler struct { + Pool *pgxpool.Pool + ForgejoSync *forgejo.Sync +} + +type RegressionCheckRequest struct { + Repository string `json:"repository" binding:"required"` + BaseSHA string `json:"base_sha" binding:"required"` + HeadSHA string `json:"head_sha" binding:"required"` +} + +func (h *RegressionHandler) Check(c *gin.Context) { + var req RegressionCheckRequest + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + ctx := c.Request.Context() + + repo, err := models.GetRepositoryByName(ctx, h.Pool, req.Repository) + if err != nil { + c.JSON(http.StatusNotFound, gin.H{"error": "repository not found"}) + return + } + + result, err := regression.Compare(ctx, h.Pool, repo.ID, req.BaseSHA, req.HeadSHA) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()}) + return + } + result.RepoName = repo.Name + + // Post commit status to Forgejo if integration is configured. 
+ if h.ForgejoSync != nil && h.ForgejoSync.Client != nil { + state := "success" + description := "No new crash signatures" + if result.IsRegression { + state = "failure" + description = fmt.Sprintf("%d new crash signature(s) detected", len(result.New)) + } + + h.ForgejoSync.Client.CreateCommitStatus(ctx, repo.Owner, repo.Name, req.HeadSHA, forgejo.CommitStatus{ + State: state, + Description: description, + Context: "cairn/regression", + }) + } + + c.JSON(http.StatusOK, result) +} diff --git a/internal/handler/search.go b/internal/handler/search.go new file mode 100644 index 0000000..04bd842 --- /dev/null +++ b/internal/handler/search.go @@ -0,0 +1,64 @@ +package handler + +import ( + "net/http" + "strconv" + + "github.com/gin-gonic/gin" + "github.com/jackc/pgx/v5/pgxpool" + "github.com/mattnite/cairn/internal/models" +) + +type ArtifactHandler struct { + Pool *pgxpool.Pool +} + +type ArtifactListResponse struct { + Artifacts []models.Artifact `json:"artifacts"` + Total int `json:"total"` + Limit int `json:"limit"` + Offset int `json:"offset"` +} + +func (h *ArtifactHandler) List(c *gin.Context) { + limit, _ := strconv.Atoi(c.Query("limit")) + offset, _ := strconv.Atoi(c.Query("offset")) + if limit <= 0 { + limit = 50 + } + + artifacts, total, err := models.ListArtifacts(c.Request.Context(), h.Pool, models.ListArtifactsParams{ + RepositoryID: c.Query("repository_id"), + CommitSHA: c.Query("commit_sha"), + Type: c.Query("type"), + Limit: limit, + Offset: offset, + }) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()}) + return + } + + if artifacts == nil { + artifacts = []models.Artifact{} + } + + c.JSON(http.StatusOK, ArtifactListResponse{ + Artifacts: artifacts, + Total: total, + Limit: limit, + Offset: offset, + }) +} + +func (h *ArtifactHandler) Detail(c *gin.Context) { + id := c.Param("id") + + artifact, err := models.GetArtifact(c.Request.Context(), h.Pool, id) + if err != nil { + c.JSON(http.StatusNotFound, gin.H{"error": 
"artifact not found"}) + return + } + + c.JSON(http.StatusOK, artifact) +} diff --git a/internal/handler/webhooks.go b/internal/handler/webhooks.go new file mode 100644 index 0000000..4d57245 --- /dev/null +++ b/internal/handler/webhooks.go @@ -0,0 +1,36 @@ +package handler + +import ( + "log" + "net/http" + + "github.com/gin-gonic/gin" + "github.com/mattnite/cairn/internal/forgejo" +) + +type WebhookHandler struct { + Sync *forgejo.Sync + Secret string +} + +func (h *WebhookHandler) Handle(c *gin.Context) { + event, eventType, err := forgejo.VerifyAndParse(c.Request, h.Secret) + if err != nil { + log.Printf("Webhook error: %v", err) + c.JSON(http.StatusUnauthorized, gin.H{"error": err.Error()}) + return + } + + ctx := c.Request.Context() + + switch eventType { + case "issues": + if err := h.Sync.HandleIssueEvent(ctx, event); err != nil { + log.Printf("Issue event error: %v", err) + } + case "push": + h.Sync.HandlePushEvent(ctx, event) + } + + c.JSON(http.StatusOK, gin.H{"status": "ok"}) +} diff --git a/internal/models/artifact.go b/internal/models/artifact.go new file mode 100644 index 0000000..c43e865 --- /dev/null +++ b/internal/models/artifact.go @@ -0,0 +1,153 @@ +package models + +import ( + "context" + "encoding/json" + "fmt" + + "github.com/jackc/pgx/v5/pgxpool" +) + +type CreateArtifactParams struct { + RepositoryID string + CommitID string + BuildID *string + Type string + BlobKey string + BlobSize int64 + CrashMessage *string + StackTrace *string + Tags json.RawMessage + Metadata json.RawMessage +} + +func CreateArtifact(ctx context.Context, pool *pgxpool.Pool, p CreateArtifactParams) (*Artifact, error) { + if p.Tags == nil { + p.Tags = json.RawMessage("{}") + } + if p.Metadata == nil { + p.Metadata = json.RawMessage("{}") + } + + a := &Artifact{} + err := pool.QueryRow(ctx, ` + INSERT INTO artifacts (repository_id, commit_id, build_id, type, blob_key, blob_size, crash_message, stack_trace, tags, metadata) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, 
$10) + RETURNING id, repository_id, commit_id, build_id, type, blob_key, blob_size, crash_message, stack_trace, tags, metadata, created_at + `, p.RepositoryID, p.CommitID, p.BuildID, p.Type, p.BlobKey, p.BlobSize, p.CrashMessage, p.StackTrace, p.Tags, p.Metadata).Scan( + &a.ID, &a.RepositoryID, &a.CommitID, &a.BuildID, &a.Type, &a.BlobKey, &a.BlobSize, + &a.CrashMessage, &a.StackTrace, &a.Tags, &a.Metadata, &a.CreatedAt, + ) + if err != nil { + return nil, fmt.Errorf("creating artifact: %w", err) + } + return a, nil +} + +func GetArtifact(ctx context.Context, pool *pgxpool.Pool, id string) (*Artifact, error) { + a := &Artifact{} + err := pool.QueryRow(ctx, ` + SELECT a.id, a.repository_id, a.commit_id, a.build_id, a.type, a.blob_key, a.blob_size, + a.crash_message, a.stack_trace, a.tags, a.metadata, a.created_at, + r.name, c.sha + FROM artifacts a + JOIN repositories r ON r.id = a.repository_id + JOIN commits c ON c.id = a.commit_id + WHERE a.id = $1 + `, id).Scan( + &a.ID, &a.RepositoryID, &a.CommitID, &a.BuildID, &a.Type, &a.BlobKey, &a.BlobSize, + &a.CrashMessage, &a.StackTrace, &a.Tags, &a.Metadata, &a.CreatedAt, + &a.RepoName, &a.CommitSHA, + ) + if err != nil { + return nil, fmt.Errorf("getting artifact: %w", err) + } + return a, nil +} + +type ListArtifactsParams struct { + RepositoryID string + CommitSHA string + Type string + SignatureID string + CampaignID string + Limit int + Offset int +} + +func ListArtifacts(ctx context.Context, pool *pgxpool.Pool, p ListArtifactsParams) ([]Artifact, int, error) { + if p.Limit <= 0 { + p.Limit = 50 + } + + baseQuery := ` + FROM artifacts a + JOIN repositories r ON r.id = a.repository_id + JOIN commits c ON c.id = a.commit_id + WHERE 1=1 + ` + args := []any{} + argN := 1 + + if p.RepositoryID != "" { + baseQuery += fmt.Sprintf(" AND a.repository_id = $%d", argN) + args = append(args, p.RepositoryID) + argN++ + } + if p.CommitSHA != "" { + baseQuery += fmt.Sprintf(" AND c.sha = $%d", argN) + args = append(args, 
p.CommitSHA) + argN++ + } + if p.Type != "" { + baseQuery += fmt.Sprintf(" AND a.type = $%d", argN) + args = append(args, p.Type) + argN++ + } + if p.SignatureID != "" { + baseQuery += fmt.Sprintf(" AND a.signature_id = $%d", argN) + args = append(args, p.SignatureID) + argN++ + } + if p.CampaignID != "" { + baseQuery += fmt.Sprintf(" AND a.campaign_id = $%d", argN) + args = append(args, p.CampaignID) + argN++ + } + + var total int + err := pool.QueryRow(ctx, "SELECT COUNT(*) "+baseQuery, args...).Scan(&total) + if err != nil { + return nil, 0, fmt.Errorf("counting artifacts: %w", err) + } + + selectQuery := fmt.Sprintf(` + SELECT a.id, a.repository_id, a.commit_id, a.build_id, a.type, a.blob_key, a.blob_size, + a.crash_message, a.stack_trace, a.tags, a.metadata, a.created_at, + r.name, c.sha + %s + ORDER BY a.created_at DESC + LIMIT $%d OFFSET $%d + `, baseQuery, argN, argN+1) + args = append(args, p.Limit, p.Offset) + + rows, err := pool.Query(ctx, selectQuery, args...) + if err != nil { + return nil, 0, fmt.Errorf("listing artifacts: %w", err) + } + defer rows.Close() + + var artifacts []Artifact + for rows.Next() { + var a Artifact + if err := rows.Scan( + &a.ID, &a.RepositoryID, &a.CommitID, &a.BuildID, &a.Type, &a.BlobKey, &a.BlobSize, + &a.CrashMessage, &a.StackTrace, &a.Tags, &a.Metadata, &a.CreatedAt, + &a.RepoName, &a.CommitSHA, + ); err != nil { + return nil, 0, fmt.Errorf("scanning artifact: %w", err) + } + artifacts = append(artifacts, a) + } + return artifacts, total, nil +} diff --git a/internal/models/campaign.go b/internal/models/campaign.go new file mode 100644 index 0000000..ac4edd4 --- /dev/null +++ b/internal/models/campaign.go @@ -0,0 +1,146 @@ +package models + +import ( + "context" + "encoding/json" + "fmt" + "time" + + "github.com/jackc/pgx/v5/pgxpool" +) + +type Campaign struct { + ID string `json:"id"` + RepositoryID string `json:"repository_id"` + Name string `json:"name"` + Type string `json:"type"` + Status string `json:"status"` + 
StartedAt time.Time `json:"started_at"` + FinishedAt *time.Time `json:"finished_at,omitempty"` + Tags json.RawMessage `json:"tags,omitempty"` + Metadata json.RawMessage `json:"metadata,omitempty"` + CreatedAt time.Time `json:"created_at"` + + // Joined fields. + RepoName string `json:"repo_name,omitempty"` + ArtifactCount int `json:"artifact_count,omitempty"` +} + +type CreateCampaignParams struct { + RepositoryID string + Name string + Type string + Tags json.RawMessage + Metadata json.RawMessage +} + +func CreateCampaign(ctx context.Context, pool *pgxpool.Pool, p CreateCampaignParams) (*Campaign, error) { + if p.Tags == nil { + p.Tags = json.RawMessage("{}") + } + if p.Metadata == nil { + p.Metadata = json.RawMessage("{}") + } + + c := &Campaign{} + err := pool.QueryRow(ctx, ` + INSERT INTO campaigns (repository_id, name, type, tags, metadata) + VALUES ($1, $2, $3, $4, $5) + RETURNING id, repository_id, name, type, status, started_at, finished_at, tags, metadata, created_at + `, p.RepositoryID, p.Name, p.Type, p.Tags, p.Metadata).Scan( + &c.ID, &c.RepositoryID, &c.Name, &c.Type, &c.Status, + &c.StartedAt, &c.FinishedAt, &c.Tags, &c.Metadata, &c.CreatedAt, + ) + if err != nil { + return nil, fmt.Errorf("creating campaign: %w", err) + } + return c, nil +} + +func FinishCampaign(ctx context.Context, pool *pgxpool.Pool, id string) error { + _, err := pool.Exec(ctx, ` + UPDATE campaigns SET status = 'finished', finished_at = NOW() WHERE id = $1 + `, id) + if err != nil { + return fmt.Errorf("finishing campaign: %w", err) + } + return nil +} + +func GetCampaign(ctx context.Context, pool *pgxpool.Pool, id string) (*Campaign, error) { + c := &Campaign{} + err := pool.QueryRow(ctx, ` + SELECT c.id, c.repository_id, c.name, c.type, c.status, c.started_at, c.finished_at, + c.tags, c.metadata, c.created_at, + r.name, + (SELECT COUNT(*) FROM artifacts a WHERE a.campaign_id = c.id) + FROM campaigns c + JOIN repositories r ON r.id = c.repository_id + WHERE c.id = $1 + `, 
id).Scan( + &c.ID, &c.RepositoryID, &c.Name, &c.Type, &c.Status, + &c.StartedAt, &c.FinishedAt, &c.Tags, &c.Metadata, &c.CreatedAt, + &c.RepoName, &c.ArtifactCount, + ) + if err != nil { + return nil, fmt.Errorf("getting campaign: %w", err) + } + return c, nil +} + +func ListCampaigns(ctx context.Context, pool *pgxpool.Pool, repoID string, limit, offset int) ([]Campaign, int, error) { + if limit <= 0 { + limit = 50 + } + + baseQuery := ` + FROM campaigns c + JOIN repositories r ON r.id = c.repository_id + WHERE 1=1 + ` + args := []any{} + argN := 1 + + if repoID != "" { + baseQuery += fmt.Sprintf(" AND c.repository_id = $%d", argN) + args = append(args, repoID) + argN++ + } + + var total int + err := pool.QueryRow(ctx, "SELECT COUNT(*) "+baseQuery, args...).Scan(&total) + if err != nil { + return nil, 0, fmt.Errorf("counting campaigns: %w", err) + } + + selectQuery := fmt.Sprintf(` + SELECT c.id, c.repository_id, c.name, c.type, c.status, c.started_at, c.finished_at, + c.tags, c.metadata, c.created_at, + r.name, + (SELECT COUNT(*) FROM artifacts a WHERE a.campaign_id = c.id) + %s + ORDER BY c.created_at DESC + LIMIT $%d OFFSET $%d + `, baseQuery, argN, argN+1) + args = append(args, limit, offset) + + rows, err := pool.Query(ctx, selectQuery, args...) 
+ if err != nil { + return nil, 0, fmt.Errorf("listing campaigns: %w", err) + } + defer rows.Close() + + var campaigns []Campaign + for rows.Next() { + var c Campaign + if err := rows.Scan( + &c.ID, &c.RepositoryID, &c.Name, &c.Type, &c.Status, + &c.StartedAt, &c.FinishedAt, &c.Tags, &c.Metadata, &c.CreatedAt, + &c.RepoName, &c.ArtifactCount, + ); err != nil { + return nil, 0, fmt.Errorf("scanning campaign: %w", err) + } + campaigns = append(campaigns, c) + } + return campaigns, total, nil +} diff --git a/internal/models/commit.go b/internal/models/commit.go new file mode 100644 index 0000000..56de788 --- /dev/null +++ b/internal/models/commit.go @@ -0,0 +1,22 @@ +package models + +import ( + "context" + "fmt" + + "github.com/jackc/pgx/v5/pgxpool" +) + +func GetOrCreateCommit(ctx context.Context, pool *pgxpool.Pool, repositoryID, sha string) (*Commit, error) { + c := &Commit{} + err := pool.QueryRow(ctx, ` + INSERT INTO commits (repository_id, sha) + VALUES ($1, $2) + ON CONFLICT (repository_id, sha) DO UPDATE SET repository_id = EXCLUDED.repository_id + RETURNING id, repository_id, sha, author, message, branch, committed_at, created_at + `, repositoryID, sha).Scan(&c.ID, &c.RepositoryID, &c.SHA, &c.Author, &c.Message, &c.Branch, &c.CommittedAt, &c.CreatedAt) + if err != nil { + return nil, fmt.Errorf("get or create commit: %w", err) + } + return c, nil +} diff --git a/internal/models/crash_group.go b/internal/models/crash_group.go new file mode 100644 index 0000000..500c0ca --- /dev/null +++ b/internal/models/crash_group.go @@ -0,0 +1,264 @@ +package models + +import ( + "context" + "fmt" + "time" + + "github.com/jackc/pgx/v5/pgxpool" +) + +type CrashSignature struct { + ID string `json:"id"` + RepositoryID string `json:"repository_id"` + Fingerprint string `json:"fingerprint"` + SampleTrace *string `json:"sample_trace,omitempty"` + FirstSeenAt time.Time `json:"first_seen_at"` + LastSeenAt time.Time `json:"last_seen_at"` + OccurrenceCount int 
`json:"occurrence_count"` +} + +type CrashGroup struct { + ID string `json:"id"` + CrashSignatureID string `json:"crash_signature_id"` + RepositoryID string `json:"repository_id"` + Title string `json:"title"` + Status string `json:"status"` + ForgejoIssueID *int `json:"forgejo_issue_id,omitempty"` + ForgejoIssueURL *string `json:"forgejo_issue_url,omitempty"` + FirstSeenAt time.Time `json:"first_seen_at"` + LastSeenAt time.Time `json:"last_seen_at"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` + + // Joined fields + RepoName string `json:"repo_name,omitempty"` + Fingerprint string `json:"fingerprint,omitempty"` + OccurrenceCount int `json:"occurrence_count,omitempty"` +} + +// GetOrCreateSignature upserts a crash signature, incrementing occurrence count. +func GetOrCreateSignature(ctx context.Context, pool *pgxpool.Pool, repoID, fingerprint string, sampleTrace *string) (*CrashSignature, bool, error) { + sig := &CrashSignature{} + var created bool + + // Try insert first. + err := pool.QueryRow(ctx, ` + INSERT INTO crash_signatures (repository_id, fingerprint, sample_trace) + VALUES ($1, $2, $3) + ON CONFLICT (repository_id, fingerprint) + DO UPDATE SET + last_seen_at = NOW(), + occurrence_count = crash_signatures.occurrence_count + 1 + RETURNING id, repository_id, fingerprint, sample_trace, first_seen_at, last_seen_at, occurrence_count + `, repoID, fingerprint, sampleTrace).Scan( + &sig.ID, &sig.RepositoryID, &sig.Fingerprint, &sig.SampleTrace, + &sig.FirstSeenAt, &sig.LastSeenAt, &sig.OccurrenceCount, + ) + if err != nil { + return nil, false, fmt.Errorf("get or create signature: %w", err) + } + + // If occurrence count is 1, this is a new signature. + created = sig.OccurrenceCount == 1 + return sig, created, nil +} + +// CreateCrashGroup creates a crash group for a new signature. 
+func CreateCrashGroup(ctx context.Context, pool *pgxpool.Pool, sigID, repoID, title string) (*CrashGroup, error) { + cg := &CrashGroup{} + err := pool.QueryRow(ctx, ` + INSERT INTO crash_groups (crash_signature_id, repository_id, title) + VALUES ($1, $2, $3) + RETURNING id, crash_signature_id, repository_id, title, status, + forgejo_issue_id, forgejo_issue_url, first_seen_at, last_seen_at, created_at, updated_at + `, sigID, repoID, title).Scan( + &cg.ID, &cg.CrashSignatureID, &cg.RepositoryID, &cg.Title, &cg.Status, + &cg.ForgejoIssueID, &cg.ForgejoIssueURL, &cg.FirstSeenAt, &cg.LastSeenAt, + &cg.CreatedAt, &cg.UpdatedAt, + ) + if err != nil { + return nil, fmt.Errorf("creating crash group: %w", err) + } + return cg, nil +} + +// ListCrashGroups returns crash groups with joined data. +func ListCrashGroups(ctx context.Context, pool *pgxpool.Pool, repoID, status string, limit, offset int) ([]CrashGroup, int, error) { + if limit <= 0 { + limit = 50 + } + + baseQuery := ` + FROM crash_groups cg + JOIN crash_signatures cs ON cs.id = cg.crash_signature_id + JOIN repositories r ON r.id = cg.repository_id + WHERE 1=1 + ` + args := []any{} + argN := 1 + + if repoID != "" { + baseQuery += fmt.Sprintf(" AND cg.repository_id = $%d", argN) + args = append(args, repoID) + argN++ + } + if status != "" { + baseQuery += fmt.Sprintf(" AND cg.status = $%d", argN) + args = append(args, status) + argN++ + } + + var total int + err := pool.QueryRow(ctx, "SELECT COUNT(*) "+baseQuery, args...).Scan(&total) + if err != nil { + return nil, 0, fmt.Errorf("counting crash groups: %w", err) + } + + selectQuery := fmt.Sprintf(` + SELECT cg.id, cg.crash_signature_id, cg.repository_id, cg.title, cg.status, + cg.forgejo_issue_id, cg.forgejo_issue_url, cg.first_seen_at, cg.last_seen_at, + cg.created_at, cg.updated_at, + r.name, cs.fingerprint, cs.occurrence_count + %s + ORDER BY cg.last_seen_at DESC + LIMIT $%d OFFSET $%d + `, baseQuery, argN, argN+1) + args = append(args, limit, offset) + + rows, 
err := pool.Query(ctx, selectQuery, args...) + if err != nil { + return nil, 0, fmt.Errorf("listing crash groups: %w", err) + } + defer rows.Close() + + var groups []CrashGroup + for rows.Next() { + var cg CrashGroup + if err := rows.Scan( + &cg.ID, &cg.CrashSignatureID, &cg.RepositoryID, &cg.Title, &cg.Status, + &cg.ForgejoIssueID, &cg.ForgejoIssueURL, &cg.FirstSeenAt, &cg.LastSeenAt, + &cg.CreatedAt, &cg.UpdatedAt, + &cg.RepoName, &cg.Fingerprint, &cg.OccurrenceCount, + ); err != nil { + return nil, 0, fmt.Errorf("scanning crash group: %w", err) + } + groups = append(groups, cg) + } + return groups, total, nil +} + +// GetCrashGroup returns a single crash group by ID. +func GetCrashGroup(ctx context.Context, pool *pgxpool.Pool, id string) (*CrashGroup, error) { + cg := &CrashGroup{} + err := pool.QueryRow(ctx, ` + SELECT cg.id, cg.crash_signature_id, cg.repository_id, cg.title, cg.status, + cg.forgejo_issue_id, cg.forgejo_issue_url, cg.first_seen_at, cg.last_seen_at, + cg.created_at, cg.updated_at, + r.name, cs.fingerprint, cs.occurrence_count + FROM crash_groups cg + JOIN crash_signatures cs ON cs.id = cg.crash_signature_id + JOIN repositories r ON r.id = cg.repository_id + WHERE cg.id = $1 + `, id).Scan( + &cg.ID, &cg.CrashSignatureID, &cg.RepositoryID, &cg.Title, &cg.Status, + &cg.ForgejoIssueID, &cg.ForgejoIssueURL, &cg.FirstSeenAt, &cg.LastSeenAt, + &cg.CreatedAt, &cg.UpdatedAt, + &cg.RepoName, &cg.Fingerprint, &cg.OccurrenceCount, + ) + if err != nil { + return nil, fmt.Errorf("getting crash group: %w", err) + } + return cg, nil +} + +// UpdateCrashGroupIssue links a crash group to a Forgejo issue. 
+func UpdateCrashGroupIssue(ctx context.Context, pool *pgxpool.Pool, groupID string, issueNumber int, issueURL string) error { + _, err := pool.Exec(ctx, ` + UPDATE crash_groups SET forgejo_issue_id = $1, forgejo_issue_url = $2, updated_at = NOW() WHERE id = $3 + `, issueNumber, issueURL, groupID) + if err != nil { + return fmt.Errorf("updating crash group issue: %w", err) + } + return nil +} + +// ResolveCrashGroupByIssue marks a crash group as resolved when its Forgejo issue is closed. +func ResolveCrashGroupByIssue(ctx context.Context, pool *pgxpool.Pool, issueNumber int) error { + _, err := pool.Exec(ctx, ` + UPDATE crash_groups SET status = 'resolved', updated_at = NOW() WHERE forgejo_issue_id = $1 + `, issueNumber) + if err != nil { + return fmt.Errorf("resolving crash group by issue: %w", err) + } + return nil +} + +// ReopenCrashGroupByIssue reopens a crash group when its Forgejo issue is reopened. +func ReopenCrashGroupByIssue(ctx context.Context, pool *pgxpool.Pool, issueNumber int) error { + _, err := pool.Exec(ctx, ` + UPDATE crash_groups SET status = 'open', updated_at = NOW() WHERE forgejo_issue_id = $1 + `, issueNumber) + if err != nil { + return fmt.Errorf("reopening crash group by issue: %w", err) + } + return nil +} + +// UpdateArtifactSignature links an artifact to a signature. +func UpdateArtifactSignature(ctx context.Context, pool *pgxpool.Pool, artifactID, signatureID, fingerprint string) error { + _, err := pool.Exec(ctx, ` + UPDATE artifacts SET signature_id = $1, fingerprint = $2 WHERE id = $3 + `, signatureID, fingerprint, artifactID) + if err != nil { + return fmt.Errorf("updating artifact signature: %w", err) + } + return nil +} + +// SearchArtifacts performs full-text search on artifacts. 
+func SearchArtifacts(ctx context.Context, pool *pgxpool.Pool, query string, limit, offset int) ([]Artifact, int, error) { + if limit <= 0 { + limit = 50 + } + + var total int + err := pool.QueryRow(ctx, ` + SELECT COUNT(*) + FROM artifacts a + WHERE a.search_vector @@ plainto_tsquery('english', $1) + `, query).Scan(&total) + if err != nil { + return nil, 0, fmt.Errorf("counting search results: %w", err) + } + + rows, err := pool.Query(ctx, ` + SELECT a.id, a.repository_id, a.commit_id, a.build_id, a.type, a.blob_key, a.blob_size, + a.crash_message, a.stack_trace, a.tags, a.metadata, a.created_at, + r.name, c.sha + FROM artifacts a + JOIN repositories r ON r.id = a.repository_id + JOIN commits c ON c.id = a.commit_id + WHERE a.search_vector @@ plainto_tsquery('english', $1) + ORDER BY ts_rank(a.search_vector, plainto_tsquery('english', $1)) DESC + LIMIT $2 OFFSET $3 + `, query, limit, offset) + if err != nil { + return nil, 0, fmt.Errorf("searching artifacts: %w", err) + } + defer rows.Close() + + var artifacts []Artifact + for rows.Next() { + var a Artifact + if err := rows.Scan( + &a.ID, &a.RepositoryID, &a.CommitID, &a.BuildID, &a.Type, &a.BlobKey, &a.BlobSize, + &a.CrashMessage, &a.StackTrace, &a.Tags, &a.Metadata, &a.CreatedAt, + &a.RepoName, &a.CommitSHA, + ); err != nil { + return nil, 0, fmt.Errorf("scanning search result: %w", err) + } + artifacts = append(artifacts, a) + } + return artifacts, total, nil +} diff --git a/internal/models/models.go b/internal/models/models.go new file mode 100644 index 0000000..32c933c --- /dev/null +++ b/internal/models/models.go @@ -0,0 +1,55 @@ +package models + +import ( + "encoding/json" + "time" +) + +type Repository struct { + ID string `json:"id"` + Name string `json:"name"` + Owner string `json:"owner"` + ForgejoURL string `json:"forgejo_url,omitempty"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` +} + +type Commit struct { + ID string `json:"id"` + RepositoryID string 
`json:"repository_id"` + SHA string `json:"sha"` + Author *string `json:"author,omitempty"` + Message *string `json:"message,omitempty"` + Branch *string `json:"branch,omitempty"` + CommittedAt *time.Time `json:"committed_at,omitempty"` + CreatedAt time.Time `json:"created_at"` +} + +type Build struct { + ID string `json:"id"` + RepositoryID string `json:"repository_id"` + CommitID string `json:"commit_id"` + Builder *string `json:"builder,omitempty"` + BuildFlags *string `json:"build_flags,omitempty"` + Tags json.RawMessage `json:"tags,omitempty"` + CreatedAt time.Time `json:"created_at"` +} + +type Artifact struct { + ID string `json:"id"` + RepositoryID string `json:"repository_id"` + CommitID string `json:"commit_id"` + BuildID *string `json:"build_id,omitempty"` + Type string `json:"type"` + BlobKey string `json:"blob_key"` + BlobSize int64 `json:"blob_size"` + CrashMessage *string `json:"crash_message,omitempty"` + StackTrace *string `json:"stack_trace,omitempty"` + Tags json.RawMessage `json:"tags,omitempty"` + Metadata json.RawMessage `json:"metadata,omitempty"` + CreatedAt time.Time `json:"created_at"` + + // Joined fields for display. 
+ RepoName string `json:"repo_name,omitempty"` + CommitSHA string `json:"commit_sha,omitempty"` +} diff --git a/internal/models/repository.go b/internal/models/repository.go new file mode 100644 index 0000000..0b89ec4 --- /dev/null +++ b/internal/models/repository.go @@ -0,0 +1,67 @@ +package models + +import ( + "context" + "fmt" + + "github.com/jackc/pgx/v5/pgxpool" +) + +func GetOrCreateRepository(ctx context.Context, pool *pgxpool.Pool, owner, name string) (*Repository, error) { + repo := &Repository{} + err := pool.QueryRow(ctx, ` + INSERT INTO repositories (owner, name) + VALUES ($1, $2) + ON CONFLICT (name) DO UPDATE SET updated_at = NOW() + RETURNING id, name, owner, forgejo_url, created_at, updated_at + `, owner, name).Scan(&repo.ID, &repo.Name, &repo.Owner, &repo.ForgejoURL, &repo.CreatedAt, &repo.UpdatedAt) + if err != nil { + return nil, fmt.Errorf("get or create repository: %w", err) + } + return repo, nil +} + +func GetRepositoryByName(ctx context.Context, pool *pgxpool.Pool, name string) (*Repository, error) { + repo := &Repository{} + err := pool.QueryRow(ctx, ` + SELECT id, name, owner, forgejo_url, created_at, updated_at + FROM repositories WHERE name = $1 + `, name).Scan(&repo.ID, &repo.Name, &repo.Owner, &repo.ForgejoURL, &repo.CreatedAt, &repo.UpdatedAt) + if err != nil { + return nil, fmt.Errorf("get repository by name: %w", err) + } + return repo, nil +} + +func GetRepositoryByID(ctx context.Context, pool *pgxpool.Pool, id string) (*Repository, error) { + repo := &Repository{} + err := pool.QueryRow(ctx, ` + SELECT id, name, owner, forgejo_url, created_at, updated_at + FROM repositories WHERE id = $1 + `, id).Scan(&repo.ID, &repo.Name, &repo.Owner, &repo.ForgejoURL, &repo.CreatedAt, &repo.UpdatedAt) + if err != nil { + return nil, fmt.Errorf("get repository by id: %w", err) + } + return repo, nil +} + +func ListRepositories(ctx context.Context, pool *pgxpool.Pool) ([]Repository, error) { + rows, err := pool.Query(ctx, ` + SELECT id, name, 
owner, forgejo_url, created_at, updated_at + FROM repositories ORDER BY name + `) + if err != nil { + return nil, fmt.Errorf("listing repositories: %w", err) + } + defer rows.Close() + + var repos []Repository + for rows.Next() { + var r Repository + if err := rows.Scan(&r.ID, &r.Name, &r.Owner, &r.ForgejoURL, &r.CreatedAt, &r.UpdatedAt); err != nil { + return nil, fmt.Errorf("scanning repository: %w", err) + } + repos = append(repos, r) + } + return repos, nil +} diff --git a/internal/regression/regression.go b/internal/regression/regression.go new file mode 100644 index 0000000..fd90418 --- /dev/null +++ b/internal/regression/regression.go @@ -0,0 +1,91 @@ +package regression + +import ( + "context" + "fmt" + + "github.com/jackc/pgx/v5/pgxpool" +) + +// Result holds the regression comparison between two commits. +type Result struct { + BaseSHA string `json:"base_sha"` + HeadSHA string `json:"head_sha"` + RepoName string `json:"repo_name"` + New []string `json:"new"` // Fingerprints in head but not base. + Fixed []string `json:"fixed"` // Fingerprints in base but not head. + Recurring []string `json:"recurring"` // Fingerprints in both. + IsRegression bool `json:"is_regression"` +} + +// Compare computes the set difference of crash fingerprints between a base and head commit. 
+func Compare(ctx context.Context, pool *pgxpool.Pool, repoID, baseSHA, headSHA string) (*Result, error) { + baseFingerprints, err := fingerprintsForCommit(ctx, pool, repoID, baseSHA) + if err != nil { + return nil, fmt.Errorf("base commit fingerprints: %w", err) + } + + headFingerprints, err := fingerprintsForCommit(ctx, pool, repoID, headSHA) + if err != nil { + return nil, fmt.Errorf("head commit fingerprints: %w", err) + } + + baseSet := toSet(baseFingerprints) + headSet := toSet(headFingerprints) + + var newFPs, fixedFPs, recurringFPs []string + + for fp := range headSet { + if baseSet[fp] { + recurringFPs = append(recurringFPs, fp) + } else { + newFPs = append(newFPs, fp) + } + } + + for fp := range baseSet { + if !headSet[fp] { + fixedFPs = append(fixedFPs, fp) + } + } + + return &Result{ + BaseSHA: baseSHA, + HeadSHA: headSHA, + New: newFPs, + Fixed: fixedFPs, + Recurring: recurringFPs, + IsRegression: len(newFPs) > 0, + }, nil +} + +func fingerprintsForCommit(ctx context.Context, pool *pgxpool.Pool, repoID, sha string) ([]string, error) { + rows, err := pool.Query(ctx, ` + SELECT DISTINCT a.fingerprint + FROM artifacts a + JOIN commits c ON c.id = a.commit_id + WHERE a.repository_id = $1 AND c.sha = $2 AND a.fingerprint IS NOT NULL + `, repoID, sha) + if err != nil { + return nil, err + } + defer rows.Close() + + var fps []string + for rows.Next() { + var fp string + if err := rows.Scan(&fp); err != nil { + return nil, err + } + fps = append(fps, fp) + } + return fps, nil +} + +func toSet(items []string) map[string]bool { + s := make(map[string]bool, len(items)) + for _, item := range items { + s[item] = true + } + return s +} diff --git a/internal/web/middleware.go b/internal/web/middleware.go new file mode 100644 index 0000000..d4e2b37 --- /dev/null +++ b/internal/web/middleware.go @@ -0,0 +1,16 @@ +package web + +import ( + "log" + "time" + + "github.com/gin-gonic/gin" +) + +func LoggingMiddleware() gin.HandlerFunc { + return func(c *gin.Context) { + 
start := time.Now() + c.Next() + log.Printf("%s %s %d %s", c.Request.Method, c.Request.URL.Path, c.Writer.Status(), time.Since(start)) + } +} diff --git a/internal/web/routes.go b/internal/web/routes.go new file mode 100644 index 0000000..1360814 --- /dev/null +++ b/internal/web/routes.go @@ -0,0 +1,82 @@ +package web + +import ( + "io/fs" + "net/http" + + "github.com/gin-gonic/gin" + "github.com/jackc/pgx/v5/pgxpool" + "github.com/mattnite/cairn/internal/blob" + "github.com/mattnite/cairn/internal/forgejo" + "github.com/mattnite/cairn/internal/handler" + assets "github.com/mattnite/cairn/web" +) + +type RouterConfig struct { + Pool *pgxpool.Pool + Store blob.Store + ForgejoClient *forgejo.Client + WebhookSecret string +} + +func NewRouter(cfg RouterConfig) (*gin.Engine, error) { + templates, err := LoadTemplates() + if err != nil { + return nil, err + } + + forgejoSync := &forgejo.Sync{Client: cfg.ForgejoClient, Pool: cfg.Pool} + + pages := &PageHandler{Pool: cfg.Pool, Templates: templates} + ingest := &handler.IngestHandler{Pool: cfg.Pool, Store: cfg.Store, ForgejoSync: forgejoSync} + artifactAPI := &handler.ArtifactHandler{Pool: cfg.Pool} + download := &handler.DownloadHandler{Pool: cfg.Pool, Store: cfg.Store} + crashGroupAPI := &handler.CrashGroupHandler{Pool: cfg.Pool} + searchAPI := &handler.SearchHandler{Pool: cfg.Pool} + regressionAPI := &handler.RegressionHandler{Pool: cfg.Pool, ForgejoSync: forgejoSync} + campaignAPI := &handler.CampaignHandler{Pool: cfg.Pool} + dashboardAPI := &handler.DashboardHandler{Pool: cfg.Pool} + webhookH := &handler.WebhookHandler{Sync: forgejoSync, Secret: cfg.WebhookSecret} + + r := gin.Default() + + // Static files + staticFS, err := fs.Sub(assets.Assets, "static") + if err != nil { + return nil, err + } + r.StaticFS("/static", http.FS(staticFS)) + + // HTML pages + r.GET("/", pages.Index) + r.GET("/artifacts", pages.Artifacts) + r.GET("/artifacts/:id", pages.ArtifactDetail) + r.GET("/repos", pages.Repos) + 
r.GET("/crashgroups", pages.CrashGroups) + r.GET("/crashgroups/:id", pages.CrashGroupDetail) + r.GET("/campaigns", pages.Campaigns) + r.GET("/campaigns/:id", pages.CampaignDetail) + r.GET("/search", pages.Search) + r.GET("/regression", pages.Regression) + + // JSON API + api := r.Group("/api/v1") + api.POST("/artifacts", ingest.Create) + api.GET("/artifacts", artifactAPI.List) + api.GET("/artifacts/:id", artifactAPI.Detail) + api.GET("/artifacts/:id/download", download.Download) + api.GET("/crashgroups", crashGroupAPI.List) + api.GET("/crashgroups/:id", crashGroupAPI.Detail) + api.GET("/search", searchAPI.Search) + api.POST("/regression/check", regressionAPI.Check) + api.POST("/campaigns", campaignAPI.Create) + api.GET("/campaigns", campaignAPI.List) + api.GET("/campaigns/:id", campaignAPI.Detail) + api.POST("/campaigns/:id/finish", campaignAPI.Finish) + api.GET("/dashboard", dashboardAPI.Stats) + + // Webhooks + r.POST("/webhooks/forgejo", webhookH.Handle) + + return r, nil +} diff --git a/internal/web/templates.go b/internal/web/templates.go new file mode 100644 index 0000000..6ec7fac --- /dev/null +++ b/internal/web/templates.go @@ -0,0 +1,109 @@ +package web + +import ( + "fmt" + "html/template" + "io" + "strings" + "time" + + assets "github.com/mattnite/cairn/web" +) + +var funcMap = template.FuncMap{ + "timeAgo": func(t time.Time) string { + d := time.Since(t) + switch { + case d < time.Minute: + return "just now" + case d < time.Hour: + return fmt.Sprintf("%dm ago", int(d.Minutes())) + case d < 24*time.Hour: + return fmt.Sprintf("%dh ago", int(d.Hours())) + default: + return fmt.Sprintf("%dd ago", int(d.Hours()/24)) + } + }, + "shortSHA": func(sha string) string { + if len(sha) > 8 { + return sha[:8] + } + return sha + }, + "formatSize": func(size int64) string { + switch { + case size < 1024: + return fmt.Sprintf("%d B", size) + case size < 1024*1024: + return fmt.Sprintf("%.1f KB", float64(size)/1024) + default: + return fmt.Sprintf("%.1f MB", 
float64(size)/(1024*1024)) + } + }, + "deref": func(s *string) string { + if s == nil { + return "" + } + return *s + }, + "truncate": func(s string, n int) string { + if len(s) <= n { + return s + } + return s[:n] + "..." + }, + "join": strings.Join, + "derefTime": func(t *time.Time) time.Time { + if t == nil { + return time.Time{} + } + return *t + }, +} + +type Templates struct { + layout *template.Template + pages map[string]*template.Template +} + +func LoadTemplates() (*Templates, error) { + layout, err := template.New("layout").Funcs(funcMap).ParseFS(assets.Assets, "templates/layout.html") + if err != nil { + return nil, fmt.Errorf("parsing layout: %w", err) + } + + pageFiles := []string{ + "templates/pages/index.html", + "templates/pages/artifacts.html", + "templates/pages/artifact_detail.html", + "templates/pages/repos.html", + "templates/pages/crashgroups.html", + "templates/pages/crashgroup_detail.html", + "templates/pages/search.html", + "templates/pages/regression.html", + "templates/pages/campaigns.html", + "templates/pages/campaign_detail.html", + } + + pages := map[string]*template.Template{} + for _, pf := range pageFiles { + name := strings.TrimPrefix(pf, "templates/pages/") + name = strings.TrimSuffix(name, ".html") + + t, err := template.Must(layout.Clone()).ParseFS(assets.Assets, pf) + if err != nil { + return nil, fmt.Errorf("parsing page %s: %w", name, err) + } + pages[name] = t + } + + return &Templates{layout: layout, pages: pages}, nil +} + +func (t *Templates) Render(w io.Writer, page string, data any) error { + tmpl, ok := t.pages[page] + if !ok { + return fmt.Errorf("template %q not found", page) + } + return tmpl.ExecuteTemplate(w, "layout", data) +} diff --git a/internal/web/web.go b/internal/web/web.go new file mode 100644 index 0000000..b8ddfc6 --- /dev/null +++ b/internal/web/web.go @@ -0,0 +1,305 @@ +package web + +import ( + "net/http" + "strconv" + + "github.com/gin-gonic/gin" + "github.com/jackc/pgx/v5/pgxpool" + 
"github.com/mattnite/cairn/internal/models" + "github.com/mattnite/cairn/internal/regression" +) + +type PageHandler struct { + Pool *pgxpool.Pool + Templates *Templates +} + +type PageData struct { + Title string + Content any +} + +func (h *PageHandler) Index(c *gin.Context) { + ctx := c.Request.Context() + + artifacts, total, err := models.ListArtifacts(ctx, h.Pool, models.ListArtifactsParams{ + Limit: 10, + }) + if err != nil { + c.String(http.StatusInternalServerError, err.Error()) + return + } + + repos, err := models.ListRepositories(ctx, h.Pool) + if err != nil { + c.String(http.StatusInternalServerError, err.Error()) + return + } + + var totalCG, openCG int + h.Pool.QueryRow(ctx, "SELECT COUNT(*) FROM crash_groups").Scan(&totalCG) + h.Pool.QueryRow(ctx, "SELECT COUNT(*) FROM crash_groups WHERE status = 'open'").Scan(&openCG) + + // Top crashers + type topCrasher struct { + CrashGroupID string + Title string + OccurrenceCount int + RepoName string + } + var topCrashers []topCrasher + rows, err := h.Pool.Query(ctx, ` + SELECT cg.id, cg.title, cs.occurrence_count, r.name + FROM crash_groups cg + JOIN crash_signatures cs ON cs.id = cg.crash_signature_id + JOIN repositories r ON r.id = cg.repository_id + WHERE cg.status = 'open' + ORDER BY cs.occurrence_count DESC + LIMIT 5 + `) + if err == nil { + defer rows.Close() + for rows.Next() { + var tc topCrasher + if rows.Scan(&tc.CrashGroupID, &tc.Title, &tc.OccurrenceCount, &tc.RepoName) == nil { + topCrashers = append(topCrashers, tc) + } + } + } + + data := PageData{ + Title: "Dashboard", + Content: map[string]any{ + "Artifacts": artifacts, + "TotalArtifacts": total, + "Repositories": repos, + "TotalCrashGroups": totalCG, + "OpenCrashGroups": openCG, + "TopCrashers": topCrashers, + }, + } + c.Header("Content-Type", "text/html; charset=utf-8") + h.Templates.Render(c.Writer, "index", data) +} + +func (h *PageHandler) Artifacts(c *gin.Context) { + limit, _ := strconv.Atoi(c.Query("limit")) + offset, _ := 
strconv.Atoi(c.Query("offset")) + if limit <= 0 { + limit = 50 + } + + artifacts, total, err := models.ListArtifacts(c.Request.Context(), h.Pool, models.ListArtifactsParams{ + RepositoryID: c.Query("repository_id"), + Type: c.Query("type"), + Limit: limit, + Offset: offset, + }) + if err != nil { + c.String(http.StatusInternalServerError, err.Error()) + return + } + + data := PageData{ + Title: "Artifacts", + Content: map[string]any{ + "Artifacts": artifacts, + "Total": total, + "Limit": limit, + "Offset": offset, + }, + } + c.Header("Content-Type", "text/html; charset=utf-8") + h.Templates.Render(c.Writer, "artifacts", data) +} + +func (h *PageHandler) ArtifactDetail(c *gin.Context) { + id := c.Param("id") + + artifact, err := models.GetArtifact(c.Request.Context(), h.Pool, id) + if err != nil { + c.String(http.StatusNotFound, "artifact not found") + return + } + + data := PageData{ + Title: "Artifact " + artifact.ID[:8], + Content: artifact, + } + c.Header("Content-Type", "text/html; charset=utf-8") + h.Templates.Render(c.Writer, "artifact_detail", data) +} + +func (h *PageHandler) Repos(c *gin.Context) { + repos, err := models.ListRepositories(c.Request.Context(), h.Pool) + if err != nil { + c.String(http.StatusInternalServerError, err.Error()) + return + } + + data := PageData{ + Title: "Repositories", + Content: map[string]any{ + "Repositories": repos, + }, + } + c.Header("Content-Type", "text/html; charset=utf-8") + h.Templates.Render(c.Writer, "repos", data) +} + +func (h *PageHandler) CrashGroups(c *gin.Context) { + limit, _ := strconv.Atoi(c.Query("limit")) + offset, _ := strconv.Atoi(c.Query("offset")) + if limit <= 0 { + limit = 50 + } + + groups, total, err := models.ListCrashGroups( + c.Request.Context(), h.Pool, + c.Query("repository_id"), c.Query("status"), + limit, offset, + ) + if err != nil { + c.String(http.StatusInternalServerError, err.Error()) + return + } + + data := PageData{ + Title: "Crash Groups", + Content: map[string]any{ + 
			"CrashGroups": groups,
			"Total":       total,
			"Limit":       limit,
			"Offset":      offset,
		},
	}
	c.Header("Content-Type", "text/html; charset=utf-8")
	h.Templates.Render(c.Writer, "crashgroups", data)
}

// CrashGroupDetail renders one crash group plus the artifacts sharing its
// crash signature, or 404 when the ID is unknown.
func (h *PageHandler) CrashGroupDetail(c *gin.Context) {
	id := c.Param("id")

	group, err := models.GetCrashGroup(c.Request.Context(), h.Pool, id)
	if err != nil {
		c.String(http.StatusNotFound, "crash group not found")
		return
	}

	// Get artifacts linked to this crash group's signature.
	// Best-effort: a listing error just yields an empty artifacts section.
	artifacts, _, _ := models.ListArtifacts(c.Request.Context(), h.Pool, models.ListArtifactsParams{
		SignatureID: group.CrashSignatureID,
		Limit:       50,
	})

	data := PageData{
		Title: "Crash Group: " + group.Title,
		Content: map[string]any{
			"Group":     group,
			"Artifacts": artifacts,
		},
	}
	c.Header("Content-Type", "text/html; charset=utf-8")
	h.Templates.Render(c.Writer, "crashgroup_detail", data)
}

// Search renders the search page; with a non-empty "q" query it also runs
// the artifact search (best-effort — errors yield an empty result set).
func (h *PageHandler) Search(c *gin.Context) {
	q := c.Query("q")

	var artifacts []models.Artifact
	var total int
	if q != "" {
		artifacts, total, _ = models.SearchArtifacts(c.Request.Context(), h.Pool, q, 50, 0)
	}

	data := PageData{
		Title: "Search",
		Content: map[string]any{
			"Query":     q,
			"Artifacts": artifacts,
			"Total":     total,
		},
	}
	c.Header("Content-Type", "text/html; charset=utf-8")
	h.Templates.Render(c.Writer, "search", data)
}

// Regression renders the regression-check form; when repo, base, and head
// are all provided it runs the comparison and includes the result.
// Lookup/compare failures simply omit "Result" (form is re-shown).
func (h *PageHandler) Regression(c *gin.Context) {
	repo := c.Query("repo")
	base := c.Query("base")
	head := c.Query("head")

	content := map[string]any{
		"Repo": repo,
		"Base": base,
		"Head": head,
	}

	if repo != "" && base != "" && head != "" {
		r, err := models.GetRepositoryByName(c.Request.Context(), h.Pool, repo)
		if err == nil {
			result, err := regression.Compare(c.Request.Context(), h.Pool, r.ID, base, head)
			if err == nil {
				result.RepoName = repo
				content["Result"] = result
			}
		}
	}

	data := PageData{
		Title:   "Regression Check",
		Content: content,
	}
	c.Header("Content-Type", "text/html; charset=utf-8")
	h.Templates.Render(c.Writer, "regression", data)
}

// Campaigns renders the paginated campaign listing, filterable by repository.
func (h *PageHandler) Campaigns(c *gin.Context) {
	limit, _ := strconv.Atoi(c.Query("limit"))
	offset, _ := strconv.Atoi(c.Query("offset"))
	if limit <= 0 {
		limit = 50
	}

	campaigns, total, err := models.ListCampaigns(c.Request.Context(), h.Pool, c.Query("repository_id"), limit, offset)
	if err != nil {
		c.String(http.StatusInternalServerError, err.Error())
		return
	}

	data := PageData{
		Title: "Campaigns",
		Content: map[string]any{
			"Campaigns": campaigns,
			"Total":     total,
		},
	}
	c.Header("Content-Type", "text/html; charset=utf-8")
	h.Templates.Render(c.Writer, "campaigns", data)
}

// CampaignDetail renders one campaign plus its artifacts (best-effort
// listing), or 404 when the ID is unknown.
func (h *PageHandler) CampaignDetail(c *gin.Context) {
	id := c.Param("id")

	campaign, err := models.GetCampaign(c.Request.Context(), h.Pool, id)
	if err != nil {
		c.String(http.StatusNotFound, "campaign not found")
		return
	}

	artifacts, _, _ := models.ListArtifacts(c.Request.Context(), h.Pool, models.ListArtifactsParams{
		CampaignID: campaign.ID,
		Limit:      50,
	})

	data := PageData{
		Title: "Campaign: " + campaign.Name,
		Content: map[string]any{
			"Campaign":  campaign,
			"Artifacts": artifacts,
		},
	}
	c.Header("Content-Type", "text/html; charset=utf-8")
	h.Templates.Render(c.Writer, "campaign_detail", data)
}
diff --git a/web/embed.go b/web/embed.go new file mode 100644 index 0000000..3d797dc --- /dev/null +++ b/web/embed.go @@ -0,0 +1,6 @@
// Package web embeds the dashboard's templates and static assets into the binary.
package web

import "embed"

// Assets holds the HTML templates and static CSS/JS served by the router.
//go:embed templates static
var Assets embed.FS
diff --git a/web/static/css/cairn.css b/web/static/css/cairn.css new file mode 100644 index 0000000..8a76387 --- /dev/null +++ b/web/static/css/cairn.css @@ -0,0 +1,280 @@
/* Theme variables: dark palette, layout metrics, and monospace stack. */
:root {
  --bg: #0f1117;
  --bg-surface: #1a1d27;
  --bg-hover: #242836;
  --border: #2a2e3d;
  --text: #e1e4ed;
  --text-muted: #8b90a0;
  --accent: #6c8cff;
  --accent-hover: #8ba4ff;
  --danger: #ff6b6b;
  --warning: #ffd666;
--success: #69db7c; + --sidebar-width: 220px; + --radius: 6px; + --font-mono: 'SF Mono', 'Cascadia Code', 'Fira Code', monospace; +} + +* { margin: 0; padding: 0; box-sizing: border-box; } + +body { + font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', sans-serif; + background: var(--bg); + color: var(--text); + display: flex; + min-height: 100vh; +} + +/* Sidebar */ +.sidebar { + width: var(--sidebar-width); + background: var(--bg-surface); + border-right: 1px solid var(--border); + padding: 1.5rem 0; + position: fixed; + top: 0; + bottom: 0; + overflow-y: auto; +} + +.sidebar-header { padding: 0 1.25rem 1.5rem; } +.logo { font-size: 1.25rem; font-weight: 700; color: var(--accent); } + +.nav-links { list-style: none; } +.nav-links a { + display: block; + padding: 0.625rem 1.25rem; + color: var(--text-muted); + text-decoration: none; + font-size: 0.875rem; + transition: all 0.15s; +} +.nav-links a:hover { + color: var(--text); + background: var(--bg-hover); +} + +/* Main content */ +.content { + margin-left: var(--sidebar-width); + flex: 1; + padding: 2rem; + max-width: 1200px; +} + +.page-header { + margin-bottom: 1.5rem; + padding-bottom: 1rem; + border-bottom: 1px solid var(--border); +} +.page-header h2 { font-size: 1.5rem; font-weight: 600; } + +/* Stats */ +.stats-row { + display: flex; + gap: 1rem; + margin-bottom: 2rem; +} + +.stat-card { + background: var(--bg-surface); + border: 1px solid var(--border); + border-radius: var(--radius); + padding: 1.25rem 1.5rem; + display: flex; + flex-direction: column; + min-width: 160px; +} +.stat-value { font-size: 2rem; font-weight: 700; color: var(--accent); } +.stat-label { font-size: 0.8rem; color: var(--text-muted); margin-top: 0.25rem; } + +/* Tables */ +.table { + width: 100%; + border-collapse: collapse; + font-size: 0.875rem; +} +.table th { + text-align: left; + padding: 0.75rem 1rem; + border-bottom: 2px solid var(--border); + color: var(--text-muted); + font-weight: 500; + font-size: 0.75rem; + 
text-transform: uppercase; + letter-spacing: 0.05em; +} +.table td { + padding: 0.75rem 1rem; + border-bottom: 1px solid var(--border); +} +.table tr:hover td { background: var(--bg-hover); } + +/* Badges */ +.badge { + display: inline-block; + padding: 0.2rem 0.5rem; + border-radius: 3px; + font-size: 0.75rem; + font-weight: 600; + font-family: var(--font-mono); +} +.badge-coredump { background: #3b2a2a; color: var(--danger); } +.badge-fuzz { background: #2a3b2a; color: var(--success); } +.badge-sanitizer { background: #3b3b2a; color: var(--warning); } +.badge-simulation { background: #2a2a3b; color: var(--accent); } + +/* Buttons */ +.btn { + display: inline-block; + padding: 0.5rem 1rem; + background: var(--bg-surface); + color: var(--text); + border: 1px solid var(--border); + border-radius: var(--radius); + font-size: 0.875rem; + text-decoration: none; + cursor: pointer; + transition: all 0.15s; +} +.btn:hover { background: var(--bg-hover); border-color: var(--accent); } +.btn-sm { padding: 0.25rem 0.625rem; font-size: 0.8rem; } + +/* Code */ +code { + font-family: var(--font-mono); + font-size: 0.85em; + background: var(--bg-hover); + padding: 0.15rem 0.35rem; + border-radius: 3px; +} + +.code-block { + background: var(--bg-surface); + border: 1px solid var(--border); + border-radius: var(--radius); + padding: 1rem; + overflow-x: auto; + font-family: var(--font-mono); + font-size: 0.8rem; + line-height: 1.6; + white-space: pre-wrap; + word-break: break-all; +} + +/* Detail pages */ +.detail-header { + display: flex; + align-items: center; + gap: 1rem; + margin-bottom: 1.5rem; +} + +.detail-grid { + display: grid; + grid-template-columns: repeat(auto-fill, minmax(250px, 1fr)); + gap: 1rem; + margin-bottom: 2rem; +} + +.detail-item { + background: var(--bg-surface); + border: 1px solid var(--border); + border-radius: var(--radius); + padding: 1rem; +} +.detail-item label { + display: block; + font-size: 0.75rem; + color: var(--text-muted); + text-transform: 
uppercase; + letter-spacing: 0.05em; + margin-bottom: 0.375rem; +} + +.detail-actions { margin-top: 2rem; } + +/* Sections */ +.section { margin-bottom: 2rem; } +.section h3 { margin-bottom: 0.75rem; font-size: 1rem; } + +/* Utilities */ +.empty-state { + color: var(--text-muted); + padding: 3rem; + text-align: center; + font-size: 0.9rem; +} + +.toolbar { + display: flex; + justify-content: space-between; + align-items: center; + margin-bottom: 1rem; +} + +.result-count { color: var(--text-muted); font-size: 0.875rem; } + +.pagination { + display: flex; + gap: 0.5rem; + justify-content: center; + margin-top: 1.5rem; +} + +/* Status badges */ +.badge-status-open { background: #3b2a2a; color: var(--danger); } +.badge-status-resolved { background: #2a3b2a; color: var(--success); } + +/* Campaign badges */ +.badge-campaign-running { background: #2a2a3b; color: var(--accent); } +.badge-campaign-finished { background: #2a3b2a; color: var(--success); } + +/* Search */ +.search-form { + display: flex; + gap: 0.75rem; + margin-bottom: 1.5rem; +} +.search-input { + flex: 1; + padding: 0.625rem 1rem; + background: var(--bg-surface); + border: 1px solid var(--border); + border-radius: var(--radius); + color: var(--text); + font-size: 0.9rem; +} +.search-input:focus { + outline: none; + border-color: var(--accent); +} + +/* Crash group detail */ +.crashgroup-title { + font-size: 1.25rem; + margin-bottom: 1.5rem; +} + +/* Regression */ +.form-row { + display: flex; + gap: 0.75rem; + align-items: flex-end; +} +.form-group { display: flex; flex-direction: column; gap: 0.375rem; flex: 1; } +.form-group label { font-size: 0.75rem; color: var(--text-muted); text-transform: uppercase; letter-spacing: 0.05em; } + +.regression-result { + border: 1px solid var(--border); + border-radius: var(--radius); + padding: 1.5rem; + margin-top: 1.5rem; +} +.regression-pass { border-color: var(--success); } +.regression-fail { border-color: var(--danger); } +.regression-verdict { font-size: 
1.1rem; margin-bottom: 1rem; } +.regression-fail .regression-verdict { color: var(--danger); } +.regression-pass .regression-verdict { color: var(--success); } +.fingerprint-list { list-style: none; padding: 0; } +.fingerprint-list li { padding: 0.375rem 0; } diff --git a/web/static/js/cairn.js b/web/static/js/cairn.js new file mode 100644 index 0000000..3667be3 --- /dev/null +++ b/web/static/js/cairn.js @@ -0,0 +1,24 @@ +// Cairn - minimal client-side JS for interactive fragments +(function() { + 'use strict'; + + // Helper: fetch an HTML fragment and swap into a target element + window.cairn = { + loadFragment: function(url, targetSelector) { + var target = document.querySelector(targetSelector); + if (!target) return; + + fetch(url, { headers: { 'Accept': 'text/html' } }) + .then(function(resp) { + if (!resp.ok) throw new Error('HTTP ' + resp.status); + return resp.text(); + }) + .then(function(html) { + target.innerHTML = html; + }) + .catch(function(err) { + console.error('Fragment load failed:', err); + }); + } + }; +})(); diff --git a/web/templates/layout.html b/web/templates/layout.html new file mode 100644 index 0000000..bab6275 --- /dev/null +++ b/web/templates/layout.html @@ -0,0 +1,34 @@ +{{define "layout"}} + + + + + {{.Title}} - Cairn + + + + +
+ +
+ {{template "content" .Content}} +
+
+ + +{{end}} diff --git a/web/templates/pages/artifact_detail.html b/web/templates/pages/artifact_detail.html new file mode 100644 index 0000000..aae6b30 --- /dev/null +++ b/web/templates/pages/artifact_detail.html @@ -0,0 +1,46 @@ +{{define "content"}} +
+
+ {{.Type}} + {{.RepoName}} + {{shortSHA .CommitSHA}} +
+ +
+
+ + {{.ID}} +
+
+ + {{formatSize .BlobSize}} +
+
+ + {{timeAgo .CreatedAt}} +
+
+ + {{.BlobKey}} +
+
+ + {{if .CrashMessage}} +
+

Crash Message

+
{{deref .CrashMessage}}
+
+ {{end}} + + {{if .StackTrace}} +
+

Stack Trace

+
{{deref .StackTrace}}
+
+ {{end}} + + +
+{{end}} diff --git a/web/templates/pages/artifacts.html b/web/templates/pages/artifacts.html new file mode 100644 index 0000000..d5bfae2 --- /dev/null +++ b/web/templates/pages/artifacts.html @@ -0,0 +1,48 @@ +{{define "content"}} +
+
+ {{.Total}} artifacts +
+ + {{if .Artifacts}} + + + + + + + + + + + + + + {{range .Artifacts}} + + + + + + + + + + {{end}} + +
TypeRepositoryCommitCrash MessageSizeCreated
{{.Type}}{{.RepoName}}{{shortSHA .CommitSHA}}{{if .CrashMessage}}{{truncate (deref .CrashMessage) 80}}{{else}}-{{end}}{{formatSize .BlobSize}}{{timeAgo .CreatedAt}}View
+ + {{if gt .Total .Limit}} + + {{end}} + {{else}} +

No artifacts found.

+ {{end}} +
+{{end}} diff --git a/web/templates/pages/campaign_detail.html b/web/templates/pages/campaign_detail.html new file mode 100644 index 0000000..b015ee4 --- /dev/null +++ b/web/templates/pages/campaign_detail.html @@ -0,0 +1,63 @@ +{{define "content"}} +
+
+ {{.Campaign.Status}} + {{.Campaign.RepoName}} +
+ +

{{.Campaign.Name}}

+ +
+
+ + {{.Campaign.Type}} +
+
+ + {{.Campaign.ArtifactCount}} +
+
+ + {{timeAgo .Campaign.StartedAt}} +
+ {{if .Campaign.FinishedAt}} +
+ + {{timeAgo (derefTime .Campaign.FinishedAt)}} +
+ {{end}} +
+ +
+

Artifacts

+ {{if .Artifacts}} + + + + + + + + + + + + + {{range .Artifacts}} + + + + + + + + + {{end}} + +
TypeCommitMessageSizeCreated
{{.Type}}{{shortSHA .CommitSHA}}{{if .CrashMessage}}{{truncate (deref .CrashMessage) 60}}{{else}}-{{end}}{{formatSize .BlobSize}}{{timeAgo .CreatedAt}}View
+ {{else}} +

No artifacts in this campaign yet.

+ {{end}} +
+
+{{end}} diff --git a/web/templates/pages/campaigns.html b/web/templates/pages/campaigns.html new file mode 100644 index 0000000..e2a8197 --- /dev/null +++ b/web/templates/pages/campaigns.html @@ -0,0 +1,38 @@ +{{define "content"}} +
+
+ {{.Total}} campaigns +
+ + {{if .Campaigns}} + + + + + + + + + + + + + + {{range .Campaigns}} + + + + + + + + + + {{end}} + +
StatusNameTypeRepositoryArtifactsStarted
{{.Status}}{{.Name}}{{.Type}}{{.RepoName}}{{.ArtifactCount}}{{timeAgo .StartedAt}}View
+ {{else}} +

No campaigns yet. Use cairn campaign start to begin a campaign.

+ {{end}} +
+{{end}} diff --git a/web/templates/pages/crashgroup_detail.html b/web/templates/pages/crashgroup_detail.html new file mode 100644 index 0000000..06bdcfa --- /dev/null +++ b/web/templates/pages/crashgroup_detail.html @@ -0,0 +1,67 @@ +{{define "content"}} +
+
+ {{.Group.Status}} + {{.Group.RepoName}} +
+ +

{{.Group.Title}}

+ +
+
+ + {{shortSHA .Group.Fingerprint}} +
+
+ + {{.Group.OccurrenceCount}} +
+
+ + {{timeAgo .Group.FirstSeenAt}} +
+
+ + {{timeAgo .Group.LastSeenAt}} +
+ {{if .Group.ForgejoIssueURL}} +
+ + View Issue +
+ {{end}} +
+ +
+

Related Artifacts

+ {{if .Artifacts}} + + + + + + + + + + + + + {{range .Artifacts}} + + + + + + + + + {{end}} + +
TypeCommitMessageSizeCreated
{{.Type}}{{shortSHA .CommitSHA}}{{if .CrashMessage}}{{truncate (deref .CrashMessage) 60}}{{else}}-{{end}}{{formatSize .BlobSize}}{{timeAgo .CreatedAt}}View
+ {{else}} +

No artifacts linked to this crash group yet.

+ {{end}} +
+
+{{end}} diff --git a/web/templates/pages/crashgroups.html b/web/templates/pages/crashgroups.html new file mode 100644 index 0000000..da67c9a --- /dev/null +++ b/web/templates/pages/crashgroups.html @@ -0,0 +1,38 @@ +{{define "content"}} +
+
+ {{.Total}} crash groups +
+ + {{if .CrashGroups}} + + + + + + + + + + + + + + {{range .CrashGroups}} + + + + + + + + + + {{end}} + +
StatusTitleRepositoryOccurrencesFirst SeenLast Seen
{{.Status}}{{.Title}}{{.RepoName}}{{.OccurrenceCount}}{{timeAgo .FirstSeenAt}}{{timeAgo .LastSeenAt}}View
+ {{else}} +

No crash groups yet. Crash groups are created automatically when artifacts with stack traces are uploaded.

+ {{end}} +
+{{end}} diff --git a/web/templates/pages/index.html b/web/templates/pages/index.html new file mode 100644 index 0000000..4292aa9 --- /dev/null +++ b/web/templates/pages/index.html @@ -0,0 +1,80 @@ +{{define "content"}} +
+
+
+ {{.TotalArtifacts}} + Artifacts +
+
+ {{len .Repositories}} + Repositories +
+
+ {{.TotalCrashGroups}} + Crash Groups +
+
+ {{.OpenCrashGroups}} + Open +
+
+ + {{if .TopCrashers}} +
+

Top Crashers

+ + + + + + + + + + + {{range .TopCrashers}} + + + + + + + {{end}} + +
TitleRepositoryOccurrences
{{.Title}}{{.RepoName}}{{.OccurrenceCount}}View
+
+ {{end}} + +
+

Recent Artifacts

+ {{if .Artifacts}} + + + + + + + + + + + + + {{range .Artifacts}} + + + + + + + + + {{end}} + +
TypeRepositoryCommitMessageSizeCreated
{{.Type}}{{.RepoName}}{{shortSHA .CommitSHA}}{{if .CrashMessage}}{{truncate (deref .CrashMessage) 60}}{{else}}-{{end}}{{formatSize .BlobSize}}{{timeAgo .CreatedAt}}
+ {{else}} +

No artifacts yet. Use cairn upload to ingest your first artifact.

+ {{end}} +
+
+{{end}} diff --git a/web/templates/pages/regression.html b/web/templates/pages/regression.html new file mode 100644 index 0000000..5502030 --- /dev/null +++ b/web/templates/pages/regression.html @@ -0,0 +1,66 @@ +{{define "content"}} +
+
+
+
+ + +
+
+ + +
+
+ + +
+ +
+
+ + {{if .Result}} +
+
+ {{if .Result.IsRegression}} + REGRESSION DETECTED + {{else}} + No regression + {{end}} +
+ +
+
+ {{len .Result.New}} + New Crashes +
+
+ {{len .Result.Fixed}} + Fixed +
+
+ {{len .Result.Recurring}} + Recurring +
+
+ + {{if .Result.New}} +
+

New Crash Signatures

+
    + {{range .Result.New}}
  • {{shortSHA .}}
  • {{end}} +
+
+ {{end}} + + {{if .Result.Fixed}} +
+

Fixed Crash Signatures

+
    + {{range .Result.Fixed}}
  • {{shortSHA .}}
  • {{end}} +
+
+ {{end}} +
+ {{end}} +
+{{end}} diff --git a/web/templates/pages/repos.html b/web/templates/pages/repos.html new file mode 100644 index 0000000..cdd839b --- /dev/null +++ b/web/templates/pages/repos.html @@ -0,0 +1,26 @@ +{{define "content"}} +
+ {{if .Repositories}} + + + + + + + + + + {{range .Repositories}} + + + + + + {{end}} + +
NameOwnerCreated
{{.Name}}{{.Owner}}{{timeAgo .CreatedAt}}
+ {{else}} +

No repositories yet. Repositories are created automatically when you upload an artifact.

+ {{end}} +
+{{end}} diff --git a/web/templates/pages/search.html b/web/templates/pages/search.html new file mode 100644 index 0000000..19bb00f --- /dev/null +++ b/web/templates/pages/search.html @@ -0,0 +1,45 @@ +{{define "content"}} +
+
+ + +
+ + {{if .Query}} +
+ {{.Total}} results for "{{.Query}}" +
+ + {{if .Artifacts}} + + + + + + + + + + + + + + {{range .Artifacts}} + + + + + + + + + + {{end}} + +
TypeRepositoryCommitCrash MessageSizeCreated
{{.Type}}{{.RepoName}}{{shortSHA .CommitSHA}}{{if .CrashMessage}}{{truncate (deref .CrashMessage) 80}}{{else}}-{{end}}{{formatSize .BlobSize}}{{timeAgo .CreatedAt}}View
+ {{else}} +

No results found.

+ {{end}} + {{end}} +
+{{end}}