Merge pull request 'Corpus downloader' (#12) from quiet-afl into main

Reviewed-on: https://git.ts.mattnite.net/mattnite/cairn/pulls/12
This commit is contained in:
Matthew Knight 2026-03-06 10:18:29 +00:00
commit f47b51ec6f
4 changed files with 93 additions and 33 deletions

View File

@ -190,17 +190,18 @@ runs:
mkdir -p "${FINDINGS}" mkdir -p "${FINDINGS}"
AFL_EXIT=0 AFL_EXIT=0
AFL_NO_UI=1 \ {
AFL_SKIP_CPUFREQ=1 \ AFL_NO_UI=1 \
AFL_I_DONT_CARE_ABOUT_MISSING_CRASHES=1 \ AFL_SKIP_CPUFREQ=1 \
AFL_NO_CRASH_README=1 \ AFL_I_DONT_CARE_ABOUT_MISSING_CRASHES=1 \
afl-fuzz \ AFL_NO_CRASH_README=1 \
-V "${DURATION}" \ afl-fuzz \
-i "${SEEDS}" \ -V "${DURATION}" \
-o "${FINDINGS}" \ -i "${SEEDS}" \
${EXTRA_AFL_ARGS} \ -o "${FINDINGS}" \
-- "${FUZZ_BIN}" \ ${EXTRA_AFL_ARGS} \
|| AFL_EXIT=$? -- "${FUZZ_BIN}"
} >/dev/null 2>&1 || AFL_EXIT=$?
if [ "${AFL_EXIT}" -eq 0 ]; then if [ "${AFL_EXIT}" -eq 0 ]; then
echo "AFL++ exited normally (completed run)" echo "AFL++ exited normally (completed run)"

View File

@ -1,7 +1,9 @@
package main package main
import ( import (
"archive/tar"
"bytes" "bytes"
"compress/gzip"
"encoding/json" "encoding/json"
"fmt" "fmt"
"io" "io"
@ -544,45 +546,44 @@ func cmdCorpus(subcmd string, args []string) error {
return fmt.Errorf("creating output dir: %w", err) return fmt.Errorf("creating output dir: %w", err)
} }
// List all corpus entries. resp, err := http.Get(serverURL + "/api/v1/targets/" + targetID + "/corpus/download")
resp, err := http.Get(serverURL + "/api/v1/targets/" + targetID + "/corpus?limit=10000")
if err != nil { if err != nil {
return fmt.Errorf("listing corpus: %w", err) return fmt.Errorf("downloading corpus: %w", err)
} }
defer resp.Body.Close() defer resp.Body.Close()
body, _ := io.ReadAll(resp.Body)
if resp.StatusCode == http.StatusNoContent {
fmt.Printf("Downloaded 0 corpus entries to %s\n", dir)
return nil
}
if resp.StatusCode != http.StatusOK { if resp.StatusCode != http.StatusOK {
body, _ := io.ReadAll(resp.Body)
return fmt.Errorf("server returned %d: %s", resp.StatusCode, body) return fmt.Errorf("server returned %d: %s", resp.StatusCode, body)
} }
var listResp struct { gr, err := gzip.NewReader(resp.Body)
Entries []struct { if err != nil {
ID float64 `json:"id"` return fmt.Errorf("decompressing corpus: %w", err)
BlobKey string `json:"blob_key"`
} `json:"entries"`
}
if err := json.Unmarshal(body, &listResp); err != nil {
return fmt.Errorf("parsing response: %w", err)
} }
defer gr.Close()
tr := tar.NewReader(gr)
var downloaded int var downloaded int
for _, entry := range listResp.Entries { for {
entryID := fmt.Sprintf("%d", int(entry.ID)) hdr, err := tr.Next()
dlURL := serverURL + "/api/v1/targets/" + targetID + "/corpus/" + entryID + "/download" if err == io.EOF {
dlResp, err := http.Get(dlURL) break
}
if err != nil { if err != nil {
return fmt.Errorf("downloading entry %s: %w", entryID, err) return fmt.Errorf("reading tar: %w", err)
} }
filename := filepath.Base(entry.BlobKey) outPath := filepath.Join(dir, filepath.Base(hdr.Name))
outPath := filepath.Join(dir, filename)
out, err := os.Create(outPath) out, err := os.Create(outPath)
if err != nil { if err != nil {
dlResp.Body.Close()
return fmt.Errorf("creating file %s: %w", outPath, err) return fmt.Errorf("creating file %s: %w", outPath, err)
} }
_, err = io.Copy(out, dlResp.Body) _, err = io.Copy(out, tr)
dlResp.Body.Close()
out.Close() out.Close()
if err != nil { if err != nil {
return fmt.Errorf("writing file %s: %w", outPath, err) return fmt.Errorf("writing file %s: %w", outPath, err)

View File

@ -1,9 +1,12 @@
package handler package handler
import ( import (
"archive/tar"
"compress/gzip"
"fmt" "fmt"
"io" "io"
"net/http" "net/http"
"path/filepath"
"strconv" "strconv"
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
@ -317,3 +320,57 @@ func (h *CorpusHandler) Download(c *gin.Context) {
c.Header("Content-Type", "application/octet-stream") c.Header("Content-Type", "application/octet-stream")
_, _ = io.Copy(c.Writer, reader) _, _ = io.Copy(c.Writer, reader)
} }
// DownloadAll streams every corpus entry for the target identified by the
// :id route parameter as a single gzip-compressed tar archive
// (corpus-<id>.tar.gz). Responds 204 No Content when the target has no
// entries, 400 on a malformed id, and 500 on a database failure.
func (h *CorpusHandler) DownloadAll(c *gin.Context) {
	targetID, err := parseUintID(c.Param("id"), "target id")
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	ctx := c.Request.Context()
	var entries []models.CorpusEntry
	if err := h.DB.WithContext(ctx).Where("target_id = ?", targetID).Find(&entries).Error; err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}
	// 204 lets the client distinguish "empty corpus" from an empty archive
	// without having to decompress anything.
	if len(entries) == 0 {
		c.Status(http.StatusNoContent)
		return
	}

	c.Header("Content-Type", "application/gzip")
	c.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"corpus-%d.tar.gz\"", targetID))

	// Do NOT defer-close the writers: if writing fails mid-stream after
	// headers are sent, closing would still append the tar terminator and
	// gzip trailer, handing the client a well-terminated archive that is
	// silently missing entries. Closing only on success means a failed
	// transfer surfaces as a truncated/corrupt stream on the client side.
	gw := gzip.NewWriter(c.Writer)
	tw := tar.NewWriter(gw)
	for _, entry := range entries {
		reader, err := h.Store.Get(ctx, entry.BlobKey)
		if err != nil {
			// Best-effort: skip blobs that have gone missing from the
			// store rather than aborting the whole archive.
			continue
		}
		// tar.Header needs the exact size up front, so the blob must be
		// buffered before the header is written.
		data, err := io.ReadAll(reader)
		reader.Close()
		if err != nil {
			continue
		}
		hdr := &tar.Header{
			Name: filepath.Base(entry.BlobKey),
			Mode: 0o644,
			Size: int64(len(data)),
		}
		if err := tw.WriteHeader(hdr); err != nil {
			return
		}
		if _, err := tw.Write(data); err != nil {
			return
		}
	}
	// Close tar before gzip so the terminator blocks are compressed into
	// the stream; the client may already have disconnected, so close
	// errors are deliberately ignored.
	_ = tw.Close()
	_ = gw.Close()
}

View File

@ -80,6 +80,7 @@ func NewRouter(cfg RouterConfig) (*gin.Engine, error) {
api.GET("/targets/:id", targetAPI.Detail) api.GET("/targets/:id", targetAPI.Detail)
api.POST("/targets/:id/corpus", corpusAPI.Upload) api.POST("/targets/:id/corpus", corpusAPI.Upload)
api.GET("/targets/:id/corpus", corpusAPI.List) api.GET("/targets/:id/corpus", corpusAPI.List)
api.GET("/targets/:id/corpus/download", corpusAPI.DownloadAll)
api.GET("/targets/:id/corpus/:entry_id/download", corpusAPI.Download) api.GET("/targets/:id/corpus/:entry_id/download", corpusAPI.Download)
api.POST("/runs", runAPI.Start) api.POST("/runs", runAPI.Start)
api.GET("/runs", runAPI.List) api.GET("/runs", runAPI.List)