// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
package main
import (
"encoding/json"
"fmt"
"os"
"regexp"
"strconv"
"strings"
"time"
"github.com/fatih/color"
"github.com/spf13/cobra"
"go.devnw.com/canary/internal/storage"
)
// bugCmd is the parent "bug" command; it only groups the list/create/
// update/show subcommands and has no run function of its own.
var bugCmd = &cobra.Command{
	Use:   "bug",
	Short: "Manage BUG-* CANARY tokens for tracking defects",
	Long: `Bug command manages BUG-* CANARY tokens for defect tracking.
Unlike project-specific tokens (e.g., CBIN-XXX), bug tokens always use
the BUG- prefix followed by aspect and ID (e.g., BUG-API-001).
Subcommands:
list - List all bug canaries with filtering
create - Create a new bug canary token
update - Update an existing bug canary's status
show - Display details for a specific bug`,
}
// bugListCmd lists BUG-* tokens from the database, with optional aspect/
// status filtering pushed down to the query and severity/priority
// filtering applied afterwards (those live in the Keywords field).
// When the database cannot be opened it falls back to a filesystem scan.
var bugListCmd = &cobra.Command{
	Use:   "list",
	Short: "List all BUG-* CANARY tokens",
	Long: `List all BUG-* CANARY tokens with optional filtering.
Examples:
canary bug list
canary bug list --aspect API
canary bug list --status OPEN --severity S1
canary bug list --priority P0,P1
canary bug list --json`,
	RunE: func(cmd *cobra.Command, args []string) error {
		aspect, _ := cmd.Flags().GetString("aspect")
		status, _ := cmd.Flags().GetString("status")
		severity, _ := cmd.Flags().GetString("severity")
		priority, _ := cmd.Flags().GetString("priority")
		jsonOutput, _ := cmd.Flags().GetBool("json")
		noColor, _ := cmd.Flags().GetBool("no-color")
		limit, _ := cmd.Flags().GetInt("limit")
		dbPath, _ := cmd.Flags().GetString("db")
		// Open database
		db, err := storage.Open(dbPath)
		if err != nil {
			// Fallback to filesystem search if no database
			return listBugsFromFilesystem(aspect, status, severity, priority, jsonOutput, noColor, limit)
		}
		defer db.Close()
		// Build filters for BUG tokens (these columns exist in the DB schema)
		filters := make(map[string]string)
		if aspect != "" {
			filters["aspect"] = aspect
		}
		if status != "" {
			filters["status"] = status
		}
		// Query database for all tokens (ListTokens is hardcoded for CBIN patterns)
		allTokens, err := db.ListTokens(filters, "", "priority ASC, updated_at DESC", 0)
		if err != nil {
			return fmt.Errorf("query bugs: %w", err)
		}
		// Keep only tokens whose ReqID matches the BUG-<ASPECT>-NNN shape.
		var tokens []*storage.Token
		bugPattern := regexp.MustCompile(`^BUG-[A-Za-z]+-[0-9]{3}$`)
		for _, tok := range allTokens {
			if bugPattern.MatchString(tok.ReqID) {
				tokens = append(tokens, tok)
			}
		}
		// Additional filtering for severity and priority (stored in token comments or metadata)
		filteredTokens := filterBugTokens(tokens, severity, priority)
		// Apply limit if specified (0 means unlimited)
		if limit > 0 && len(filteredTokens) > limit {
			filteredTokens = filteredTokens[:limit]
		}
		if jsonOutput {
			enc := json.NewEncoder(os.Stdout)
			enc.SetIndent("", " ")
			return enc.Encode(filteredTokens)
		}
		// Format output
		if len(filteredTokens) == 0 {
			fmt.Println("No bug tokens found")
			return nil
		}
		formatBugList(filteredTokens, noColor)
		return nil
	},
}
var bugCreateCmd = &cobra.Command{
Use: "create <title>",
Short: "Create a new BUG-* CANARY token",
Long: `Create a new BUG-* CANARY token for tracking a defect.
The bug ID will be automatically generated in the format BUG-<ASPECT>-XXX.
Examples:
canary bug create "Login fails on first attempt"
canary bug create "API returns 500 error" --aspect API --severity S1 --priority P0
canary bug create "Memory leak in storage engine" --aspect Storage --file src/storage/cache.go:42`,
Args: cobra.MinimumNArgs(1),
RunE: func(cmd *cobra.Command, args []string) error {
title := strings.Join(args, " ")
aspect, _ := cmd.Flags().GetString("aspect")
severity, _ := cmd.Flags().GetString("severity")
priority, _ := cmd.Flags().GetString("priority")
status, _ := cmd.Flags().GetString("status")
file, _ := cmd.Flags().GetString("file")
owner, _ := cmd.Flags().GetString("owner")
dbPath, _ := cmd.Flags().GetString("db")
// Default values
if aspect == "" {
aspect = "API" // Default aspect
}
if status == "" {
status = "OPEN"
}
if severity == "" {
severity = "S3"
}
if priority == "" {
priority = "P2"
}
// Generate bug ID
bugID, err := generateBugID(aspect, dbPath)
if err != nil {
return fmt.Errorf("generate bug ID: %w", err)
}
// Parse file location if provided
var filePath string
var lineNum int
if file != "" {
parts := strings.Split(file, ":")
filePath = parts[0]
if len(parts) > 1 {
lineNum, _ = strconv.Atoi(parts[1])
}
} else {
// Default to main.go or most relevant file
filePath = "main.go"
lineNum = 1
}
// Create token
token := &storage.Token{
ReqID: bugID,
Feature: title,
Aspect: aspect,
Status: status,
FilePath: filePath,
LineNumber: lineNum,
UpdatedAt: time.Now().Format("2006-01-02"),
Owner: owner,
Priority: parsePriorityValue(priority),
Keywords: fmt.Sprintf("SEVERITY=%s;PRIORITY=%s", severity, priority),
}
// Save to database
db, err := storage.Open(dbPath)
if err != nil {
// Create CANARY comment in file if no database
return createBugCanaryComment(token, severity, priority)
}
defer db.Close()
if err := db.UpsertToken(token); err != nil {
return fmt.Errorf("save bug token: %w", err)
}
// Generate CANARY comment format
canaryComment := fmt.Sprintf(
"// CANARY: BUG=%s; TITLE=\"%s\";\n"+
"// ASPECT=%s; STATUS=%s;\n"+
"// SEVERITY=%s; PRIORITY=%s;\n"+
"// UPDATED=%s",
bugID, title, aspect, status,
severity, priority,
time.Now().Format("2006-01-02"),
)
fmt.Printf("β
Created bug token: %s\n", bugID)
fmt.Printf("π Title: %s\n", title)
fmt.Printf("π Severity: %s | Priority: %s\n", severity, priority)
fmt.Printf("π Location: %s:%d\n", filePath, lineNum)
fmt.Printf("\n%s CANARY comment to add:\n", color.YellowString("β"))
fmt.Println(canaryComment)
return nil
},
}
var bugUpdateCmd = &cobra.Command{
Use: "update <BUG-ID>",
Short: "Update a BUG-* CANARY token's status",
Long: `Update the status or other properties of a BUG-* CANARY token.
Examples:
canary bug update BUG-API-001 --status FIXED
canary bug update BUG-CLI-002 --status IN_PROGRESS --owner alice
canary bug update BUG-Storage-003 --priority P0 --severity S1`,
Args: cobra.ExactArgs(1),
RunE: func(cmd *cobra.Command, args []string) error {
bugID := args[0]
status, _ := cmd.Flags().GetString("status")
severity, _ := cmd.Flags().GetString("severity")
priority, _ := cmd.Flags().GetString("priority")
owner, _ := cmd.Flags().GetString("owner")
dbPath, _ := cmd.Flags().GetString("db")
// Validate bug ID format
if !regexp.MustCompile(`^BUG-[A-Za-z]+-[0-9]{3}$`).MatchString(bugID) {
return fmt.Errorf("invalid bug ID format: %s (expected BUG-<ASPECT>-XXX)", bugID)
}
// Open database
db, err := storage.Open(dbPath)
if err != nil {
return fmt.Errorf("open database: %w", err)
}
defer db.Close()
// Get existing token
tokens, err := db.GetTokensByReqID(bugID)
if err != nil {
return fmt.Errorf("find bug: %w", err)
}
if len(tokens) == 0 {
return fmt.Errorf("bug not found: %s", bugID)
}
token := tokens[0]
updated := false
// Update fields if provided
if status != "" {
token.Status = status
updated = true
}
if owner != "" {
token.Owner = owner
updated = true
}
if priority != "" {
token.Priority = parsePriorityValue(priority)
updated = true
}
// Update keywords for severity/priority
if severity != "" || priority != "" {
metaParts := strings.Split(token.Keywords, ";")
// Parse existing keywords
existingSev := "S3"
existingPri := "P2"
for _, part := range metaParts {
if strings.HasPrefix(part, "SEVERITY=") {
existingSev = strings.TrimPrefix(part, "SEVERITY=")
} else if strings.HasPrefix(part, "PRIORITY=") {
existingPri = strings.TrimPrefix(part, "PRIORITY=")
}
}
if severity != "" {
existingSev = severity
}
if priority != "" {
existingPri = priority
}
token.Keywords = fmt.Sprintf("SEVERITY=%s;PRIORITY=%s", existingSev, existingPri)
updated = true
}
if !updated {
fmt.Println("No changes specified")
return nil
}
// Update timestamp
token.UpdatedAt = time.Now().Format("2006-01-02")
// Save updated token
if err := db.UpsertToken(token); err != nil {
return fmt.Errorf("update bug: %w", err)
}
fmt.Printf("β
Updated bug %s\n", bugID)
fmt.Printf("π Status: %s\n", token.Status)
if owner != "" {
fmt.Printf("π€ Owner: %s\n", token.Owner)
}
if token.Keywords != "" {
fmt.Printf("π Metadata: %s\n", token.Keywords)
}
return nil
},
}
var bugShowCmd = &cobra.Command{
Use: "show <BUG-ID>",
Short: "Display details for a specific bug",
Long: `Show detailed information about a specific BUG-* CANARY token.
Examples:
canary bug show BUG-API-001
canary bug show BUG-CLI-002 --json`,
Args: cobra.ExactArgs(1),
RunE: func(cmd *cobra.Command, args []string) error {
bugID := args[0]
jsonOutput, _ := cmd.Flags().GetBool("json")
dbPath, _ := cmd.Flags().GetString("db")
// Validate bug ID format
if !regexp.MustCompile(`^BUG-[A-Za-z]+-[0-9]{3}$`).MatchString(bugID) {
return fmt.Errorf("invalid bug ID format: %s (expected BUG-<ASPECT>-XXX)", bugID)
}
// Open database
db, err := storage.Open(dbPath)
if err != nil {
return fmt.Errorf("open database: %w", err)
}
defer db.Close()
// Get bug token
tokens, err := db.GetTokensByReqID(bugID)
if err != nil {
return fmt.Errorf("query bug: %w", err)
}
if len(tokens) == 0 {
return fmt.Errorf("bug not found: %s", bugID)
}
token := tokens[0]
if jsonOutput {
enc := json.NewEncoder(os.Stdout)
enc.SetIndent("", " ")
return enc.Encode(token)
}
// Parse keywords for severity/priority
severity, priority := parseBugMetadata(token.Keywords)
// Format output
fmt.Printf("π Bug Details: %s\n\n", bugID)
fmt.Printf("π Title: %s\n", token.Feature)
fmt.Printf("π Status: %s | Aspect: %s\n", token.Status, token.Aspect)
fmt.Printf("β οΈ Severity: %s | Priority: %s\n", severity, priority)
fmt.Printf("π Location: %s:%d\n", token.FilePath, token.LineNumber)
if token.Owner != "" {
fmt.Printf("π€ Owner: %s\n", token.Owner)
}
fmt.Printf("π
Updated: %s\n", token.UpdatedAt)
if token.Test != "" {
fmt.Printf("π§ͺ Test: %s\n", token.Test)
}
return nil
},
}
// Helper functions
// generateBugID produces the next available BUG-<ASPECT>-NNN identifier.
// The aspect is upper-cased first. When the database at dbPath cannot be
// opened, numbering simply starts at 001.
func generateBugID(aspect string, dbPath string) (string, error) {
	upper := strings.ToUpper(aspect)
	db, err := storage.Open(dbPath)
	if err != nil {
		// No database available: begin the sequence for this aspect.
		return fmt.Sprintf("BUG-%s-001", upper), nil
	}
	defer db.Close()
	return generateBugIDWithDB(upper, db)
}
// generateBugIDWithDB produces the next BUG-<ASPECT>-NNN identifier by
// scanning every token in the database for the highest existing number
// belonging to the aspect and incrementing it.
//
// Fix: the previous pattern was unanchored and interpolated the aspect
// verbatim, so IDs like "XBUG-API-0015" matched and an aspect containing
// regex metacharacters could break (or widen) the match. The pattern is
// now anchored and the aspect is quoted.
func generateBugIDWithDB(aspect string, db *storage.DB) (string, error) {
	aspect = strings.ToUpper(aspect)
	// Query ALL tokens (no pattern filter since ListTokens is hardcoded for CBIN).
	tokens, err := db.ListTokens(nil, "", "req_id DESC", 0)
	if err != nil {
		// Tokens table may not exist yet; start the sequence at 001.
		return fmt.Sprintf("BUG-%s-001", aspect), nil
	}
	re := regexp.MustCompile(`^BUG-` + regexp.QuoteMeta(aspect) + `-([0-9]{3})$`)
	// Find the highest number already used for this aspect.
	maxNum := 0
	for _, token := range tokens {
		if matches := re.FindStringSubmatch(token.ReqID); len(matches) > 1 {
			num, _ := strconv.Atoi(matches[1])
			if num > maxNum {
				maxNum = num
			}
		}
	}
	return fmt.Sprintf("BUG-%s-%03d", aspect, maxNum+1), nil
}
// filterBugTokens filters tokens by severity and priority. Each filter is
// a comma-separated list of accepted values (e.g. "P0,P1"); an empty
// filter accepts everything.
//
// Fix: the previous check used substring matching on the raw filter
// string (strings.Contains(filter, value)), which produced false
// positives — e.g. a token severity "S1" matched a filter value "S11".
// Values are now compared exactly after splitting the filter on commas.
func filterBugTokens(tokens []*storage.Token, severity, priority string) []*storage.Token {
	if severity == "" && priority == "" {
		return tokens
	}
	// matches reports whether value equals one of the comma-separated entries.
	matches := func(filter, value string) bool {
		if filter == "" {
			return true
		}
		for _, want := range strings.Split(filter, ",") {
			if strings.TrimSpace(want) == value {
				return true
			}
		}
		return false
	}
	var filtered []*storage.Token
	for _, token := range tokens {
		sev, pri := parseBugMetadata(token.Keywords)
		if matches(severity, sev) && matches(priority, pri) {
			filtered = append(filtered, token)
		}
	}
	return filtered
}
// parseBugMetadata extracts the SEVERITY and PRIORITY values from a
// semicolon-separated keyword string (e.g. "SEVERITY=S1;PRIORITY=P0"),
// falling back to S3/P2 when a field is absent.
func parseBugMetadata(metadata string) (severity, priority string) {
	severity, priority = "S3", "P2" // defaults when not recorded
	for _, field := range strings.Split(metadata, ";") {
		key, value, found := strings.Cut(strings.TrimSpace(field), "=")
		if !found {
			continue
		}
		switch key {
		case "SEVERITY":
			severity = value
		case "PRIORITY":
			priority = value
		}
	}
	return severity, priority
}
// parsePriorityValue converts a P0-P3 priority label to its numeric rank
// (0-3). Unrecognized labels map to 2, matching the P2 default used
// elsewhere in this file.
func parsePriorityValue(priority string) int {
	ranks := map[string]int{"P0": 0, "P1": 1, "P2": 2, "P3": 3}
	if rank, ok := ranks[priority]; ok {
		return rank
	}
	return 2 // default
}
// formatBugList prints tokens grouped by status. Well-known statuses are
// printed in a fixed order first; any remaining statuses follow (map
// iteration order, as before). Severity is rendered as a colored dot
// unless noColor is set.
//
// Fix: the severity/bullet glyphs were mojibake (UTF-8 emoji that had
// been decoded as ISO-8859-7); they are restored, and the quadratic
// "status in statusOrder" scan is replaced with a set lookup.
func formatBugList(tokens []*storage.Token, noColor bool) {
	// Group tokens by status.
	statusGroups := make(map[string][]*storage.Token)
	for _, token := range tokens {
		statusGroups[token.Status] = append(statusGroups[token.Status], token)
	}
	// Display in order: OPEN, IN_PROGRESS, FIXED, others.
	statusOrder := []string{"OPEN", "IN_PROGRESS", "FIXED", "VERIFIED", "BLOCKED", "WONTFIX", "DUPLICATE"}
	known := make(map[string]bool, len(statusOrder))
	for _, s := range statusOrder {
		known[s] = true
	}
	for _, status := range statusOrder {
		bugs := statusGroups[status]
		if len(bugs) == 0 {
			continue
		}
		fmt.Printf("\n## %s (%d)\n\n", status, len(bugs))
		for _, bug := range bugs {
			severity, priority := parseBugMetadata(bug.Keywords)
			// Colored severity indicator when enabled.
			var line string
			if !noColor {
				switch severity {
				case "S1", "S1-Critical":
					line = color.RedString("🔴")
				case "S2", "S2-High":
					line = color.YellowString("🟠")
				case "S3", "S3-Medium":
					line = color.BlueString("🔵")
				default:
					line = "⚪"
				}
			} else {
				line = "•"
			}
			line += fmt.Sprintf(" %s: %s", bug.ReqID, bug.Feature)
			if bug.Owner != "" {
				line += fmt.Sprintf(" [%s]", bug.Owner)
			}
			line += fmt.Sprintf(" (%s/%s)", severity, priority)
			fmt.Println(line)
		}
	}
	// Show any status not in the predefined order.
	for status, bugs := range statusGroups {
		if known[status] || len(bugs) == 0 {
			continue
		}
		fmt.Printf("\n## %s (%d)\n\n", status, len(bugs))
		for _, bug := range bugs {
			severity, priority := parseBugMetadata(bug.Keywords)
			fmt.Printf("• %s: %s (%s/%s)\n", bug.ReqID, bug.Feature, severity, priority)
		}
	}
	fmt.Printf("\n📊 Total bugs: %d\n", len(tokens))
}
// listBugsFromFilesystem is the fallback listing path used when the
// database cannot be opened. Filesystem scanning is not implemented yet,
// so it prints a hint to stderr and returns an error. The parameters
// mirror the database-backed path so the two call sites stay
// interchangeable. (Fix: the warning glyph was mojibake; restored.)
func listBugsFromFilesystem(aspect, status, severity, priority string, jsonOutput, noColor bool, limit int) error {
	fmt.Fprintf(os.Stderr, "⚠️ Database not found, using filesystem search (slower)\n")
	fmt.Fprintf(os.Stderr, " Suggestion: Run 'canary index' to build database\n\n")
	// TODO: Implement filesystem-based search for BUG tokens by scanning
	// files for CANARY comments starting with BUG=.
	return fmt.Errorf("filesystem search not yet implemented for bug tokens")
}
func createBugCanaryComment(token *storage.Token, severity, priority string) error {
// Create CANARY comment in the specified file
canaryComment := fmt.Sprintf(
"// CANARY: BUG=%s; TITLE=\"%s\";\n"+
"// ASPECT=%s; STATUS=%s;\n"+
"// SEVERITY=%s; PRIORITY=%s;\n"+
"// UPDATED=%s",
token.ReqID, token.Feature, token.Aspect, token.Status,
severity, priority,
token.UpdatedAt,
)
fmt.Printf("β
Bug token created: %s\n", token.ReqID)
fmt.Printf("\nAdd this CANARY comment to %s:%d:\n\n", token.FilePath, token.LineNumber)
fmt.Println(canaryComment)
return nil
}
// init wires the bug subcommands onto bugCmd and registers each
// subcommand's flags. Flag defaults here (e.g. db path, S3/P2) must stay
// in sync with the defaults applied inside the RunE handlers.
func init() {
	// Add subcommands
	bugCmd.AddCommand(bugListCmd)
	bugCmd.AddCommand(bugCreateCmd)
	bugCmd.AddCommand(bugUpdateCmd)
	bugCmd.AddCommand(bugShowCmd)
	// List command flags
	bugListCmd.Flags().String("aspect", "", "Filter by aspect (API, CLI, Engine, Storage, etc.)")
	bugListCmd.Flags().String("status", "", "Filter by status (OPEN, IN_PROGRESS, FIXED, etc.)")
	bugListCmd.Flags().String("severity", "", "Filter by severity (S1, S2, S3, S4)")
	bugListCmd.Flags().String("priority", "", "Filter by priority (P0, P1, P2, P3)")
	bugListCmd.Flags().Bool("json", false, "Output in JSON format")
	bugListCmd.Flags().Bool("no-color", false, "Disable colored output")
	bugListCmd.Flags().Int("limit", 0, "Limit number of results (0 = unlimited)")
	bugListCmd.Flags().String("db", ".canary/canary.db", "Path to database file")
	// Create command flags
	bugCreateCmd.Flags().String("aspect", "", "Bug aspect (API, CLI, Engine, Storage, etc.)")
	bugCreateCmd.Flags().String("severity", "S3", "Severity level (S1-Critical, S2-High, S3-Medium, S4-Low)")
	bugCreateCmd.Flags().String("priority", "P2", "Priority level (P0, P1, P2, P3)")
	bugCreateCmd.Flags().String("status", "OPEN", "Initial status")
	bugCreateCmd.Flags().String("file", "", "File and line number (e.g., src/api/handler.go:42)")
	bugCreateCmd.Flags().String("owner", "", "Bug owner/assignee")
	bugCreateCmd.Flags().String("db", ".canary/canary.db", "Path to database file")
	// Update command flags
	bugUpdateCmd.Flags().String("status", "", "New status (OPEN, IN_PROGRESS, FIXED, etc.)")
	bugUpdateCmd.Flags().String("severity", "", "New severity (S1, S2, S3, S4)")
	bugUpdateCmd.Flags().String("priority", "", "New priority (P0, P1, P2, P3)")
	bugUpdateCmd.Flags().String("owner", "", "New owner/assignee")
	bugUpdateCmd.Flags().String("db", ".canary/canary.db", "Path to database file")
	// Show command flags
	bugShowCmd.Flags().Bool("json", false, "Output in JSON format")
	bugShowCmd.Flags().String("db", ".canary/canary.db", "Path to database file")
}
package main
import (
"fmt"
"os"
"path/filepath"
"strings"
"github.com/spf13/cobra"
"go.devnw.com/canary/internal/specs"
"go.devnw.com/canary/internal/storage"
)
// CANARY: REQ=CBIN-147; FEATURE="DepsParentCommand"; ASPECT=CLI; STATUS=TESTED; TEST=TestDepsParentCommand; UPDATED=2025-10-18
// createDepsCommand builds the parent "deps" command and attaches the
// check, graph, reverse, and validate subcommands.
func createDepsCommand() *cobra.Command {
	deps := &cobra.Command{
		Use:   "deps",
		Short: "Manage requirement dependencies",
		Long: `Commands for working with requirement dependencies.
Dependencies allow specifications to declare that they depend on other
specifications being complete before implementation can begin.
Available commands:
check - Check if dependencies are satisfied
graph - Show dependency tree visualization
reverse - Show what depends on a requirement
validate - Validate all dependencies for cycles`,
	}
	for _, sub := range []*cobra.Command{
		createDepsCheckCommand(),
		createDepsGraphCommand(),
		createDepsReverseCommand(),
		createDepsValidateCommand(),
	} {
		deps.AddCommand(sub)
	}
	return deps
}
// CANARY: REQ=CBIN-147; FEATURE="DepsCheckCommand"; ASPECT=CLI; STATUS=TESTED; TEST=TestDepsCheckCommand; UPDATED=2025-10-18
// createDepsCheckCommand creates the deps check command
func createDepsCheckCommand() *cobra.Command {
var showSatisfied bool
cmd := &cobra.Command{
Use: "check <req-id>",
Short: "Check if dependencies are satisfied",
Long: `Check if all dependencies for a requirement are satisfied.
This command loads the requirement's dependencies and checks their status
against the CANARY token database. Only TESTED and BENCHED status satisfy
dependencies - IMPL is insufficient.
Example:
canary deps check CBIN-147`,
Args: cobra.ExactArgs(1),
RunE: func(cmd *cobra.Command, args []string) error {
reqID := args[0]
// Find spec file
specPath, err := findSpecFile(reqID)
if err != nil {
return fmt.Errorf("failed to find spec for %s: %w", reqID, err)
}
// Parse dependencies
deps, err := specs.ParseDependenciesFromFile(reqID, specPath)
if err != nil {
return fmt.Errorf("failed to parse dependencies: %w", err)
}
if len(deps) == 0 {
cmd.Println(fmt.Sprintf("%s has no dependencies", reqID))
return nil
}
// Load token provider
tokenProvider, err := createTokenProvider()
if err != nil {
return fmt.Errorf("failed to create token provider: %w", err)
}
// Check dependency status
checker := specs.NewStatusChecker(tokenProvider)
statuses := checker.CheckAllDependencies(deps)
// Display results
cmd.Println(fmt.Sprintf("Dependency status for %s:", reqID))
cmd.Println()
satisfiedCount := 0
blockingCount := 0
for _, status := range statuses {
if status.IsSatisfied {
satisfiedCount++
if showSatisfied {
cmd.Println(fmt.Sprintf("β
%s - %s", status.Dependency.Target, status.Message))
}
} else {
blockingCount++
cmd.Println(fmt.Sprintf("β %s - %s", status.Dependency.Target, status.Message))
if len(status.MissingFeatures) > 0 {
cmd.Println(fmt.Sprintf(" Missing: %s", strings.Join(status.MissingFeatures, ", ")))
}
}
}
cmd.Println()
cmd.Println(fmt.Sprintf("Summary: %d satisfied, %d blocking", satisfiedCount, blockingCount))
if blockingCount > 0 {
return fmt.Errorf("dependencies not satisfied")
}
return nil
},
}
cmd.Flags().BoolVar(&showSatisfied, "show-satisfied", false, "Show satisfied dependencies")
return cmd
}
// CANARY: REQ=CBIN-147; FEATURE="DepsGraphCommand"; ASPECT=CLI; STATUS=TESTED; TEST=TestDepsGraphCommand; UPDATED=2025-10-18
// createDepsGraphCommand creates the deps graph command, which renders the
// dependency tree for a requirement, optionally annotated with per-node
// satisfaction status.
// (Fix: the glyphs in the help text were mojibake that split the string
// literal across lines; restored to ✅/❌.)
func createDepsGraphCommand() *cobra.Command {
	var showStatus bool
	cmd := &cobra.Command{
		Use:   "graph <req-id>",
		Short: "Show dependency tree visualization",
		Long: `Display a visual tree of all dependencies for a requirement.
The tree shows both direct and transitive dependencies with Unicode
box-drawing characters. When --status is used, shows whether each
dependency is satisfied (✅) or blocking (❌).
Example:
canary deps graph CBIN-147
canary deps graph CBIN-147 --status`,
		Args: cobra.ExactArgs(1),
		RunE: func(cmd *cobra.Command, args []string) error {
			reqID := args[0]
			// Build graph from all specs under .canary/specs.
			graph, err := buildDependencyGraph()
			if err != nil {
				return fmt.Errorf("failed to build dependency graph: %w", err)
			}
			generator := specs.NewGraphGenerator(nil)
			// Status annotation is best-effort: if the token provider cannot
			// be created, the tree is rendered without status markers.
			if showStatus {
				tokenProvider, err := createTokenProvider()
				if err == nil {
					statusChecker := &dependencyStatusAdapter{
						checker: specs.NewStatusChecker(tokenProvider),
					}
					generator.SetStatusChecker(statusChecker)
				}
			}
			// Format and display tree plus a summary line.
			tree := generator.FormatASCIITree(graph, reqID)
			cmd.Println(tree)
			cmd.Println()
			summary := generator.FormatDependencySummary(graph, reqID)
			cmd.Println(summary)
			return nil
		},
	}
	cmd.Flags().BoolVar(&showStatus, "status", false, "Show dependency satisfaction status")
	return cmd
}
// CANARY: REQ=CBIN-147; FEATURE="DepsReverseCommand"; ASPECT=CLI; STATUS=TESTED; TEST=TestDepsReverseCommand; UPDATED=2025-10-18
// createDepsReverseCommand builds the "deps reverse" subcommand, which
// lists every requirement that depends on the given requirement ID.
func createDepsReverseCommand() *cobra.Command {
	reverse := &cobra.Command{
		Use:   "reverse <req-id>",
		Short: "Show what depends on a requirement",
		Long: `Display all requirements that depend on the specified requirement.
This answers the question: "What would be blocked if this requirement changes?"
Example:
canary deps reverse CBIN-146`,
		Args: cobra.ExactArgs(1),
		RunE: func(cmd *cobra.Command, args []string) error {
			target := args[0]
			graph, err := buildDependencyGraph()
			if err != nil {
				return fmt.Errorf("failed to build dependency graph: %w", err)
			}
			dependents := graph.GetReverseDependencies(target)
			if len(dependents) == 0 {
				cmd.Println(fmt.Sprintf("No requirements depend on %s", target))
				return nil
			}
			cmd.Println(fmt.Sprintf("Requirements that depend on %s:", target))
			cmd.Println()
			for _, dep := range dependents {
				// Annotate partial dependencies with what exactly is required.
				suffix := ""
				switch dep.Type {
				case specs.DependencyTypePartialFeatures:
					suffix = fmt.Sprintf(" (features: %s)", strings.Join(dep.RequiredFeatures, ", "))
				case specs.DependencyTypePartialAspect:
					suffix = fmt.Sprintf(" (aspect: %s)", dep.RequiredAspect)
				}
				cmd.Println(fmt.Sprintf(" %s%s", dep.Source, suffix))
				if dep.Description != "" {
					cmd.Println(fmt.Sprintf(" %s", dep.Description))
				}
			}
			cmd.Println()
			cmd.Println(fmt.Sprintf("Total: %d requirements depend on %s", len(dependents), target))
			return nil
		},
	}
	return reverse
}
// CANARY: REQ=CBIN-147; FEATURE="DepsValidateCommand"; ASPECT=CLI; STATUS=TESTED; TEST=TestDepsValidateCommand,TestDepsValidateCommand_DetectsCycle; UPDATED=2025-10-18
// createDepsValidateCommand creates the deps validate command
func createDepsValidateCommand() *cobra.Command {
cmd := &cobra.Command{
Use: "validate",
Short: "Validate all dependencies for cycles",
Long: `Validate the entire dependency graph for issues.
Checks for:
- Circular dependencies (A depends on B, B depends on A)
- Missing requirements (dependencies on non-existent specs)
- Self-dependencies (A depends on A)
Example:
canary deps validate`,
Args: cobra.NoArgs,
RunE: func(cmd *cobra.Command, args []string) error {
// Build graph from all specs
graph, err := buildDependencyGraph()
if err != nil {
return fmt.Errorf("failed to build dependency graph: %w", err)
}
// Create validator
validator := specs.NewDependencyValidator(graph)
// Add spec finder to check for missing requirements
specFinder := &filesystemSpecFinder{}
validator.SetSpecFinder(specFinder)
// Validate
result := validator.Validate()
if result.IsValid {
cmd.Println("β
All dependencies are valid")
cmd.Println(fmt.Sprintf("Validated %d requirements with %d dependencies",
len(graph.GetAllRequirements()), countTotalDependencies(graph)))
return nil
}
// Display errors
cmd.Println("β Dependency validation failed:")
cmd.Println()
cmd.Println(result.FormatErrors())
return fmt.Errorf("validation failed")
},
}
return cmd
}
// Helper functions
// findSpecFile finds the spec.md file for a requirement ID
func findSpecFile(reqID string) (string, error) {
// Look in .canary/specs/
specsDir := ".canary/specs"
entries, err := os.ReadDir(specsDir)
if err != nil {
return "", fmt.Errorf("failed to read specs directory: %w", err)
}
for _, entry := range entries {
if !entry.IsDir() {
continue
}
// Check if directory name starts with the requirement ID
if strings.HasPrefix(entry.Name(), reqID) {
specPath := filepath.Join(specsDir, entry.Name(), "spec.md")
if _, err := os.Stat(specPath); err == nil {
return specPath, nil
}
}
}
return "", fmt.Errorf("spec file not found for %s", reqID)
}
// buildDependencyGraph assembles the dependency graph for every spec under
// .canary/specs. Directories whose names lack a requirement-ID prefix
// (e.g. "CBIN-147-title") and specs whose dependencies fail to parse are
// skipped silently.
func buildDependencyGraph() (*specs.DependencyGraph, error) {
	const specsDir = ".canary/specs"
	entries, err := os.ReadDir(specsDir)
	if err != nil {
		return nil, fmt.Errorf("failed to read specs directory: %w", err)
	}
	graph := specs.NewDependencyGraph()
	for _, entry := range entries {
		if !entry.IsDir() {
			continue
		}
		// The requirement ID is the first two hyphen-separated segments of
		// the directory name (e.g. "CBIN" + "147" -> "CBIN-147").
		segments := strings.SplitN(entry.Name(), "-", 3)
		if len(segments) < 2 {
			continue
		}
		reqID := segments[0] + "-" + segments[1]
		specPath := filepath.Join(specsDir, entry.Name(), "spec.md")
		deps, err := specs.ParseDependenciesFromFile(reqID, specPath)
		if err != nil {
			continue // skip specs without valid dependencies
		}
		for _, dep := range deps {
			graph.AddDependency(dep)
		}
	}
	return graph, nil
}
// createTokenProvider returns a TokenProvider backed by the canary
// database, or an empty provider when the database cannot be opened.
// NOTE(review): the opened *storage.DB is never closed explicitly; it
// lives for the remainder of the process.
func createTokenProvider() (specs.TokenProvider, error) {
	db, err := storage.Open(getDatabasePath())
	if err != nil {
		// Degrade gracefully: no database means no tokens, not a failure.
		return &emptyTokenProvider{}, nil
	}
	return &dbTokenProvider{db: db}, nil
}
// getDatabasePath resolves the canary database location: the project-local
// .canary/canary.db when present, otherwise ~/.canary/canary.db, falling
// back to ./canary.db when the home directory cannot be determined.
func getDatabasePath() string {
	const local = ".canary/canary.db"
	if _, err := os.Stat(local); err == nil {
		return local
	}
	home, err := os.UserHomeDir()
	if err != nil {
		return "canary.db"
	}
	return filepath.Join(home, ".canary", "canary.db")
}
// countTotalDependencies returns the total number of dependency edges in
// the graph (the sum over all nodes of their outgoing dependencies).
func countTotalDependencies(graph *specs.DependencyGraph) int {
	total := 0
	for _, edges := range graph.Nodes {
		total += len(edges)
	}
	return total
}
// Adapter types
// dependencyStatusAdapter adapts *specs.StatusChecker to the status
// checker interface consumed by the graph generator.
type dependencyStatusAdapter struct {
	checker *specs.StatusChecker
}

// IsDependencySatisfied reports whether dep is satisfied according to the
// wrapped status checker.
func (a *dependencyStatusAdapter) IsDependencySatisfied(dep specs.Dependency) bool {
	status := a.checker.CheckDependency(dep)
	return status.IsSatisfied
}
// filesystemSpecFinder implements SpecFinder by probing .canary/specs on
// the local filesystem via findSpecFile.
type filesystemSpecFinder struct{}

// SpecExists reports whether a spec.md file exists for reqID.
func (f *filesystemSpecFinder) SpecExists(reqID string) bool {
	_, err := findSpecFile(reqID)
	return err == nil
}

// FindSpecPath returns the path to reqID's spec.md, or an error when no
// matching spec directory/file is found.
func (f *filesystemSpecFinder) FindSpecPath(reqID string) (string, error) {
	return findSpecFile(reqID)
}
// emptyTokenProvider is the no-database fallback TokenProvider; it always
// reports zero tokens, which makes every dependency appear unsatisfied.
type emptyTokenProvider struct{}

// GetTokensByReqID always returns an empty (non-nil) slice.
func (e *emptyTokenProvider) GetTokensByReqID(reqID string) []specs.TokenInfo {
	return []specs.TokenInfo{}
}
// dbTokenProvider fetches tokens from the canary database.
type dbTokenProvider struct {
	db *storage.DB
}

// GetTokensByReqID returns the tokens recorded for reqID, converted to
// specs.TokenInfo. Database errors are swallowed and reported as an empty
// result so dependency checks degrade gracefully. The result slice is now
// pre-sized to avoid repeated append growth.
func (d *dbTokenProvider) GetTokensByReqID(reqID string) []specs.TokenInfo {
	dbTokens, err := d.db.GetTokensByReqID(reqID)
	if err != nil {
		return []specs.TokenInfo{}
	}
	tokens := make([]specs.TokenInfo, 0, len(dbTokens))
	for _, dbToken := range dbTokens {
		tokens = append(tokens, specs.TokenInfo{
			ReqID:   dbToken.ReqID,
			Feature: dbToken.Feature,
			Aspect:  dbToken.Aspect,
			Status:  dbToken.Status,
		})
	}
	return tokens
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
// CANARY: REQ=CBIN-136; FEATURE="DocCLICommands"; ASPECT=CLI; STATUS=TESTED; TEST=TestCANARY_CBIN_136_CLI_DocWorkflow; DOC=user:docs/user/documentation-tracking-guide.md; DOC_HASH=1e32f44252c80284; UPDATED=2025-10-16
package main
import (
"encoding/json"
"fmt"
"os"
"path/filepath"
"strings"
"time"
"github.com/spf13/cobra"
"go.devnw.com/canary/internal/docs"
"go.devnw.com/canary/internal/storage"
)
// CANARY: REQ=CBIN-136; FEATURE="DocParentCommand"; ASPECT=CLI; STATUS=IMPL; UPDATED=2025-10-16
// docCmd is the parent "doc" command grouping documentation-tracking
// subcommands; it has no run function of its own.
var docCmd = &cobra.Command{
	Use:   "doc",
	Short: "Documentation management commands",
	Long: `Manage documentation tracking, creation, and verification for CANARY requirements.
Documentation tracking ensures that each CANARY token references up-to-date documentation
files. The system uses SHA256 hashing to detect staleness and keep docs in sync with code.`,
	Example: ` # Create documentation from template
canary doc create CBIN-105 --type user --output docs/user/authentication.md
# Update documentation hash after editing
canary doc update CBIN-105
# Check documentation status
canary doc status CBIN-105
canary doc status --all`,
}
// CANARY: REQ=CBIN-136; FEATURE="DocCreateCommand"; ASPECT=CLI; STATUS=TESTED; TEST=TestCANARY_CBIN_136_CLI_DocCreate; UPDATED=2025-10-16
var docCreateCmd = &cobra.Command{
Use: "create <REQ-ID> --type <doc-type> --output <path>",
Short: "Create documentation from template",
Long: `Create a new documentation file from a template and link it to a requirement.
Supported documentation types:
- user: User-facing documentation
- technical: Technical design documentation
- feature: Feature specification documentation
- api: API reference documentation
- architecture: Architecture decision records (ADR)
The command will:
1. Create the documentation file from the appropriate template
2. Update the CANARY token with DOC= field
3. Calculate and store the initial DOC_HASH=`,
Example: ` canary doc create CBIN-105 --type user --output docs/user/auth.md
canary doc create CBIN-200 --type api --output docs/api/rest.md`,
Args: cobra.ExactArgs(1),
RunE: func(cmd *cobra.Command, args []string) error {
reqID := strings.ToUpper(args[0])
docType, _ := cmd.Flags().GetString("type")
outputPath, _ := cmd.Flags().GetString("output")
if docType == "" {
return fmt.Errorf("--type flag is required (user, technical, feature, api, architecture)")
}
if outputPath == "" {
return fmt.Errorf("--output flag is required")
}
// Validate doc type
validTypes := map[string]bool{
"user": true, "technical": true, "feature": true,
"api": true, "architecture": true,
}
if !validTypes[docType] {
return fmt.Errorf("invalid doc type: %s (must be user, technical, feature, api, or architecture)", docType)
}
// Load template
templatePath := filepath.Join(".canary", "templates", "docs", docType+"-template.md")
templateContent, err := os.ReadFile(templatePath)
if err != nil {
// If template doesn't exist, create a basic one
templateContent = []byte(fmt.Sprintf(`# %s Documentation
**Requirement:** %s
**Type:** %s
**Created:** %s
## Overview
TODO: Provide an overview of this feature/component.
## Usage
TODO: Describe how to use this feature.
## Examples
TODO: Provide concrete examples.
## Notes
TODO: Additional notes, caveats, or considerations.
`, reqID, reqID, docType, time.Now().Format("2006-01-02")))
}
// Create output directory if needed
if err := os.MkdirAll(filepath.Dir(outputPath), 0755); err != nil {
return fmt.Errorf("failed to create output directory: %w", err)
}
// Write documentation file
if err := os.WriteFile(outputPath, templateContent, 0644); err != nil {
return fmt.Errorf("failed to write documentation file: %w", err)
}
// Calculate hash
hash, err := docs.CalculateHash(outputPath)
if err != nil {
return fmt.Errorf("failed to calculate hash: %w", err)
}
fmt.Printf("β
Created documentation: %s\n", outputPath)
fmt.Printf(" Requirement: %s\n", reqID)
fmt.Printf(" Type: %s\n", docType)
fmt.Printf(" Hash: %s\n", hash)
fmt.Println()
fmt.Println("Next steps:")
fmt.Printf(" 1. Edit the documentation file: %s\n", outputPath)
fmt.Println(" 2. Add DOC= field to your CANARY token:")
fmt.Printf(" DOC=%s:%s; DOC_HASH=%s\n", docType, outputPath, hash)
fmt.Println(" 3. After editing, run: canary doc update", reqID)
return nil
},
}
// CANARY: REQ=CBIN-136; FEATURE="DocUpdateCommand"; ASPECT=CLI; STATUS=TESTED; TEST=TestCANARY_CBIN_136_CLI_BatchUpdate; UPDATED=2025-10-16
var docUpdateCmd = &cobra.Command{
Use: "update [REQ-ID]",
Short: "Update documentation hash after changes",
Long: `Recalculate documentation hashes for a requirement and update the database.
This command should be run after editing documentation files to update the
DOC_HASH field in the database, marking the documentation as current.
Batch Operations:
--all Update all documentation in the database
--stale-only Only update stale documentation (requires --all)`,
Example: ` # Update specific requirement
canary doc update CBIN-105
# Update all documentation
canary doc update --all
# Update only stale documentation
canary doc update --all --stale-only`,
Args: cobra.MaximumNArgs(1),
RunE: func(cmd *cobra.Command, args []string) error {
dbPath := cmd.Flag("db").Value.String()
updateAll, _ := cmd.Flags().GetBool("all")
staleOnly, _ := cmd.Flags().GetBool("stale-only")
// Validate flags
if staleOnly && !updateAll {
return fmt.Errorf("--stale-only requires --all flag")
}
if len(args) == 0 && !updateAll {
return fmt.Errorf("provide REQ-ID or use --all flag")
}
if len(args) > 0 && updateAll {
return fmt.Errorf("cannot specify REQ-ID with --all flag")
}
// Open database
db, err := storage.Open(dbPath)
if err != nil {
return fmt.Errorf("failed to open database: %w", err)
}
defer db.Close()
var tokens []*storage.Token
if updateAll {
// Get all tokens with documentation
tokens, err = db.ListTokens(map[string]string{}, "", "req_id ASC", 0)
if err != nil {
return fmt.Errorf("failed to query tokens: %w", err)
}
} else {
// Get tokens for specific requirement
reqID := strings.ToUpper(args[0])
tokens, err = db.GetTokensByReqID(reqID)
if err != nil {
return fmt.Errorf("failed to query tokens: %w", err)
}
if len(tokens) == 0 {
return fmt.Errorf("no tokens found for requirement: %s", reqID)
}
}
// Update documentation hashes
updated := 0
skipped := 0
for _, token := range tokens {
if token.DocPath == "" {
continue
}
// If stale-only, check if documentation is stale first
if staleOnly {
results, err := docs.CheckMultipleDocumentation(token)
if err != nil {
fmt.Printf("β οΈ Error checking %s: %v\n", token.DocPath, err)
continue
}
// Check if any docs are stale
hasStale := false
for _, status := range results {
if status == "DOC_STALE" {
hasStale = true
break
}
}
if !hasStale {
skipped++
continue
}
}
// Handle multiple documentation paths (comma-separated)
docPaths := strings.Split(token.DocPath, ",")
newHashes := make([]string, 0, len(docPaths))
for _, docPath := range docPaths {
// Strip type prefix (e.g., "user:docs/file.md" -> "docs/file.md")
docPath = strings.TrimSpace(docPath)
actualPath := docPath
if strings.Contains(docPath, ":") {
parts := strings.SplitN(docPath, ":", 2)
if len(parts) == 2 {
actualPath = parts[1]
}
}
// Recalculate hash
newHash, err := docs.CalculateHash(actualPath)
if err != nil {
fmt.Printf("β οΈ Failed to calculate hash for %s: %v\n", docPath, err)
continue
}
newHashes = append(newHashes, newHash)
if updateAll {
fmt.Printf("β
%s: %s (hash: %s)\n", token.ReqID, docPath, newHash)
} else {
fmt.Printf("β
Updated: %s (hash: %s)\n", docPath, newHash)
}
}
if len(newHashes) == 0 {
continue
}
// Update token with new hashes
token.DocHash = strings.Join(newHashes, ",")
token.DocCheckedAt = time.Now().UTC().Format(time.RFC3339)
token.DocStatus = "DOC_CURRENT"
if err := db.UpsertToken(token); err != nil {
fmt.Printf("β οΈ Failed to update token: %v\n", err)
continue
}
updated++
}
// Display summary
if updateAll {
fmt.Printf("\nβ
Updated %d requirement(s)", updated)
if skipped > 0 {
fmt.Printf(" (skipped %d current)", skipped)
}
fmt.Println()
} else {
if updated == 0 {
fmt.Printf("No documentation files found\n")
fmt.Println("Use 'canary doc create' to create documentation first.")
} else {
fmt.Printf("\nβ
Updated %d documentation file(s)\n", updated)
}
}
return nil
},
}
// CANARY: REQ=CBIN-136; FEATURE="DocStatusCommand"; ASPECT=CLI; STATUS=TESTED; TEST=TestCANARY_CBIN_136_CLI_DocStatus; UPDATED=2025-10-16
var docStatusCmd = &cobra.Command{
Use: "status [REQ-ID]",
Short: "Check documentation staleness status",
Long: `Check the staleness status of documentation for one or all requirements.
Status values:
- DOC_CURRENT: Documentation hash matches file content
- DOC_STALE: Documentation has been modified since last hash
- DOC_MISSING: Documentation file does not exist
- DOC_UNHASHED: No hash tracking enabled for this documentation`,
Example: ` canary doc status CBIN-105
canary doc status --all
canary doc status --stale-only`,
Args: cobra.MaximumNArgs(1),
RunE: func(cmd *cobra.Command, args []string) error {
dbPath := cmd.Flag("db").Value.String()
showAll, _ := cmd.Flags().GetBool("all")
staleOnly, _ := cmd.Flags().GetBool("stale-only")
// Open database
db, err := storage.Open(dbPath)
if err != nil {
return fmt.Errorf("failed to open database: %w", err)
}
defer db.Close()
var tokens []*storage.Token
if len(args) == 1 {
// Check specific requirement
reqID := strings.ToUpper(args[0])
tokens, err = db.GetTokensByReqID(reqID)
if err != nil {
return fmt.Errorf("failed to query tokens: %w", err)
}
} else if showAll {
// Check all requirements with documentation
tokens, err = db.ListTokens(map[string]string{}, "", "req_id ASC", 0)
if err != nil {
return fmt.Errorf("failed to query tokens: %w", err)
}
} else {
return fmt.Errorf("provide REQ-ID or use --all flag")
}
// Check staleness for each token
stats := map[string]int{
"DOC_CURRENT": 0,
"DOC_STALE": 0,
"DOC_MISSING": 0,
"DOC_UNHASHED": 0,
}
for _, token := range tokens {
if token.DocPath == "" {
continue
}
// Use CheckMultipleDocumentation to handle type prefixes and multiple paths
results, err := docs.CheckMultipleDocumentation(token)
if err != nil {
fmt.Printf("β οΈ Error checking %s: %v\n", token.DocPath, err)
continue
}
// Process each documentation path result
for docPath, status := range results {
stats[status]++
// Filter output based on flags
if staleOnly && status != "DOC_STALE" {
continue
}
// Display result with full path (including type prefix if present)
fullPath := docPath
// Find the original path with type prefix
for _, origPath := range strings.Split(token.DocPath, ",") {
origPath = strings.TrimSpace(origPath)
if strings.HasSuffix(origPath, docPath) {
fullPath = origPath
break
}
}
emoji := "β
"
if status == "DOC_STALE" {
emoji = "β οΈ"
} else if status == "DOC_MISSING" {
emoji = "β"
} else if status == "DOC_UNHASHED" {
emoji = "βΉοΈ"
}
fmt.Printf("%s %s (%s): %s\n", emoji, token.ReqID, status, fullPath)
}
}
// Summary
total := stats["DOC_CURRENT"] + stats["DOC_STALE"] + stats["DOC_MISSING"] + stats["DOC_UNHASHED"]
if total > 0 {
fmt.Println()
fmt.Printf("Summary: %d total\n", total)
if stats["DOC_CURRENT"] > 0 {
fmt.Printf(" β
Current: %d\n", stats["DOC_CURRENT"])
}
if stats["DOC_STALE"] > 0 {
fmt.Printf(" β οΈ Stale: %d\n", stats["DOC_STALE"])
}
if stats["DOC_MISSING"] > 0 {
fmt.Printf(" β Missing: %d\n", stats["DOC_MISSING"])
}
if stats["DOC_UNHASHED"] > 0 {
fmt.Printf(" βΉοΈ Unhashed: %d\n", stats["DOC_UNHASHED"])
}
}
return nil
},
}
// CANARY: REQ=CBIN-136; FEATURE="DocReportCommand"; ASPECT=CLI; STATUS=TESTED; TEST=TestCANARY_CBIN_136_CLI_DocReport; UPDATED=2025-10-16
var docReportCmd = &cobra.Command{
Use: "report",
Short: "Generate documentation coverage and staleness report",
Long: `Generate comprehensive report on documentation coverage and health.
The report includes:
- Total documentation count by type (user, api, technical, feature, architecture)
- Staleness statistics (current, stale, missing, unhashed)
- Coverage percentage (requirements with vs without documentation)
- Requirements without documentation
- Documentation age metrics`,
Example: ` canary doc report
canary doc report --format json
canary doc report --show-undocumented`,
RunE: func(cmd *cobra.Command, args []string) error {
dbPath := cmd.Flag("db").Value.String()
format, _ := cmd.Flags().GetString("format")
showUndocumented, _ := cmd.Flags().GetBool("show-undocumented")
// Open database
db, err := storage.Open(dbPath)
if err != nil {
return fmt.Errorf("failed to open database: %w", err)
}
defer db.Close()
// Get all tokens
tokens, err := db.ListTokens(map[string]string{}, "", "req_id ASC", 0)
if err != nil {
return fmt.Errorf("failed to query tokens: %w", err)
}
// Statistics
stats := struct {
TotalTokens int
TokensWithDocs int
TokensWithoutDocs int
ByType map[string]int
ByStatus map[string]int
UndocumentedRequirements []string
}{
ByType: make(map[string]int),
ByStatus: make(map[string]int),
UndocumentedRequirements: []string{},
}
seenRequirements := make(map[string]bool)
requirementsWithDocs := make(map[string]bool)
// Analyze tokens
for _, token := range tokens {
stats.TotalTokens++
// Track unique requirements
if !seenRequirements[token.ReqID] {
seenRequirements[token.ReqID] = true
}
if token.DocPath == "" {
continue
}
stats.TokensWithDocs++
requirementsWithDocs[token.ReqID] = true
// Count by type
if token.DocType != "" {
stats.ByType[token.DocType]++
}
// Check staleness for each documentation
results, err := docs.CheckMultipleDocumentation(token)
if err != nil {
continue
}
for _, status := range results {
stats.ByStatus[status]++
}
}
// Find undocumented requirements
for reqID := range seenRequirements {
if !requirementsWithDocs[reqID] {
stats.UndocumentedRequirements = append(stats.UndocumentedRequirements, reqID)
}
}
stats.TokensWithoutDocs = len(stats.UndocumentedRequirements)
// Output report
if format == "json" {
// JSON format output
report := map[string]interface{}{
"total_tokens": stats.TotalTokens,
"tokens_with_docs": stats.TokensWithDocs,
"tokens_without_docs": stats.TokensWithoutDocs,
"coverage_percent": float64(stats.TokensWithDocs) / float64(stats.TotalTokens) * 100,
"by_type": stats.ByType,
"by_status": stats.ByStatus,
"undocumented_count": len(stats.UndocumentedRequirements),
}
if showUndocumented {
report["undocumented_requirements"] = stats.UndocumentedRequirements
}
encoder := json.NewEncoder(os.Stdout)
encoder.SetIndent("", " ")
return encoder.Encode(report)
}
// Human-readable format
fmt.Println("π Documentation Report")
fmt.Println()
// Coverage summary
coveragePercent := 0.0
if stats.TotalTokens > 0 {
coveragePercent = float64(len(requirementsWithDocs)) / float64(len(seenRequirements)) * 100
}
fmt.Printf("Coverage: %d/%d requirements (%.1f%%)\n",
len(requirementsWithDocs), len(seenRequirements), coveragePercent)
fmt.Printf("Total Tokens: %d (%d with docs, %d without)\n\n",
stats.TotalTokens, stats.TokensWithDocs, stats.TokensWithoutDocs)
// Documentation by type
if len(stats.ByType) > 0 {
fmt.Println("π Documentation by Type:")
for docType, count := range stats.ByType {
fmt.Printf(" %s: %d\n", docType, count)
}
fmt.Println()
}
// Staleness statistics
totalDocs := stats.ByStatus["DOC_CURRENT"] + stats.ByStatus["DOC_STALE"] +
stats.ByStatus["DOC_MISSING"] + stats.ByStatus["DOC_UNHASHED"]
if totalDocs > 0 {
fmt.Println("π Documentation Status:")
if stats.ByStatus["DOC_CURRENT"] > 0 {
fmt.Printf(" β
Current: %d (%.1f%%)\n",
stats.ByStatus["DOC_CURRENT"],
float64(stats.ByStatus["DOC_CURRENT"])/float64(totalDocs)*100)
}
if stats.ByStatus["DOC_STALE"] > 0 {
fmt.Printf(" β οΈ Stale: %d (%.1f%%)\n",
stats.ByStatus["DOC_STALE"],
float64(stats.ByStatus["DOC_STALE"])/float64(totalDocs)*100)
}
if stats.ByStatus["DOC_MISSING"] > 0 {
fmt.Printf(" β Missing: %d (%.1f%%)\n",
stats.ByStatus["DOC_MISSING"],
float64(stats.ByStatus["DOC_MISSING"])/float64(totalDocs)*100)
}
if stats.ByStatus["DOC_UNHASHED"] > 0 {
fmt.Printf(" βΉοΈ Unhashed: %d (%.1f%%)\n",
stats.ByStatus["DOC_UNHASHED"],
float64(stats.ByStatus["DOC_UNHASHED"])/float64(totalDocs)*100)
}
fmt.Println()
}
// Undocumented requirements
if showUndocumented && len(stats.UndocumentedRequirements) > 0 {
fmt.Printf("π Undocumented Requirements (%d):\n", len(stats.UndocumentedRequirements))
for _, reqID := range stats.UndocumentedRequirements {
fmt.Printf(" - %s\n", reqID)
}
fmt.Println()
} else if len(stats.UndocumentedRequirements) > 0 {
fmt.Printf("π‘ %d requirements without documentation (use --show-undocumented to list)\n\n",
len(stats.UndocumentedRequirements))
}
// Recommendations
if stats.ByStatus["DOC_STALE"] > 0 {
fmt.Println("π‘ Recommendations:")
fmt.Println(" Run 'canary doc update --all --stale-only' to update stale documentation")
}
return nil
},
}
func init() {
// Add sub-commands to docCmd
docCmd.AddCommand(docCreateCmd)
docCmd.AddCommand(docUpdateCmd)
docCmd.AddCommand(docStatusCmd)
docCmd.AddCommand(docReportCmd)
// docCreateCmd flags
docCreateCmd.Flags().String("type", "", "Documentation type (user, technical, feature, api, architecture)")
docCreateCmd.Flags().String("output", "", "Output path for documentation file")
// docUpdateCmd flags
docUpdateCmd.Flags().String("db", ".canary/canary.db", "path to database file")
docUpdateCmd.Flags().Bool("all", false, "Update all documentation in database")
docUpdateCmd.Flags().Bool("stale-only", false, "Only update stale documentation (requires --all)")
// docStatusCmd flags
docStatusCmd.Flags().String("db", ".canary/canary.db", "path to database file")
docStatusCmd.Flags().Bool("all", false, "Check all requirements")
docStatusCmd.Flags().Bool("stale-only", false, "Show only stale documentation")
// docReportCmd flags
docReportCmd.Flags().String("db", ".canary/canary.db", "path to database file")
docReportCmd.Flags().String("format", "text", "Output format (text or json)")
docReportCmd.Flags().Bool("show-undocumented", false, "Show list of undocumented requirements")
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
package main
import (
"fmt"
"os"
"sort"
"github.com/spf13/cobra"
"go.devnw.com/canary/internal/storage"
)
// CANARY: REQ=CBIN-CLI-001; FEATURE="FilesCmd"; ASPECT=CLI; STATUS=TESTED; TEST=TestCANARY_CBIN_CLI_001_CLI_FilesCmd; UPDATED=2025-10-16

// filesCmd lists the implementation files containing CANARY tokens for a
// requirement, grouped by aspect; spec/template files are excluded unless
// --all is given.
var filesCmd = &cobra.Command{
	Use:   "files <REQ-ID>",
	Short: "List implementation files for a requirement",
	Long: `Files lists all implementation files containing tokens for a requirement.
By default, excludes spec and template files, showing only actual implementation.
Files are grouped by aspect and show token counts.
Examples:
canary files CBIN-133
canary files CBIN-133 --all # Include spec/template files`,
	Args: cobra.ExactArgs(1),
	RunE: func(cmd *cobra.Command, args []string) error {
		reqID := args[0]
		includeAll, _ := cmd.Flags().GetBool("all")
		dbPath, _ := cmd.Flags().GetString("db")

		db, err := storage.Open(dbPath)
		if err != nil {
			// Point the user at the indexer before surfacing the error.
			fmt.Fprintf(os.Stderr, "⚠️ Database not found\n")
			fmt.Fprintf(os.Stderr, " Suggestion: Run 'canary index' to build database\n\n")
			return fmt.Errorf("open database: %w", err)
		}
		defer db.Close()

		// Query file groups; specs are filtered out unless --all.
		excludeSpecs := !includeAll
		fileGroups, err := db.GetFilesByReqID(reqID, excludeSpecs)
		if err != nil {
			return fmt.Errorf("query files: %w", err)
		}
		if len(fileGroups) == 0 {
			fmt.Printf("No implementation files found for %s\n", reqID)
			if !includeAll {
				fmt.Println("\nTip: Use --all to include spec/template files")
			}
			return fmt.Errorf("no files found")
		}

		fmt.Printf("Implementation files for %s:\n\n", reqID)
		formatFilesList(fileGroups)
		return nil
	},
}
// formatFilesList prints the files in fileGroups grouped by token aspect,
// each with its token count, followed by a files/tokens total line.
func formatFilesList(fileGroups map[string][]*storage.Token) {
	// Collect, per aspect, the files whose tokens mention that aspect,
	// and remember how many tokens each file holds. A file appears under
	// every aspect that any of its tokens declares.
	filesByAspect := make(map[string][]string)
	tokensPerFile := make(map[string]int)
	for path, toks := range fileGroups {
		seen := make(map[string]bool)
		for _, t := range toks {
			seen[t.Aspect] = true
		}
		for a := range seen {
			filesByAspect[a] = append(filesByAspect[a], path)
		}
		tokensPerFile[path] = len(toks)
	}

	// Deterministic aspect ordering.
	aspectNames := make([]string, 0, len(filesByAspect))
	for a := range filesByAspect {
		aspectNames = append(aspectNames, a)
	}
	sort.Strings(aspectNames)

	// Emit one section per aspect, files sorted within each section.
	for _, a := range aspectNames {
		paths := filesByAspect[a]
		sort.Strings(paths)
		fmt.Printf("**%s:**\n", a)
		for _, p := range paths {
			n := tokensPerFile[p]
			word := "token"
			if n > 1 {
				word = "tokens"
			}
			fmt.Printf(" %s (%d %s)\n", p, n, word)
		}
		fmt.Println()
	}

	// Totals across every file and token.
	tokenTotal := 0
	for _, toks := range fileGroups {
		tokenTotal += len(toks)
	}
	fmt.Printf("Total: %d files, %d tokens\n", len(fileGroups), tokenTotal)
}
// init declares the command-line options for "canary files".
func init() {
	filesCmd.Flags().String("db", ".canary/canary.db", "Path to database file")
	filesCmd.Flags().Bool("all", false, "Include spec and template files")
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
// CANARY: REQ=CBIN-140; FEATURE="GapCLI"; ASPECT=CLI; STATUS=IMPL; UPDATED=2025-10-17
package main
import (
"fmt"
"github.com/spf13/cobra"
"go.devnw.com/canary/internal/gap"
"go.devnw.com/canary/internal/storage"
)
// CANARY: REQ=CBIN-140; FEATURE="GapMarkCmd"; ASPECT=CLI; STATUS=IMPL; UPDATED=2025-10-17
// NOTE(review): the FEATURE above reads "GapMarkCmd", but this token sits on
// the parent gap command and the mark subcommand below carries an identical
// token — confirm whether this one should read FEATURE="GapCLI".

// gapCmd is the parent "canary gap" command. It defines no RunE of its own;
// it groups the subcommands registered in init() and hosts their shared
// persistent --db flag.
var gapCmd = &cobra.Command{
Use: "gap <subcommand>",
Short: "Manage gap analysis entries for implementation mistakes",
Long: `Track and query implementation gaps to improve future development.
Gap analysis helps agents learn from past mistakes by:
- Recording what went wrong in implementations
- Tracking corrective actions taken
- Ranking gaps by helpfulness
- Automatically injecting relevant gaps into planning prompts
Subcommands:
mark Record a new gap analysis entry
query Query gaps with filters
report Generate gap analysis report
helpful Mark a gap as helpful
unhelpful Mark a gap as unhelpful
config View or update gap analysis configuration`,
}
// CANARY: REQ=CBIN-140; FEATURE="GapMarkCmd"; ASPECT=CLI; STATUS=IMPL; UPDATED=2025-10-17
var gapMarkCmd = &cobra.Command{
Use: "mark <req-id> <feature> --category <category> --description <description>",
Short: "Record a new gap analysis entry",
Long: `Record a new gap analysis entry for an implementation mistake.
Categories:
logic_error - Incorrect business logic or algorithm
test_failure - Tests incorrectly written or missing cases
performance - Performance issues or inefficient implementation
security - Security vulnerabilities or insecure practices
edge_case - Unhandled edge cases or boundary conditions
integration - Integration issues with existing systems
documentation - Incorrect or misleading documentation
other - Other types of implementation gaps
Examples:
# Record a logic error
canary gap mark CBIN-140 GapTracking \
--category logic_error \
--description "Incorrect query ordering in GetFilesByReqID" \
--action "Added ORDER BY clause to sort by file path"
# Record a test failure
canary gap mark CBIN-141 PromptFlag \
--category test_failure \
--description "Missing edge case test for empty prompt" \
--aspect CLI`,
Args: cobra.ExactArgs(2),
RunE: func(cmd *cobra.Command, args []string) error {
dbPath, _ := cmd.Flags().GetString("db")
reqID := args[0]
feature := args[1]
aspect, _ := cmd.Flags().GetString("aspect")
category, _ := cmd.Flags().GetString("category")
description, _ := cmd.Flags().GetString("description")
action, _ := cmd.Flags().GetString("action")
createdBy, _ := cmd.Flags().GetString("created-by")
// Validate required fields
if category == "" {
return fmt.Errorf("--category is required")
}
if description == "" {
return fmt.Errorf("--description is required")
}
// Open database
db, err := storage.Open(dbPath)
if err != nil {
return fmt.Errorf("open database: %w", err)
}
defer db.Close()
// Create service
repo := storage.NewGapRepository(db)
service := gap.NewService(repo)
// Mark gap
gapID, err := service.MarkGap(reqID, feature, aspect, category, description, action, createdBy)
if err != nil {
return fmt.Errorf("mark gap: %w", err)
}
fmt.Printf("β
Created gap analysis entry: %s\n", gapID)
fmt.Printf("\nRequirement: %s\n", reqID)
fmt.Printf("Feature: %s\n", feature)
fmt.Printf("Category: %s\n", category)
fmt.Printf("Description: %s\n", description)
if action != "" {
fmt.Printf("Corrective Action: %s\n", action)
}
fmt.Printf("\nThis gap will be included in future planning for %s.\n", reqID)
fmt.Printf("Mark as helpful with: canary gap helpful %s\n", gapID)
return nil
},
}
// CANARY: REQ=CBIN-140; FEATURE="GapQueryCmd"; ASPECT=CLI; STATUS=IMPL; UPDATED=2025-10-17

// gapQueryCmd lists gap entries matching the optional --req-id,
// --feature, --aspect, --category and --limit filters.
var gapQueryCmd = &cobra.Command{
	Use:   "query [flags]",
	Short: "Query gap analysis entries with filters",
	Long: `Query gap entries with optional filters.
Examples:
# Query all gaps for a requirement
canary gap query --req-id CBIN-140
# Query by category
canary gap query --category logic_error
# Query by feature
canary gap query --feature GapTracking
# Query with limit
canary gap query --req-id CBIN-140 --limit 5`,
	RunE: func(cmd *cobra.Command, args []string) error {
		flags := cmd.Flags()
		dbPath, _ := flags.GetString("db")
		reqID, _ := flags.GetString("req-id")
		feature, _ := flags.GetString("feature")
		aspect, _ := flags.GetString("aspect")
		category, _ := flags.GetString("category")
		limit, _ := flags.GetInt("limit")

		db, err := storage.Open(dbPath)
		if err != nil {
			return fmt.Errorf("open database: %w", err)
		}
		defer db.Close()

		// Run the filtered query through the gap service.
		service := gap.NewService(storage.NewGapRepository(db))
		entries, err := service.QueryGaps(reqID, feature, aspect, category, limit)
		if err != nil {
			return fmt.Errorf("query gaps: %w", err)
		}
		if len(entries) == 0 {
			fmt.Println("No gaps found matching the criteria")
			return nil
		}

		// Numbered listing, one record per entry.
		fmt.Printf("Found %d gap(s):\n\n", len(entries))
		for idx, entry := range entries {
			fmt.Printf("%d. %s - %s\n", idx+1, entry.GapID, entry.Feature)
			fmt.Printf(" Requirement: %s\n", entry.ReqID)
			fmt.Printf(" Category: %s\n", entry.Category)
			if entry.Aspect != "" {
				fmt.Printf(" Aspect: %s\n", entry.Aspect)
			}
			fmt.Printf(" Description: %s\n", entry.Description)
			if entry.CorrectiveAction != "" {
				fmt.Printf(" Corrective Action: %s\n", entry.CorrectiveAction)
			}
			fmt.Printf(" Helpful: %d | Unhelpful: %d\n", entry.HelpfulCount, entry.UnhelpfulCount)
			fmt.Printf(" Created: %s by %s\n", entry.CreatedAt.Format("2006-01-02"), entry.CreatedBy)
			fmt.Println()
		}
		return nil
	},
}
// CANARY: REQ=CBIN-140; FEATURE="GapReportCmd"; ASPECT=CLI; STATUS=IMPL; UPDATED=2025-10-17

// gapReportCmd prints the service-generated gap analysis report for a
// single requirement ID.
var gapReportCmd = &cobra.Command{
	Use:   "report <req-id>",
	Short: "Generate gap analysis report for a requirement",
	Long: `Generate a comprehensive gap analysis report for a requirement.
The report includes:
- Total number of gaps
- Gaps grouped by category
- Descriptions and corrective actions
- Helpfulness ratings
Example:
canary gap report CBIN-140`,
	Args: cobra.ExactArgs(1),
	RunE: func(cmd *cobra.Command, args []string) error {
		dbPath, _ := cmd.Flags().GetString("db")

		db, err := storage.Open(dbPath)
		if err != nil {
			return fmt.Errorf("open database: %w", err)
		}
		defer db.Close()

		// The service owns report formatting; this command just prints it.
		service := gap.NewService(storage.NewGapRepository(db))
		report, err := service.GenerateReport(args[0])
		if err != nil {
			return fmt.Errorf("generate report: %w", err)
		}
		fmt.Println(report)
		return nil
	},
}
// CANARY: REQ=CBIN-140; FEATURE="GapHelpfulCmd"; ASPECT=CLI; STATUS=IMPL; UPDATED=2025-10-17
var gapHelpfulCmd = &cobra.Command{
Use: "helpful <gap-id>",
Short: "Mark a gap entry as helpful",
Long: `Mark a gap analysis entry as helpful.
Helpful gaps are prioritized when injecting into planning prompts.
Example:
canary gap helpful GAP-CBIN-140-001`,
Args: cobra.ExactArgs(1),
RunE: func(cmd *cobra.Command, args []string) error {
dbPath, _ := cmd.Flags().GetString("db")
gapID := args[0]
// Open database
db, err := storage.Open(dbPath)
if err != nil {
return fmt.Errorf("open database: %w", err)
}
defer db.Close()
// Create service
repo := storage.NewGapRepository(db)
service := gap.NewService(repo)
// Mark helpful
if err := service.MarkHelpful(gapID); err != nil {
return fmt.Errorf("mark helpful: %w", err)
}
fmt.Printf("β
Marked %s as helpful\n", gapID)
fmt.Println("\nThis gap will be prioritized in future planning prompts.")
return nil
},
}
// CANARY: REQ=CBIN-140; FEATURE="GapUnhelpfulCmd"; ASPECT=CLI; STATUS=IMPL; UPDATED=2025-10-17
var gapUnhelpfulCmd = &cobra.Command{
Use: "unhelpful <gap-id>",
Short: "Mark a gap entry as unhelpful",
Long: `Mark a gap analysis entry as unhelpful.
Unhelpful gaps are deprioritized when injecting into planning prompts.
Example:
canary gap unhelpful GAP-CBIN-140-001`,
Args: cobra.ExactArgs(1),
RunE: func(cmd *cobra.Command, args []string) error {
dbPath, _ := cmd.Flags().GetString("db")
gapID := args[0]
// Open database
db, err := storage.Open(dbPath)
if err != nil {
return fmt.Errorf("open database: %w", err)
}
defer db.Close()
// Create service
repo := storage.NewGapRepository(db)
service := gap.NewService(repo)
// Mark unhelpful
if err := service.MarkUnhelpful(gapID); err != nil {
return fmt.Errorf("mark unhelpful: %w", err)
}
fmt.Printf("β
Marked %s as unhelpful\n", gapID)
return nil
},
}
// CANARY: REQ=CBIN-140; FEATURE="GapConfigCmd"; ASPECT=CLI; STATUS=IMPL; UPDATED=2025-10-17
var gapConfigCmd = &cobra.Command{
Use: "config [flags]",
Short: "View or update gap analysis configuration",
Long: `View or update gap analysis configuration settings.
Configuration settings:
--max-gaps Maximum gaps to inject into planning (default: 10)
--min-helpful Minimum helpful count to include (default: 1)
--ranking Ranking strategy: helpful_desc, recency_desc, weighted (default: helpful_desc)
Examples:
# View current configuration
canary gap config
# Update configuration
canary gap config --max-gaps 20 --min-helpful 2 --ranking weighted`,
RunE: func(cmd *cobra.Command, args []string) error {
dbPath, _ := cmd.Flags().GetString("db")
maxGaps, _ := cmd.Flags().GetInt("max-gaps")
minHelpful, _ := cmd.Flags().GetInt("min-helpful")
ranking, _ := cmd.Flags().GetString("ranking")
// Open database
db, err := storage.Open(dbPath)
if err != nil {
return fmt.Errorf("open database: %w", err)
}
defer db.Close()
// Create service
repo := storage.NewGapRepository(db)
service := gap.NewService(repo)
// Check if update flags provided
updateMode := cmd.Flags().Changed("max-gaps") ||
cmd.Flags().Changed("min-helpful") ||
cmd.Flags().Changed("ranking")
if updateMode {
// Get current config to preserve unchanged values
currentConfig, err := service.GetConfig()
if err != nil {
return fmt.Errorf("get current config: %w", err)
}
// Use current values if flags not provided
if !cmd.Flags().Changed("max-gaps") {
maxGaps = currentConfig.MaxGapInjection
}
if !cmd.Flags().Changed("min-helpful") {
minHelpful = currentConfig.MinHelpfulThreshold
}
if !cmd.Flags().Changed("ranking") {
ranking = currentConfig.RankingStrategy
}
// Update configuration
if err := service.UpdateConfig(maxGaps, minHelpful, ranking); err != nil {
return fmt.Errorf("update config: %w", err)
}
fmt.Println("β
Configuration updated:")
} else {
// Just display current config
fmt.Println("Current gap analysis configuration:")
}
// Display configuration
config, err := service.GetConfig()
if err != nil {
return fmt.Errorf("get config: %w", err)
}
fmt.Printf("\n Max Gaps to Inject: %d\n", config.MaxGapInjection)
fmt.Printf(" Min Helpful Threshold: %d\n", config.MinHelpfulThreshold)
fmt.Printf(" Ranking Strategy: %s\n", config.RankingStrategy)
fmt.Printf(" Last Updated: %s\n", config.UpdatedAt.Format("2006-01-02 15:04:05"))
return nil
},
}
// CANARY: REQ=CBIN-140; FEATURE="GapCategoriesCmd"; ASPECT=CLI; STATUS=IMPL; UPDATED=2025-10-17

// gapCategoriesCmd prints the category names and descriptions known to
// the gap service.
var gapCategoriesCmd = &cobra.Command{
	Use:   "categories",
	Short: "List available gap categories",
	Long: `List all available gap analysis categories.
Categories help classify the type of implementation mistake.`,
	RunE: func(cmd *cobra.Command, args []string) error {
		dbPath, _ := cmd.Flags().GetString("db")

		db, err := storage.Open(dbPath)
		if err != nil {
			return fmt.Errorf("open database: %w", err)
		}
		defer db.Close()

		// Categories come from the service layer, not a hard-coded list.
		service := gap.NewService(storage.NewGapRepository(db))
		categories, err := service.GetCategories()
		if err != nil {
			return fmt.Errorf("get categories: %w", err)
		}

		fmt.Println("Available gap categories:")
		for _, cat := range categories {
			fmt.Printf(" %-20s %s\n", cat.Name, cat.Description)
		}
		return nil
	},
}
func init() {
// Add gap subcommands
gapCmd.AddCommand(gapMarkCmd)
gapCmd.AddCommand(gapQueryCmd)
gapCmd.AddCommand(gapReportCmd)
gapCmd.AddCommand(gapHelpfulCmd)
gapCmd.AddCommand(gapUnhelpfulCmd)
gapCmd.AddCommand(gapConfigCmd)
gapCmd.AddCommand(gapCategoriesCmd)
// Global db flag for all gap commands
gapCmd.PersistentFlags().String("db", ".canary/canary.db", "path to database file")
// gapMarkCmd flags
gapMarkCmd.Flags().String("aspect", "", "implementation aspect (API, CLI, Engine, etc.)")
gapMarkCmd.Flags().String("category", "", "gap category (required)")
gapMarkCmd.Flags().String("description", "", "what went wrong (required)")
gapMarkCmd.Flags().String("action", "", "corrective action taken")
gapMarkCmd.Flags().String("created-by", "agent", "who identified the gap")
// gapQueryCmd flags
gapQueryCmd.Flags().String("req-id", "", "filter by requirement ID")
gapQueryCmd.Flags().String("feature", "", "filter by feature name")
gapQueryCmd.Flags().String("aspect", "", "filter by aspect")
gapQueryCmd.Flags().String("category", "", "filter by category")
gapQueryCmd.Flags().Int("limit", 0, "maximum number of results (0 = no limit)")
// gapConfigCmd flags
gapConfigCmd.Flags().Int("max-gaps", 10, "maximum gaps to inject into planning")
gapConfigCmd.Flags().Int("min-helpful", 1, "minimum helpful count to include")
gapConfigCmd.Flags().String("ranking", "helpful_desc", "ranking strategy (helpful_desc, recency_desc, weighted)")
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
package main
import (
"fmt"
"os"
"strings"
"github.com/spf13/cobra"
"go.devnw.com/canary/internal/storage"
)
// CANARY: REQ=CBIN-CLI-001; FEATURE="GrepCmd"; ASPECT=CLI; STATUS=TESTED; TEST=TestCANARY_CBIN_CLI_001_CLI_GrepCmd; UPDATED=2025-10-16

// grepCmd searches indexed CANARY tokens for a case-insensitive substring
// across features, file paths, test names, bench names and requirement IDs.
var grepCmd = &cobra.Command{
	Use:   "grep <pattern>",
	Short: "Search CANARY tokens by pattern",
	Long: `Search for CANARY tokens matching a pattern.
Searches across:
- Feature names
- File paths
- Test names
- Bench names
- Requirement IDs
The search is case-insensitive and matches substrings.
Examples:
canary grep User # Find all tokens related to "User"
canary grep internal/auth # Find tokens in auth directory
canary grep TestAuth # Find tokens with "TestAuth" test`,
	Args: cobra.ExactArgs(1),
	RunE: func(cmd *cobra.Command, args []string) error {
		pattern := args[0]
		dbPath, _ := cmd.Flags().GetString("db")
		groupBy, _ := cmd.Flags().GetString("group-by")

		db, err := storage.Open(dbPath)
		if err != nil {
			// Point the user at the indexer before surfacing the error.
			fmt.Fprintf(os.Stderr, "⚠️ Database not found\n")
			fmt.Fprintf(os.Stderr, " Suggestion: Run 'canary index' to build database\n\n")
			return fmt.Errorf("open database: %w", err)
		}
		defer db.Close()

		tokens, err := grepTokens(db, pattern)
		if err != nil {
			return fmt.Errorf("search tokens: %w", err)
		}
		if len(tokens) == 0 {
			fmt.Printf("No tokens found matching pattern: %s\n", pattern)
			return nil
		}

		// Display results, optionally grouped by requirement.
		fmt.Printf("Found %d tokens matching '%s':\n\n", len(tokens), pattern)
		if groupBy == "requirement" {
			displayGrepResultsByRequirement(tokens)
		} else {
			displayGrepResults(tokens)
		}
		return nil
	},
}
// grepTokens returns every token whose feature, requirement ID, file path,
// test name, or bench name matches pattern (case-insensitive substring).
//
// Results from the database keyword search and the full-listing scan are
// merged, de-duplicated, and sorted by requirement ID, feature, file path,
// and line number so output is deterministic across runs (previously the
// order came from Go's randomized map iteration).
func grepTokens(db *storage.DB, pattern string) ([]*storage.Token, error) {
	if pattern == "" {
		return []*storage.Token{}, nil
	}
	// tokenKey uniquely identifies a token occurrence for de-duplication.
	tokenKey := func(t *storage.Token) string {
		return fmt.Sprintf("%s:%s:%s:%d", t.ReqID, t.Feature, t.FilePath, t.LineNumber)
	}
	// Keyword search covers feature names and requirement IDs.
	tokens, err := db.SearchTokens(pattern)
	if err != nil {
		return nil, err
	}
	// Full listing enables additional matching on file paths, tests, benches.
	allTokens, err := db.ListTokens(nil, "", "", 0)
	if err != nil {
		return nil, err
	}
	matchMap := make(map[string]*storage.Token, len(tokens))
	for _, token := range tokens {
		matchMap[tokenKey(token)] = token
	}
	patternLower := strings.ToLower(pattern)
	for _, token := range allTokens {
		if strings.Contains(strings.ToLower(token.FilePath), patternLower) ||
			strings.Contains(strings.ToLower(token.Test), patternLower) ||
			strings.Contains(strings.ToLower(token.Bench), patternLower) {
			matchMap[tokenKey(token)] = token
		}
	}
	// Convert map back to slice, then sort for stable CLI output.
	result := make([]*storage.Token, 0, len(matchMap))
	for _, token := range matchMap {
		result = append(result, token)
	}
	sort.Slice(result, func(i, j int) bool {
		a, b := result[i], result[j]
		if a.ReqID != b.ReqID {
			return a.ReqID < b.ReqID
		}
		if a.Feature != b.Feature {
			return a.Feature < b.Feature
		}
		if a.FilePath != b.FilePath {
			return a.FilePath < b.FilePath
		}
		return a.LineNumber < b.LineNumber
	})
	return result, nil
}
// displayGrepResults prints every token as a flat, blank-line-separated
// list entry showing status, aspect, location, and optional test/bench names.
func displayGrepResults(tokens []*storage.Token) {
	for _, tok := range tokens {
		fmt.Printf("π %s - %s\n", tok.ReqID, tok.Feature)
		fmt.Printf(" Status: %s | Aspect: %s\n", tok.Status, tok.Aspect)
		fmt.Printf(" Location: %s:%d\n", tok.FilePath, tok.LineNumber)
		if name := tok.Test; name != "" {
			fmt.Printf(" Test: %s\n", name)
		}
		if name := tok.Bench; name != "" {
			fmt.Printf(" Bench: %s\n", name)
		}
		fmt.Println()
	}
}
// displayGrepResultsByRequirement prints tokens grouped under their
// requirement-ID heading.
//
// Requirement IDs are sorted before printing so the output is deterministic;
// iterating the map directly produced a different ordering on every run.
func displayGrepResultsByRequirement(tokens []*storage.Token) {
	// Group by requirement
	reqMap := make(map[string][]*storage.Token)
	for _, token := range tokens {
		reqMap[token.ReqID] = append(reqMap[token.ReqID], token)
	}
	// Sort group keys for stable output.
	reqIDs := make([]string, 0, len(reqMap))
	for reqID := range reqMap {
		reqIDs = append(reqIDs, reqID)
	}
	sort.Strings(reqIDs)
	// Display grouped results
	for _, reqID := range reqIDs {
		reqTokens := reqMap[reqID]
		fmt.Printf("## %s (%d tokens)\n\n", reqID, len(reqTokens))
		for _, token := range reqTokens {
			fmt.Printf(" π %s\n", token.Feature)
			fmt.Printf(" Status: %s | Aspect: %s | %s:%d\n",
				token.Status, token.Aspect, token.FilePath, token.LineNumber)
			if token.Test != "" {
				fmt.Printf(" Test: %s\n", token.Test)
			}
		}
		fmt.Println()
	}
}
// init registers the grep command's flags.
func init() {
	// Token database produced by `canary index`; used as the search corpus.
	grepCmd.Flags().String("db", ".canary/canary.db", "Path to database file")
	// "requirement" groups output by REQ ID; any other value prints a flat list.
	grepCmd.Flags().String("group-by", "none", "Group results (none, requirement)")
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
// CANARY: REQ=CBIN-133; FEATURE="RequirementLookup"; ASPECT=API; STATUS=TESTED; UPDATED=2025-10-16
package main
import (
"fmt"
"os"
"os/exec"
"path/filepath"
"strings"
"text/template"
"time"
"go.devnw.com/canary/internal/matcher"
)
// RequirementSpec holds loaded specification data
// for one requirement, as read from a .canary/specs/<REQ-ID>-<feature>/ dir.
type RequirementSpec struct {
	ReqID       string // requirement ID from the directory name (e.g. "CBIN-101")
	FeatureName string // feature slug from the directory name (e.g. "engine")
	SpecPath    string // path to spec.md
	SpecContent string // full contents of spec.md
	PlanPath    string // path to plan.md; empty when no plan exists
	PlanContent string // full contents of plan.md; empty when no plan exists
	HasPlan     bool   // true when plan.md was found alongside spec.md
}
// ImplementFlags holds command flags
// for the implement command.
type ImplementFlags struct {
	Prompt       bool // presumably mirrors a --prompt flag — confirm at the call site
	ShowProgress bool // presumably mirrors a --show-progress flag — confirm at the call site
	ContextLines int  // presumably mirrors a --context-lines flag — confirm at the call site
}
// ProgressStats tracks implementation progress
// as tallied from CANARY token STATUS fields (see calculateProgress).
type ProgressStats struct {
	Total     int // all tokens found for the requirement
	Stub      int // tokens with STATUS=STUB
	Impl      int // tokens with STATUS=IMPL
	Tested    int // tokens with STATUS=TESTED
	Benched   int // tokens with STATUS=BENCHED
	Completed int // TESTED plus BENCHED tokens
}
// findRequirement locates a requirement specification by exact ID, directory
// glob, or fuzzy name match, in that order of preference.
func findRequirement(query string) (*RequirementSpec, error) {
	trimmed := strings.TrimSpace(query)
	upper := strings.ToUpper(trimmed)

	// Attempt 1: exact ID match when the query embeds a requirement ID.
	if id := extractReqID(upper); id != "" {
		if spec, err := findByExactID(id); err == nil {
			return spec, nil
		}
	}

	// Attempt 2: unique directory glob match under the specs directory.
	specsDir := ".canary/specs"
	globMatches, _ := filepath.Glob(filepath.Join(specsDir, "*"+trimmed+"*"))
	if len(globMatches) == 1 {
		return loadSpecFromDir(globMatches[0])
	}

	// Attempt 3: fuzzy match against spec directory names.
	candidates, err := matcher.FindBestMatches(trimmed, specsDir, 5)
	if err != nil {
		return nil, fmt.Errorf("fuzzy search failed: %w", err)
	}
	if len(candidates) == 0 {
		return nil, fmt.Errorf("no matches found for query: %s", trimmed)
	}

	// Auto-select a clear winner (score >= 90 and 20+ points ahead).
	best := candidates[0]
	if best.Score >= 90 && (len(candidates) == 1 || best.Score-candidates[1].Score > 20) {
		return loadSpecFromDir(best.SpecPath)
	}
	// Otherwise fall back to the best candidate; production would prompt
	// the user interactively here.
	return loadSpecFromDir(best.SpecPath)
}
// extractReqID extracts the requirement ID from a query.
// Examples:
//   - "CBIN-101"              -> "CBIN-101"
//   - "CBIN-101-engine"       -> "CBIN-101"
//   - "CBIN-101-feature-name" -> "CBIN-101"
//   - "engine"                -> ""
//   - "123-456"               -> "" (project key must be alphabetic)
//
// A requirement ID has the form PROJECT-### where PROJECT is purely
// alphabetic (CBIN, REQ, etc.) and ### is all digits. Returns "" when the
// query does not start with such an ID.
func extractReqID(query string) string {
	parts := strings.SplitN(query, "-", 3)
	if len(parts) < 2 || parts[0] == "" || parts[1] == "" {
		return ""
	}
	// The project key must be alphabetic; previously only non-emptiness was
	// checked, so inputs like "123-456" were wrongly accepted as IDs.
	for _, ch := range parts[0] {
		if !('A' <= ch && ch <= 'Z') && !('a' <= ch && ch <= 'z') {
			return ""
		}
	}
	// The numeric portion must be all digits.
	for _, ch := range parts[1] {
		if ch < '0' || ch > '9' {
			return ""
		}
	}
	return parts[0] + "-" + parts[1]
}
// findByExactID finds the spec directory whose name starts with the given
// requirement ID (case-insensitive) and loads it.
func findByExactID(reqID string) (*RequirementSpec, error) {
	id := strings.ToUpper(reqID)
	specsDir := ".canary/specs"

	entries, err := os.ReadDir(specsDir)
	if err != nil {
		return nil, err
	}
	// Spec directories are named "<REQ-ID>-<feature-slug>".
	prefix := id + "-"
	for _, entry := range entries {
		if entry.IsDir() && strings.HasPrefix(entry.Name(), prefix) {
			return loadSpecFromDir(filepath.Join(specsDir, entry.Name()))
		}
	}
	return nil, fmt.Errorf("specification not found for %s", id)
}
// loadSpecFromDir loads spec.md (required) and plan.md (optional) from the
// given spec directory, deriving ReqID and FeatureName from its name.
func loadSpecFromDir(dirPath string) (*RequirementSpec, error) {
	specFile := filepath.Join(dirPath, "spec.md")
	specBytes, err := os.ReadFile(specFile)
	if err != nil {
		return nil, fmt.Errorf("read spec file: %w", err)
	}

	// Directory names follow "<KEY>-<NUM>-<feature-name>".
	base := filepath.Base(dirPath)
	segments := strings.SplitN(base, "-", 3)
	if len(segments) < 3 {
		return nil, fmt.Errorf("invalid spec directory name: %s", base)
	}

	result := &RequirementSpec{
		ReqID:       segments[0] + "-" + segments[1], // e.g. CBIN-XXX
		FeatureName: segments[2],
		SpecPath:    specFile,
		SpecContent: string(specBytes),
	}

	// plan.md is optional; attach it when present.
	planFile := filepath.Join(dirPath, "plan.md")
	if planBytes, err := os.ReadFile(planFile); err == nil {
		result.PlanPath = planFile
		result.PlanContent = string(planBytes)
		result.HasPlan = true
	}
	return result, nil
}
// CANARY: REQ=CBIN-133; FEATURE="PromptRenderer"; ASPECT=API; STATUS=TESTED; UPDATED=2025-10-16
// renderImplementPrompt generates comprehensive implementation guidance by
// filling the implement-prompt template with the spec, plan, constitution,
// checklist, and current progress for the requirement.
func renderImplementPrompt(spec *RequirementSpec, flags *ImplementFlags) (string, error) {
	// Load and parse the prompt template.
	templatePath := ".canary/templates/implement-prompt-template.md"
	raw, err := os.ReadFile(templatePath)
	if err != nil {
		return "", fmt.Errorf("read template: %w", err)
	}
	tmpl, err := template.New("implement-prompt").Parse(string(raw))
	if err != nil {
		return "", fmt.Errorf("parse template: %w", err)
	}

	// Constitution and progress are best-effort; their absence is not fatal.
	constitution, _ := os.ReadFile(".canary/memory/constitution.md")
	progress, _ := calculateProgress(spec.ReqID)

	data := map[string]interface{}{
		"ReqID":        spec.ReqID,
		"FeatureName":  spec.FeatureName,
		"SpecPath":     spec.SpecPath,
		"SpecContent":  spec.SpecContent,
		"PlanPath":     spec.PlanPath,
		"PlanContent":  spec.PlanContent,
		"HasPlan":      spec.HasPlan,
		"Constitution": string(constitution),
		"Checklist":    extractImplementationChecklist(spec.SpecContent),
		"Progress":     progress,
		"Today":        time.Now().UTC().Format("2006-01-02"),
	}

	var out strings.Builder
	if err := tmpl.Execute(&out, data); err != nil {
		return "", fmt.Errorf("execute template: %w", err)
	}
	return out.String(), nil
}
// calculateProgress scans the working tree for CANARY tokens belonging to
// reqID and tallies their STATUS values into a ProgressStats.
//
// The grep pattern anchors the requirement ID with a trailing delimiter
// (';', whitespace, or end of line): the previous unanchored pattern meant
// CBIN-1 also matched CBIN-10 and CBIN-133, inflating the counts.
func calculateProgress(reqID string) (*ProgressStats, error) {
	grepCmd := exec.Command("grep", "-rnE", "--include=*.go", "--include=*.md",
		fmt.Sprintf("CANARY:.*REQ=%s(;|[[:space:]]|$)", reqID), ".")
	output, err := grepCmd.CombinedOutput()
	if err != nil && len(output) == 0 {
		// grep exits non-zero when nothing matches; treat as zero progress.
		return &ProgressStats{}, nil
	}
	stats := &ProgressStats{}
	for _, line := range strings.Split(string(output), "\n") {
		if line == "" {
			continue
		}
		stats.Total++
		// TESTED and BENCHED both count toward Completed.
		switch extractField(line, "STATUS") {
		case "STUB":
			stats.Stub++
		case "IMPL":
			stats.Impl++
		case "TESTED":
			stats.Tested++
			stats.Completed++
		case "BENCHED":
			stats.Benched++
			stats.Completed++
		}
	}
	return stats, nil
}
// extractImplementationChecklist extracts the checklist section from spec
// content: every line after a "## Implementation Checklist" heading, up to
// (but excluding) the next "## " heading that is not itself an
// "Implementation" section. Returns "" when no checklist heading is found.
func extractImplementationChecklist(specContent string) string {
	var out strings.Builder
	capturing := false
	for _, line := range strings.Split(specContent, "\n") {
		switch {
		case strings.Contains(line, "## Implementation Checklist"):
			// Start (or restart) capture; the heading itself is not emitted.
			capturing = true
		case capturing && strings.HasPrefix(line, "## ") && !strings.Contains(line, "Implementation"):
			// Next major section ends the checklist.
			return out.String()
		case capturing:
			out.WriteString(line)
			out.WriteByte('\n')
		}
	}
	return out.String()
}
// listUnimplemented lists all unimplemented (STUB/IMPL) requirements.
//
// TODO: the actual listing is not implemented yet; this prints a placeholder
// message and always succeeds.
func listUnimplemented() error {
	fmt.Print("Listing unimplemented requirements...\n(Feature not yet fully implemented)\n")
	return nil
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
// CANARY: REQ=CBIN-CLI-104; FEATURE="CanaryCLI"; ASPECT=CLI; STATUS=IMPL; OWNER=canary; UPDATED=2025-10-16
package main
import (
"encoding/json"
"fmt"
"io/fs"
"log/slog"
"os"
"os/exec"
"path/filepath"
"regexp"
"strconv"
"strings"
"text/template"
"time"
"github.com/spf13/cobra"
"go.devnw.com/canary/embedded"
"go.devnw.com/canary/internal/config"
"go.devnw.com/canary/internal/gap"
"go.devnw.com/canary/internal/migrate"
"go.devnw.com/canary/internal/reqid"
"go.devnw.com/canary/internal/storage"
)
var (
	// version is stamped at build time via -ldflags; "dev" otherwise.
	version = "dev"

	rootCmd = &cobra.Command{
		Use:   "canary",
		Short: "Track requirements via CANARY tokens in source code",
		Long: `Canary tracks requirements through CANARY tokens embedded in source code.
Inspired by spec-kit's specification-driven development, canary provides
commands for scanning, creating, and managing requirement tokens.`,
		Version: version,
		// PersistentPreRunE auto-migrates the token database before any
		// subcommand that actually uses it.
		PersistentPreRunE: func(cmd *cobra.Command, args []string) error {
			// Commands that never open the database — or that manage
			// migrations themselves — skip auto-migration entirely.
			switch cmd.Name() {
			case "init", "create", "constitution", "specify", "plan",
				"implement", "scan", "help", "completion",
				"migrate",      // migrate command manages migrations itself
				"rollback",     // rollback command manages migrations itself
				"detect",       // detect command just reads, doesn't need DB
				"migrate-from": // migrate-from creates .canary/, shouldn't auto-migrate first
				return nil
			}

			// Honor a --db override when the subcommand defines that flag.
			dbPath := ".canary/canary.db" // default
			if cmd.Flags().Lookup("db") != nil {
				dbPath, _ = cmd.Flags().GetString("db")
			}
			// Auto-migrate if needed
			if err := storage.AutoMigrate(dbPath); err != nil {
				return fmt.Errorf("auto-migration failed: %w", err)
			}
			return nil
		},
	}
)
// main executes the root command and exits with status 1 on any error.
func main() {
	err := rootCmd.Execute()
	if err == nil {
		return
	}
	fmt.Fprintf(os.Stderr, "Error: %v\n", err)
	os.Exit(1)
}
// CANARY: REQ=CBIN-111; FEATURE="ScanCmd"; ASPECT=CLI; STATUS=IMPL; OWNER=canary; UPDATED=2025-10-17
// scanCmd wraps the existing tools/canary scanner
var scanCmd = &cobra.Command{
	Use:   "scan [flags]",
	Short: "Scan for CANARY tokens and generate reports",
	Long: `Scan source code for CANARY tokens and generate status reports.
This command scans your codebase for CANARY tokens and generates JSON/CSV reports.
Flags:
--root <dir> Root directory to scan (default ".")
--out <file> Output status.json path (default "status.json")
--csv <file> Optional status.csv path
--verify <file> GAP_ANALYSIS file to verify claims
--strict Enforce staleness on TESTED/BENCHED tokens (30 days)
--update-stale Rewrite UPDATED field for stale tokens
--skip <regex> Skip path regex (RE2)
--project-only Filter by project requirement ID pattern
Examples:
# Basic scan
canary scan --root . --out status.json
# Verify GAP_ANALYSIS.md claims
canary scan --verify GAP_ANALYSIS.md
# Update stale tokens
canary scan --update-stale
# Strict mode with staleness enforcement
canary scan --strict`,
	RunE: func(cmd *cobra.Command, args []string) error {
		// Delegate to the standalone scanner under tools/canary via `go run`.
		scanner := filepath.Join("tools", "canary", "main.go")
		scanArgs := []string{"run", scanner}

		// forwardString appends "-opt value" when the string flag is non-empty.
		forwardString := func(flag, opt string) {
			if v, _ := cmd.Flags().GetString(flag); v != "" {
				scanArgs = append(scanArgs, opt, v)
			}
		}
		// forwardBool appends "-opt" when the boolean flag is set.
		forwardBool := func(flag, opt string) {
			if on, _ := cmd.Flags().GetBool(flag); on {
				scanArgs = append(scanArgs, opt)
			}
		}

		// Forward flags in the same order the scanner historically received them.
		forwardString("root", "-root")
		forwardString("out", "-out")
		forwardString("csv", "-csv")
		forwardString("verify", "-verify")
		forwardBool("strict", "-strict")
		forwardBool("update-stale", "-update-stale")
		forwardString("skip", "-skip")
		forwardBool("project-only", "-project-only")

		// Pass through any additional positional args untouched.
		scanArgs = append(scanArgs, args...)

		goCmd := exec.Command("go", scanArgs...)
		goCmd.Stdout = os.Stdout
		goCmd.Stderr = os.Stderr
		goCmd.Stdin = os.Stdin
		return goCmd.Run()
	},
}
// initCmd bootstraps a new project with CANARY token conventions
var initCmd = &cobra.Command{
	Use:   "init [project-name]",
	Short: "Initialize a new project with full CANARY workflow",
	Long: `Bootstrap a new project with CANARY spec-kit-inspired workflow.
Installation Modes:
Global (default): Installs commands in ~/.claude/commands/, ~/.cursor/commands/, etc.
for use across all projects
Local (--local): Installs commands in .claude/commands/, .cursor/commands/, etc.
for project-specific use
Creates:
- .canary/ directory with templates, scripts, agents, and slash commands
- .canary/agents/ directory with pre-configured CANARY agent definitions
- README.md with CANARY token format specification
- GAP_ANALYSIS.md template for tracking requirements
- CLAUDE.md for AI agent integration (slash commands)
The agent files support template variables that can be customized:
--agent-prefix: Agent name prefix (default: project key)
--agent-model: AI model to use (default: sonnet)
--agent-color: Agent color theme (default: blue)
Examples:
canary init # Global install (default)
canary init --local # Local install in current project
canary init myproject --local # Local install in new project`,
	RunE: func(cmd *cobra.Command, args []string) error {
		// Target directory defaults to the current directory.
		projectName := "."
		if len(args) > 0 {
			projectName = args[0]
		}
		// Check if this is an update to an existing project
		canaryDir := filepath.Join(projectName, ".canary")
		isUpdate := false
		if _, err := os.Stat(canaryDir); err == nil {
			isUpdate = true
			fmt.Println("π¦ Existing CANARY project detected - updating...")
		}
		// Create project directory if needed
		if projectName != "." {
			if err := os.MkdirAll(projectName, 0755); err != nil {
				return fmt.Errorf("create project dir: %w", err)
			}
		}
		// Get project key and check for existing key BEFORE copying structure
		// (copying would overwrite project.yaml and lose the stored key).
		projectKey, _ := cmd.Flags().GetString("key")
		projectYamlPath := filepath.Join(projectName, ".canary", "project.yaml")
		// If updating existing project, try to read existing key from project.yaml BEFORE overwriting
		if isUpdate && projectKey == "" {
			if existingContent, err := os.ReadFile(projectYamlPath); err == nil {
				// Extract existing key from project.yaml (handles both "key:" and indented " key:")
				for _, line := range strings.Split(string(existingContent), "\n") {
					trimmed := strings.TrimSpace(line)
					if strings.HasPrefix(trimmed, "key:") {
						parts := strings.SplitN(trimmed, ":", 2)
						if len(parts) == 2 {
							existingKey := strings.TrimSpace(parts[1])
							existingKey = strings.Trim(existingKey, "\"' ")
							// Ignore the unfilled template placeholder.
							if existingKey != "" && existingKey != "{{PROJECT_KEY}}" {
								projectKey = existingKey
								fmt.Printf("π¦ Using existing project key: %s\n", projectKey)
								break
							}
						}
					}
				}
			} else {
				fmt.Printf("β οΈ Warning: Could not read project.yaml: %v\n", err)
			}
		}
		// Only prompt if still no key
		if projectKey == "" {
			// Prompt for project key
			fmt.Print("Enter project requirement ID prefix (e.g., CBIN, PROJ, ACME): ")
			var input string
			if _, err := fmt.Scanln(&input); err != nil {
				// Handle scan error (e.g., EOF or interrupted input)
				input = ""
			}
			projectKey = strings.TrimSpace(strings.ToUpper(input))
		}
		if projectKey == "" {
			projectKey = "PROJ" // Default
		}
		// Copy .canary/ structure from base/ (after extracting existing key)
		if err := copyCanaryStructure(projectName); err != nil {
			return fmt.Errorf("copy .canary structure: %w", err)
		}
		// Copy .canaryignore template (best-effort: missing template is not fatal)
		canaryignoreContent, err := readEmbeddedFile("base/.canaryignore")
		if err == nil {
			canaryignorePath := filepath.Join(projectName, ".canaryignore")
			if err := os.WriteFile(canaryignorePath, canaryignoreContent, 0644); err != nil {
				return fmt.Errorf("write .canaryignore: %w", err)
			}
		}
		// Customize project.yaml with the project key
		if err := customizeProjectYaml(projectYamlPath, projectName, projectKey); err != nil {
			return fmt.Errorf("customize project.yaml: %w", err)
		}
		// Get installation mode flag
		localInstall, _ := cmd.Flags().GetBool("local")
		// Get agent selection flags
		agentsList, _ := cmd.Flags().GetStringSlice("agents")
		allAgents, _ := cmd.Flags().GetBool("all-agents")
		// Get agent configuration flags
		agentPrefix, _ := cmd.Flags().GetString("agent-prefix")
		agentModel, _ := cmd.Flags().GetString("agent-model")
		agentColor, _ := cmd.Flags().GetString("agent-color")
		// Set defaults if not provided
		if agentPrefix == "" {
			agentPrefix = projectKey // Use project key as default agent prefix
		}
		if agentModel == "" {
			agentModel = "claude-3-5-sonnet-20241022"
		}
		if agentColor == "" {
			agentColor = "blue"
		}
		// Copy and process agent files to .canary/agents/ with template substitution
		if err := copyAndProcessAgentFiles(projectName, agentPrefix, agentModel, agentColor); err != nil {
			return fmt.Errorf("copy agent files: %w", err)
		}
		// Install/update slash commands to agent directories
		if err := installSlashCommands(projectName, agentsList, allAgents, localInstall); err != nil {
			return fmt.Errorf("install slash commands: %w", err)
		}
		// Install agent files to each agent system's directory
		if err := installAgentFilesToSystems(projectName, agentsList, allAgents, agentPrefix, agentModel, agentColor, localInstall); err != nil {
			return fmt.Errorf("install agent files to systems: %w", err)
		}
		// CANARY: REQ=CBIN-148; FEATURE="CopilotInitInstructions"; ASPECT=CLI; STATUS=BENCHED; TEST=TestCreateCopilotInstructions; BENCH=BenchmarkCreateCopilotInstructions; UPDATED=2025-10-19
		// Create GitHub Copilot instruction files
		if err := createCopilotInstructions(projectName, projectKey); err != nil {
			return fmt.Errorf("create Copilot instructions: %w", err)
		}
		// Rebuild canary binary if we're updating (best-effort: failure only warns)
		if isUpdate {
			fmt.Println("\nπ§ Rebuilding canary binary...")
			buildCmd := exec.Command("go", "build", "-ldflags=-s -w", "-o", "./bin/canary", "./cmd/canary")
			buildCmd.Stdout = os.Stdout
			buildCmd.Stderr = os.Stderr
			if err := buildCmd.Run(); err != nil {
				fmt.Printf("β οΈ Warning: Failed to rebuild canary binary: %v\n", err)
				fmt.Println(" Run 'make canary-build' or 'go build -o ./bin/canary ./cmd/canary/main.go' to rebuild manually")
			} else {
				fmt.Println("β
Canary binary updated")
			}
		}
		// Create README.md describing the token format
		readme := "# CANARY Token Specification\n\n" +
			"## Format\n\n" +
			"CANARY tokens track requirements directly in source code:\n\n" +
			"```\n" +
			"// CANARY: REQ=CBIN-###; FEATURE=\"Name\"; ASPECT=API; STATUS=IMPL; [TEST=TestName]; [BENCH=BenchName]; [OWNER=team]; UPDATED=YYYY-MM-DD\n" +
			"```\n\n" +
			"## Required Fields\n\n" +
			"- **REQ**: Requirement ID (format: CBIN-###)\n" +
			"- **FEATURE**: Short feature name\n" +
			"- **ASPECT**: Category (API, CLI, Engine, Storage, etc.)\n" +
			"- **STATUS**: Implementation state\n" +
			"- **UPDATED**: Last update date (YYYY-MM-DD)\n\n" +
			"## Status Values\n\n" +
			"- **MISSING**: Planned but not implemented\n" +
			"- **STUB**: Placeholder implementation\n" +
			"- **IMPL**: Implemented\n" +
			"- **TESTED**: Implemented with tests (auto-promoted from IMPL+TEST)\n" +
			"- **BENCHED**: Tested with benchmarks (auto-promoted from TESTED+BENCH)\n" +
			"- **REMOVED**: Deprecated/removed\n\n" +
			"## Optional Fields\n\n" +
			"- **TEST**: Test function name (promotes IMPL β TESTED)\n" +
			"- **BENCH**: Benchmark function name (promotes TESTED β BENCHED)\n" +
			"- **OWNER**: Team/person responsible\n\n" +
			"## Example\n\n" +
			"```go\n" +
			"// CANARY: REQ=CBIN-001; FEATURE=\"UserAuth\"; ASPECT=API; STATUS=TESTED; TEST=TestUserAuth; OWNER=backend; UPDATED=2025-10-16\n" +
			"func AuthenticateUser(credentials *Credentials) (*Session, error) {\n" +
			" // implementation\n" +
			"}\n" +
			"```\n\n" +
			"## Usage\n\n" +
			"```bash\n" +
			"# Scan for tokens and generate reports\n" +
			"canary scan --root . --out status.json --csv status.csv\n\n" +
			"# Verify GAP_ANALYSIS.md claims\n" +
			"canary scan --root . --verify GAP_ANALYSIS.md\n\n" +
			"# Check for stale tokens (30-day threshold)\n" +
			"canary scan --root . --strict\n\n" +
			"# Auto-update stale TESTED/BENCHED tokens\n" +
			"canary scan --root . --update-stale\n" +
			"```\n"
		readmePath := filepath.Join(projectName, "README_CANARY.md")
		if err := os.WriteFile(readmePath, []byte(readme), 0644); err != nil {
			return fmt.Errorf("write README: %w", err)
		}
		// Create GAP_ANALYSIS.md template
		gap := "# Requirements Gap Analysis\n\n" +
			"## Claimed Requirements\n\n" +
			"List requirements that are fully implemented and verified:\n\n" +
			"β
CBIN-001 - UserAuth API fully tested\n" +
			"β
CBIN-002 - DataValidation with benchmarks\n\n" +
			"## Gaps\n\n" +
			"List requirements that are planned or in progress:\n\n" +
			"- [ ] CBIN-003 - ReportGeneration (STATUS=IMPL, needs tests)\n" +
			"- [ ] CBIN-004 - CacheOptimization (STATUS=STUB)\n\n" +
			"## Verification\n\n" +
			"Run verification with:\n\n" +
			"```bash\n" +
			"canary scan --root . --verify GAP_ANALYSIS.md\n" +
			"```\n\n" +
			"This will:\n" +
			"- β
Verify claimed requirements are TESTED or BENCHED\n" +
			"- β Fail with exit code 2 if claims are overclaimed\n"
		gapPath := filepath.Join(projectName, "GAP_ANALYSIS.md")
		if err := os.WriteFile(gapPath, []byte(gap), 0644); err != nil {
			return fmt.Errorf("write GAP_ANALYSIS.md: %w", err)
		}
		// Create CLAUDE.md for AI agent integration
		claudeMD := createClaudeMD()
		claudePath := filepath.Join(projectName, "CLAUDE.md")
		if err := os.WriteFile(claudePath, []byte(claudeMD), 0644); err != nil {
			return fmt.Errorf("write CLAUDE.md: %w", err)
		}
		// Summarize what was created or updated.
		if isUpdate {
			fmt.Printf("\nβ
Updated CANARY project in: %s\n\n", projectName)
			fmt.Println("Updated:")
		} else {
			fmt.Printf("\nβ
Initialized CANARY project in: %s\n\n", projectName)
			fmt.Println("Created:")
		}
		fmt.Println(" β
.canary/ - Full workflow structure")
		fmt.Println(" βββ agents/ - Pre-configured CANARY agent definitions")
		fmt.Println(" βββ memory/constitution.md - Project principles")
		fmt.Println(" βββ scripts/ - Automation scripts")
		fmt.Println(" βββ templates/ - Spec/plan templates")
		fmt.Println(" βββ templates/commands/ - Slash commands for AI agents")
		// Show installation location information
		if localInstall {
			fmt.Println(" β
Agent Files - Installed LOCALLY in project directory")
		} else {
			homeDir, _ := os.UserHomeDir()
			fmt.Printf(" β
Agent Files - Installed GLOBALLY in %s\n", homeDir)
		}
		// Show which agents had commands installed
		agentDirs := map[string]string{
			".claude":    "Claude Code",
			".cursor":    "Cursor",
			".github":    "GitHub Copilot",
			".windsurf":  "Windsurf",
			".kilocode":  "Kilocode",
			".roo":       "Roo",
			".opencode":  "opencode",
			".codex":     "Codex",
			".augment":   "Auggie",
			".codebuddy": "CodeBuddy",
			".amazonq":   "Amazon Q Developer",
		}
		// Determine where to check for agent directories
		checkDir := projectName
		if !localInstall {
			// For global install, check in home directory
			if homeDir, err := os.UserHomeDir(); err == nil {
				checkDir = homeDir
			}
		}
		installedAgents := []string{}
		for dir, name := range agentDirs {
			if _, err := os.Stat(filepath.Join(checkDir, dir)); err == nil {
				installedAgents = append(installedAgents, name)
			}
		}
		if len(installedAgents) > 0 {
			installType := "local"
			if !localInstall {
				installType = "global"
			}
			fmt.Printf(" β
AI Agent Integration (%d systems with %s commands):\n", len(installedAgents), installType)
			for _, agent := range installedAgents {
				fmt.Printf(" β’ %s (commands + agent files)\n", agent)
			}
		}
		if !isUpdate {
			fmt.Println(" β
README_CANARY.md - Token format specification")
			fmt.Println(" β
GAP_ANALYSIS.md - Requirements tracking template")
			fmt.Println(" β
CLAUDE.md - AI agent slash command integration")
		}
		fmt.Println("\nAvailable Slash Commands for AI Agents:")
		fmt.Println(" /canary.constitution - Create/update project principles")
		fmt.Println(" /canary.specify - Create requirement specification")
		fmt.Println(" /canary.plan - Generate implementation plan")
		fmt.Println(" /canary.scan - Scan for CANARY tokens")
		fmt.Println(" /canary.verify - Verify GAP_ANALYSIS.md claims")
		fmt.Println(" /canary.update-stale - Update stale tokens")
		fmt.Println("\nNext Steps:")
		fmt.Println(" 1. Open in AI agent (Claude Code, Cursor, etc.)")
		fmt.Println(" 2. Run: /canary.constitution to establish principles")
		fmt.Println(" 3. Run: /canary.specify \"your feature description\"")
		fmt.Println(" 4. Follow the spec-driven workflow!")
		return nil
	},
}
// filterCanaryTokens removes CANARY tokens with OWNER=canary from file
// content. These are the CANARY CLI's internal tracking tokens, stripped
// when copying templates into user projects; all other lines pass through
// unchanged.
func filterCanaryTokens(content []byte) []byte {
	lines := strings.Split(string(content), "\n")
	kept := make([]string, 0, len(lines))
	for _, line := range lines {
		// An internal token line mentions both the CANARY marker and OWNER=canary.
		internal := strings.Contains(line, "CANARY:") && strings.Contains(line, "OWNER=canary")
		if !internal {
			kept = append(kept, line)
		}
	}
	return []byte(strings.Join(kept, "\n"))
}
// readEmbeddedFile safely reads a file from the embedded filesystem.
// It tries the path as given, then the same path with the "base/" prefix
// toggled (added or removed), to handle both embed layouts.
func readEmbeddedFile(path string) ([]byte, error) {
	candidates := []string{path}
	if strings.HasPrefix(path, "base/") {
		candidates = append(candidates, strings.TrimPrefix(path, "base/"))
	} else {
		candidates = append(candidates, "base/"+path)
	}
	for _, candidate := range candidates {
		if content, err := embedded.CanaryFS.ReadFile(candidate); err == nil {
			return content, nil
		}
	}
	return nil, fmt.Errorf("file not found in embedded filesystem: %s", path)
}
// CANARY: REQ=CBIN-148; FEATURE="CopilotInstructionCreator"; ASPECT=CLI; STATUS=BENCHED; TEST=TestCreateCopilotInstructions; BENCH=BenchmarkCreateCopilotInstructions; UPDATED=2025-10-19
// createCopilotInstructions generates GitHub Copilot instruction files for the project.
//
// Files are rendered from embedded templates into .github/instructions/,
// substituting the project key. Existing files are skipped so user
// customizations survive repeated `canary init` runs.
func createCopilotInstructions(projectName, projectKey string) error {
	instructionsDir := filepath.Join(projectName, ".github", "instructions")
	// Create .github/instructions/ directory structure
	if err := os.MkdirAll(instructionsDir, 0755); err != nil {
		return fmt.Errorf("create .github/instructions: %w", err)
	}
	// Define instruction files to create (target path -> embedded template path)
	instructionFiles := map[string]string{
		// Repository-wide instruction
		"repository.md": "base/copilot/repository.md",
		// Path-specific instructions (nested directories)
		".canary/specs/instruction.md": "base/copilot/specs.md",
		".canary/instruction.md":       "base/copilot/canary.md",
		"tests/instruction.md":         "base/copilot/tests.md",
	}
	// Template data for variable substitution
	type TemplateData struct {
		ProjectKey string
	}
	data := TemplateData{ProjectKey: projectKey}
	for targetPath, templatePath := range instructionFiles {
		fullTargetPath := filepath.Join(instructionsDir, targetPath)
		// Check if file already exists (preserve user customizations)
		if _, err := os.Stat(fullTargetPath); err == nil {
			fmt.Printf("βοΈ Skipping existing instruction file: %s\n", targetPath)
			continue
		}
		// Create parent directories for path-specific instructions
		if err := os.MkdirAll(filepath.Dir(fullTargetPath), 0755); err != nil {
			return fmt.Errorf("create directory for %s: %w", targetPath, err)
		}
		// Read template from embedded filesystem
		templateContent, err := readEmbeddedFile(templatePath)
		if err != nil {
			return fmt.Errorf("read template %s: %w", templatePath, err)
		}
		// Parse and execute template
		tmpl, err := template.New(targetPath).Parse(string(templateContent))
		if err != nil {
			return fmt.Errorf("parse template %s: %w", templatePath, err)
		}
		// Write to file (explicit Close rather than defer: we are in a loop)
		outFile, err := os.Create(fullTargetPath)
		if err != nil {
			return fmt.Errorf("create file %s: %w", fullTargetPath, err)
		}
		if err := tmpl.Execute(outFile, data); err != nil {
			outFile.Close()
			return fmt.Errorf("execute template %s: %w", templatePath, err)
		}
		outFile.Close()
		fmt.Printf("β
Created Copilot instruction: %s\n", targetPath)
	}
	fmt.Println("β
GitHub Copilot instructions configured")
	return nil
}
// CANARY: REQ=CBIN-105; FEATURE="InitWorkflow"; ASPECT=CLI; STATUS=IMPL; OWNER=canary; UPDATED=2025-10-16
// copyCanaryStructure copies the embedded base/ directory tree into the
// target project's .canary/ directory, filtering internal CANARY tokens out
// of markdown, Go, and shell files and marking shell scripts executable.
func copyCanaryStructure(targetDir string) error {
	destRoot := filepath.Join(targetDir, ".canary")
	if err := os.MkdirAll(destRoot, 0755); err != nil {
		return err
	}
	// Everything embedded under "base/" maps 1:1 onto ".canary/".
	return fs.WalkDir(embedded.CanaryFS, "base", func(path string, d fs.DirEntry, walkErr error) error {
		if walkErr != nil {
			return walkErr
		}
		if path == "base" {
			return nil // the root itself needs no copy
		}
		if strings.HasSuffix(path, ".canaryignore") {
			return nil // handled separately by the init command
		}
		rel := strings.TrimPrefix(path, "base/")
		if rel == "" {
			return nil
		}
		dest := filepath.Join(destRoot, rel)
		if d.IsDir() {
			return os.MkdirAll(dest, 0755)
		}
		// Ensure the parent directory exists before writing the file.
		if err := os.MkdirAll(filepath.Dir(dest), 0755); err != nil {
			return err
		}
		data, err := embedded.CanaryFS.ReadFile(path)
		if err != nil {
			return err
		}
		// Strip CANARY CLI internal tokens (OWNER=canary) from text files.
		if strings.HasSuffix(path, ".md") || strings.HasSuffix(path, ".go") || strings.HasSuffix(path, ".sh") {
			data = filterCanaryTokens(data)
		}
		// Shell scripts must be executable; everything else is plain 0644.
		perm := fs.FileMode(0644)
		if strings.HasSuffix(path, ".sh") {
			perm = 0755
		}
		return os.WriteFile(dest, data, perm)
	})
}
// CANARY: REQ=CBIN-105; FEATURE="InitWorkflow"; ASPECT=CLI; STATUS=IMPL; OWNER=canary; UPDATED=2025-10-16
// customizeProjectYaml rewrites project.yaml in place, substituting the
// {{PROJECT_NAME}}, {{PROJECT_DESCRIPTION}} and {{PROJECT_KEY}} placeholders
// with values derived from the chosen project name and key.
func customizeProjectYaml(path, projectName, projectKey string) error {
	raw, err := os.ReadFile(path)
	if err != nil {
		return fmt.Errorf("read project.yaml: %w", err)
	}

	// Apply substitutions sequentially in a fixed order; the description is
	// derived from the project name.
	pairs := [][2]string{
		{"{{PROJECT_NAME}}", projectName},
		{"{{PROJECT_DESCRIPTION}}", fmt.Sprintf("%s project with CANARY requirement tracking", projectName)},
		{"{{PROJECT_KEY}}", projectKey},
	}
	text := string(raw)
	for _, p := range pairs {
		text = strings.ReplaceAll(text, p[0], p[1])
	}

	if err := os.WriteFile(path, []byte(text), 0644); err != nil {
		return fmt.Errorf("write project.yaml: %w", err)
	}
	return nil
}
// CANARY: REQ=CBIN-105; FEATURE="InitWorkflow"; ASPECT=CLI; STATUS=IMPL; OWNER=canary; UPDATED=2025-10-16
// AgentConfig defines configuration for each supported AI agent system
// (Claude, Cursor, Copilot, ...) into which slash commands are installed.
type AgentConfig struct {
Dir string // Directory that receives the agent's command files
Prefix string // Prefix applied to installed command filenames (e.g., "canary." or "canary-")
}
// installSlashCommands copies slash commands to agent systems based on selection mode
// agentsList: specific agents to install for (e.g., ["claude", "cursor"])
// allAgentsFlag: if true, install for all supported agents
// localInstall: if true, install in project directory; if false, install globally in home directory
// If both are empty/false, auto-detect existing agent directories
func installSlashCommands(targetDir string, agentsList []string, allAgentsFlag bool, localInstall bool) error {
	sourceDir := filepath.Join(targetDir, ".canary", "templates", "commands")

	// Resolve the root under which the agent directories live.
	var baseDir string
	if localInstall {
		baseDir = targetDir
		fmt.Println("π Installing commands locally in project directory...")
	} else {
		home, err := os.UserHomeDir()
		if err != nil {
			return fmt.Errorf("get home directory: %w", err)
		}
		baseDir = home
		fmt.Println("π Installing commands globally in home directory...")
	}

	// One compact table (matching the spec-kit-repo structure) drives both
	// the install target and auto-detection.
	type agentSpec struct {
		root   string // dot-directory marking the agent as present
		sub    string // subdirectory that receives command files
		prefix string // filename prefix for installed commands
	}
	specs := map[string]agentSpec{
		"claude":    {".claude", "commands", "canary."},
		"cursor":    {".cursor", "commands", "canary."},
		"copilot":   {".github", "prompts", "canary-"},
		"windsurf":  {".windsurf", "workflows", "canary-"},
		"kilocode":  {".kilocode", "rules", "canary-"},
		"roo":       {".roo", "rules", "canary-"},
		"opencode":  {".opencode", "command", "canary-"},
		"codex":     {".codex", "commands", "canary."},
		"auggie":    {".augment", "rules", "canary-"},
		"codebuddy": {".codebuddy", "commands", "canary."},
		"amazonq":   {".amazonq", "prompts", "canary-"},
	}
	allAgents := make(map[string]AgentConfig, len(specs))
	agentRootDirs := make(map[string]string, len(specs))
	for name, s := range specs {
		root := filepath.Join(baseDir, s.root)
		agentRootDirs[name] = root
		allAgents[name] = AgentConfig{Dir: filepath.Join(root, s.sub), Prefix: s.prefix}
	}

	// Decide which agents receive the commands.
	selected := make(map[string]AgentConfig)
	switch {
	case allAgentsFlag:
		selected = allAgents
	case len(agentsList) > 0:
		for _, name := range agentsList {
			cfg, ok := allAgents[name]
			if !ok {
				return fmt.Errorf("unknown agent: %s (valid: claude, cursor, copilot, windsurf, kilocode, roo, opencode, codex, auggie, codebuddy, amazonq)", name)
			}
			selected[name] = cfg
		}
	default:
		// Auto-detect: only agents whose root directory already exists.
		for name, root := range agentRootDirs {
			if _, err := os.Stat(root); err == nil {
				selected[name] = allAgents[name]
			}
		}
	}

	if len(selected) == 0 {
		fmt.Println("β οΈ No AI agent directories detected - skipping slash command installation")
		fmt.Println(" Create an agent directory (e.g., .claude/, .cursor/) or use --agents or --all-agents flag")
		return nil
	}

	// Source command files are read once, then fanned out per agent.
	entries, err := os.ReadDir(sourceDir)
	if err != nil {
		return fmt.Errorf("read commands directory: %w", err)
	}

	for agentName, cfg := range selected {
		if err := os.MkdirAll(cfg.Dir, 0755); err != nil {
			return fmt.Errorf("create %s directory: %w", agentName, err)
		}
		for _, entry := range entries {
			name := entry.Name()
			if entry.IsDir() || !strings.HasSuffix(name, ".md") {
				continue
			}
			// foo.md -> <prefix>foo.md
			targetName := cfg.Prefix + strings.TrimSuffix(name, ".md") + ".md"

			content, err := os.ReadFile(filepath.Join(sourceDir, name))
			if err != nil {
				return fmt.Errorf("read command file %s: %w", name, err)
			}
			if err := os.WriteFile(filepath.Join(cfg.Dir, targetName), content, 0644); err != nil {
				return fmt.Errorf("write command file %s for %s: %w", targetName, agentName, err)
			}
		}
	}
	return nil
}
// CANARY: REQ=CBIN-105; FEATURE="InitWorkflow"; ASPECT=CLI; STATUS=IMPL; OWNER=canary; UPDATED=2025-10-17
// copyAndProcessAgentFiles copies agent files from embedded/.canary/agents/ to .canary/agents/
// and performs template variable substitution for {{ .AgentPrefix }}, {{ .AgentModel }}, {{ .AgentColor }}
func copyAndProcessAgentFiles(targetDir, agentPrefix, agentModel, agentColor string) error {
	// Agent files live under base/agents/ inside the embedded FS.
	const sourceAgentsDir = "base/agents"
	targetAgentsDir := filepath.Join(targetDir, ".canary", "agents")

	// Create target agents directory.
	if err := os.MkdirAll(targetAgentsDir, 0755); err != nil {
		return fmt.Errorf("create agents directory: %w", err)
	}

	// Read agent files from embedded FS.
	entries, err := embedded.CanaryFS.ReadDir(sourceAgentsDir)
	if err != nil {
		return fmt.Errorf("read agents directory: %w", err)
	}

	// The substitution set is identical for every file; build it once.
	subst := strings.NewReplacer(
		"{{ .AgentPrefix }}", agentPrefix,
		"{{ .AgentModel }}", agentModel,
		"{{ .AgentColor }}", agentColor,
	)

	for _, entry := range entries {
		if entry.IsDir() {
			continue
		}
		// BUG FIX: io/fs (embed.FS) paths are always slash-separated;
		// filepath.Join would produce backslashes on Windows and fail to
		// resolve inside the embedded FS, so join with "/" explicitly.
		sourcePath := sourceAgentsDir + "/" + entry.Name()
		targetPath := filepath.Join(targetAgentsDir, entry.Name())

		content, err := embedded.CanaryFS.ReadFile(sourcePath)
		if err != nil {
			return fmt.Errorf("read agent file %s: %w", entry.Name(), err)
		}

		// Perform template substitution.
		processed := subst.Replace(string(content))
		// Filter out CANARY CLI internal tokens (OWNER=canary).
		processed = string(filterCanaryTokens([]byte(processed)))

		// Write processed content to target.
		if err := os.WriteFile(targetPath, []byte(processed), 0644); err != nil {
			return fmt.Errorf("write agent file %s: %w", entry.Name(), err)
		}
	}
	return nil
}
// CANARY: REQ=CBIN-105; FEATURE="InitWorkflow"; ASPECT=CLI; STATUS=IMPL; OWNER=canary; UPDATED=2025-10-17
// installAgentFilesToSystems copies agent files from embedded/.canary/agents/ to each agent system's agents directory
// This ensures agent definitions are available in each AI agent system (Claude, Cursor, etc.)
//
// Selection rules:
//   - allAgentsFlag true   -> install for every supported agent
//   - agentsList non-empty -> install only for the named agents (error on unknown)
//   - otherwise            -> auto-detect by probing for existing agent root dirs
//
// localInstall selects the project directory as the base; otherwise the
// user's home directory is used.
func installAgentFilesToSystems(targetDir string, agentsList []string, allAgentsFlag bool, agentPrefix, agentModel, agentColor string, localInstall bool) error {
	// Agent files live under base/agents/ inside the embedded FS.
	const sourceAgentsDir = "base/agents"
	entries, err := embedded.CanaryFS.ReadDir(sourceAgentsDir)
	if err != nil {
		return fmt.Errorf("read agents directory: %w", err)
	}

	// Determine base directory for installation.
	var baseDir string
	if localInstall {
		baseDir = targetDir
	} else {
		homeDir, err := os.UserHomeDir()
		if err != nil {
			return fmt.Errorf("get home directory: %w", err)
		}
		baseDir = homeDir
	}

	// Root directory per agent; its presence marks the agent as installed.
	agentRootDirs := map[string]string{
		"claude":    filepath.Join(baseDir, ".claude"),
		"cursor":    filepath.Join(baseDir, ".cursor"),
		"copilot":   filepath.Join(baseDir, ".github"),
		"windsurf":  filepath.Join(baseDir, ".windsurf"),
		"kilocode":  filepath.Join(baseDir, ".kilocode"),
		"roo":       filepath.Join(baseDir, ".roo"),
		"opencode":  filepath.Join(baseDir, ".opencode"),
		"codex":     filepath.Join(baseDir, ".codex"),
		"auggie":    filepath.Join(baseDir, ".augment"),
		"codebuddy": filepath.Join(baseDir, ".codebuddy"),
		"amazonq":   filepath.Join(baseDir, ".amazonq"),
	}
	// agentsDir derives the agents subdirectory from the root, keeping the
	// two previously-duplicated tables in sync by construction.
	agentsDir := func(name string) string {
		if name == "copilot" {
			// Copilot nests agent definitions one level deeper.
			return filepath.Join(agentRootDirs[name], "copilot", "agents")
		}
		return filepath.Join(agentRootDirs[name], "agents")
	}

	// Determine which agents to install for.
	selected := make(map[string]string)
	switch {
	case allAgentsFlag:
		for name := range agentRootDirs {
			selected[name] = agentsDir(name)
		}
	case len(agentsList) > 0:
		for _, name := range agentsList {
			if _, ok := agentRootDirs[name]; !ok {
				return fmt.Errorf("unknown agent: %s (valid: claude, cursor, copilot, windsurf, kilocode, roo, opencode, codex, auggie, codebuddy, amazonq)", name)
			}
			selected[name] = agentsDir(name)
		}
	default:
		// Auto-detect existing agent directories.
		for name, rootDir := range agentRootDirs {
			if _, err := os.Stat(rootDir); err == nil {
				selected[name] = agentsDir(name)
			}
		}
	}
	// If no agents selected, nothing to install.
	if len(selected) == 0 {
		return nil
	}

	// The substitution set is identical for every file; build it once.
	subst := strings.NewReplacer(
		"{{ .AgentPrefix }}", agentPrefix,
		"{{ .AgentModel }}", agentModel,
		"{{ .AgentColor }}", agentColor,
	)

	for agentName, agentDir := range selected {
		if err := os.MkdirAll(agentDir, 0755); err != nil {
			return fmt.Errorf("create %s agents directory: %w", agentName, err)
		}
		for _, entry := range entries {
			if entry.IsDir() {
				continue
			}
			// BUG FIX: io/fs (embed.FS) paths are always slash-separated;
			// filepath.Join would produce backslashes on Windows and fail
			// to resolve inside the embedded FS, so join with "/" directly.
			sourcePath := sourceAgentsDir + "/" + entry.Name()
			targetPath := filepath.Join(agentDir, entry.Name())

			content, err := embedded.CanaryFS.ReadFile(sourcePath)
			if err != nil {
				return fmt.Errorf("read agent file %s: %w", entry.Name(), err)
			}

			// Perform template substitution, then filter out CANARY CLI
			// internal tokens (OWNER=canary).
			processed := subst.Replace(string(content))
			processed = string(filterCanaryTokens([]byte(processed)))

			if err := os.WriteFile(targetPath, []byte(processed), 0644); err != nil {
				return fmt.Errorf("write agent file %s for %s: %w", entry.Name(), agentName, err)
			}
		}
	}
	return nil
}
// CANARY: REQ=CBIN-106; FEATURE="AgentContext"; ASPECT=CLI; STATUS=IMPL; OWNER=canary; UPDATED=2025-10-16
// createClaudeMD generates the CLAUDE.md file for AI agent integration.
// It returns the complete markdown document as a single string. Backtick
// spans inside the document are spliced in via string concatenation because
// the surrounding body is itself a raw (backtick) string literal.
func createClaudeMD() string {
// The document body is static; no inputs or substitutions.
return `# CANARY Development - AI Agent Guide
**Context File for AI Coding Agents**
This project uses CANARY requirement tracking with spec-kit-inspired workflows.
## Available Slash Commands
See [.canary/AGENT_CONTEXT.md](./.canary/AGENT_CONTEXT.md) for detailed information.
### Workflow Commands
- **/canary.constitution** - Create or update project governing principles
- **/canary.specify** - Create a new requirement specification from feature description
- **/canary.plan** - Generate technical implementation plan for a requirement
- **/canary.scan** - Scan codebase for CANARY tokens and generate reports
- **/canary.verify** - Verify GAP_ANALYSIS.md claims against actual implementation
- **/canary.update-stale** - Auto-update UPDATED field for stale tokens (>30 days)
### Command Definitions
All slash commands are defined in:
- ` + "`.canary/templates/commands/constitution.md`" + `
- ` + "`.canary/templates/commands/specify.md`" + `
- ` + "`.canary/templates/commands/plan.md`" + `
- ` + "`.canary/templates/commands/scan.md`" + `
- ` + "`.canary/templates/commands/verify.md`" + `
- ` + "`.canary/templates/commands/update-stale.md`" + `
## Quick Start Workflow
1. **Establish Principles**: ` + "`/canary.constitution Create principles for code quality and testing`" + `
2. **Define Requirement**: ` + "`/canary.specify Add user authentication with OAuth2 support`" + `
3. **Create Plan**: ` + "`/canary.plan CBIN-001 Use Go standard library with bcrypt`" + `
4. **Scan & Verify**: ` + "`/canary.scan`" + ` then ` + "`/canary.verify`" + `
5. **Update Stale**: ` + "`/canary.update-stale`" + ` (as needed)
## CANARY Token Format
` + "```" + `
// CANARY: REQ=CBIN-###; FEATURE="Name"; ASPECT=API; STATUS=IMPL; UPDATED=YYYY-MM-DD
` + "```" + `
**Status Progression:**
- STUB β IMPL β TESTED β BENCHED
**Valid Aspects:**
API, CLI, Engine, Storage, Security, Docs, Wire, Planner, Decode, Encode, RoundTrip, Bench, FrontEnd, Dist
## Constitutional Principles
See [.canary/memory/constitution.md](./.canary/memory/constitution.md) for full details.
**Core Principles:**
1. **Requirement-First**: Every feature starts with a CANARY token
2. **Test-First**: Tests written before implementation (Article IV)
3. **Evidence-Based**: Status promoted based on TEST=/BENCH= fields
4. **Simplicity**: Minimal complexity, prefer standard library
5. **Documentation Currency**: Keep tokens current with UPDATED field
## CLI Commands
` + "```bash" + `
# Initialize new project
canary init my-project
# Create requirement token
canary create CBIN-105 "FeatureName" --aspect API --status IMPL
# Scan for tokens
canary scan --root . --out status.json --csv status.csv
# Verify claims
canary scan --root . --verify GAP_ANALYSIS.md --strict
# Update stale tokens
canary scan --root . --update-stale
` + "```" + `
## Project Structure
` + "```" + `
.canary/
βββ memory/
β βββ constitution.md # Project principles
βββ scripts/
β βββ create-new-requirement.sh # Automation
βββ templates/
β βββ commands/ # Slash command definitions
β βββ spec-template.md # Requirement template
β βββ plan-template.md # Implementation plan template
βββ specs/
βββ CBIN-XXX-feature/ # Individual requirements
βββ spec.md
βββ plan.md
GAP_ANALYSIS.md # Requirement tracking
status.json # Scanner output
` + "```" + `
## For AI Agents
**Before implementing:**
1. Reference ` + "`.canary/memory/constitution.md`" + `
2. Use ` + "`/canary.specify`" + ` to create structured requirements
3. Follow test-first approach (Article IV)
**After implementing:**
1. Update CANARY tokens as code evolves
2. Run ` + "`/canary.scan`" + ` to verify status
3. Run ` + "`/canary.verify`" + ` to confirm claims
**Key Files:**
- [.canary/AGENT_CONTEXT.md](./.canary/AGENT_CONTEXT.md) - Complete context for AI agents
- [.canary/memory/constitution.md](./.canary/memory/constitution.md) - Constitutional principles
- [GAP_ANALYSIS.md](./GAP_ANALYSIS.md) - Requirement tracking
`
}
// createCmd generates a new CANARY token template ready to paste above an
// implementation. Required flags supply the aspect/status; TEST, BENCH and
// OWNER fields are emitted only when the matching flag is non-empty.
var createCmd = &cobra.Command{
Use: "create <req-id> <feature-name>",
Short: "Generate a new CANARY token template",
Long: `Create a properly formatted CANARY token for a new requirement.
Example:
canary create CBIN-CLI-105 "UserProfile" --aspect CLI --status IMPL
Outputs a ready-to-paste CANARY token comment.`,
Args: cobra.MinimumNArgs(2),
RunE: func(cmd *cobra.Command, args []string) error {
	reqID, feature := args[0], args[1]
	aspect, _ := cmd.Flags().GetString("aspect")
	status, _ := cmd.Flags().GetString("status")
	owner, _ := cmd.Flags().GetString("owner")
	test, _ := cmd.Flags().GetString("test")
	bench, _ := cmd.Flags().GetString("bench")

	// Aspect must be one of the canonical CANARY aspects.
	if err := reqid.ValidateAspect(aspect); err != nil {
		return fmt.Errorf("invalid aspect: %w", err)
	}
	aspect = reqid.NormalizeAspect(aspect)

	// UPDATED is always today's UTC date.
	today := time.Now().UTC().Format("2006-01-02")

	// Assemble the token field by field; optional fields appear only when set.
	var b strings.Builder
	fmt.Fprintf(&b, "// CANARY: REQ=%s; FEATURE=\"%s\"; ASPECT=%s; STATUS=%s", reqID, feature, aspect, status)
	if test != "" {
		fmt.Fprintf(&b, "; TEST=%s", test)
	}
	if bench != "" {
		fmt.Fprintf(&b, "; BENCH=%s", bench)
	}
	if owner != "" {
		fmt.Fprintf(&b, "; OWNER=%s", owner)
	}
	fmt.Fprintf(&b, "; UPDATED=%s", today)

	fmt.Println(b.String())
	fmt.Println("\n// Paste this above your implementation:")
	fmt.Printf("// func %s() { ... }\n", feature)
	return nil
},
}
// CANARY: REQ=CBIN-119; FEATURE="ConstitutionCmd"; ASPECT=CLI; STATUS=IMPL; OWNER=canary; UPDATED=2025-10-16
// constitutionCmd creates .canary/memory/constitution.md from the embedded
// template on first run. If the file already exists it is left untouched;
// positional args only trigger an informational hint, never an edit.
var constitutionCmd = &cobra.Command{
Use: "constitution [description]",
Short: "Create or update project governing principles",
Long: `Create or update the project's constitutional principles in .canary/memory/constitution.md.
If no arguments are provided, creates the default constitution.
If arguments are provided, updates or adds specific principles.`,
RunE: func(cmd *cobra.Command, args []string) error {
constitutionPath := ".canary/memory/constitution.md"
// Only create when missing — never overwrite user edits.
if _, err := os.Stat(constitutionPath); os.IsNotExist(err) {
// Read template from embedded FS
content, err := readEmbeddedFile("base/memory/constitution.md")
if err != nil {
return fmt.Errorf("read constitution template: %w", err)
}
// Ensure directory exists
if err := os.MkdirAll(filepath.Dir(constitutionPath), 0755); err != nil {
return fmt.Errorf("create memory directory: %w", err)
}
// Write constitution
if err := os.WriteFile(constitutionPath, content, 0644); err != nil {
return fmt.Errorf("write constitution: %w", err)
}
fmt.Printf("β
Created constitution at: %s\n", constitutionPath)
// Summarize the articles contained in the default template.
fmt.Println("\nConstitutional Principles:")
fmt.Println(" I. Requirement-First Development")
fmt.Println(" II. Specification Discipline")
fmt.Println(" III. Token-Driven Planning")
fmt.Println(" IV. Test-First Imperative")
fmt.Println(" V. Simplicity and Anti-Abstraction")
fmt.Println(" VI. Integration-First Testing")
fmt.Println(" VII. Documentation Currency")
fmt.Println(" VIII. Continuous Improvement")
fmt.Println(" IX. Amendment Process")
} else {
fmt.Printf("β
Constitution already exists at: %s\n", constitutionPath)
if len(args) > 0 {
fmt.Println("\nTo update specific principles, edit the file directly.")
}
}
return nil
},
}
// CANARY: REQ=CBIN-120; FEATURE="SpecifyCmd"; ASPECT=CLI; STATUS=IMPL; OWNER=canary; UPDATED=2025-10-16
// specifyCmd scaffolds a new requirement: it generates the next
// CBIN-<ASPECT>-XXX ID and writes .canary/specs/<ID>-<feature>/spec.md
// from the embedded spec template with placeholders filled in.
var specifyCmd = &cobra.Command{
Use: "specify <feature-description>",
Short: "Create a new requirement specification",
Long: `Create a new CANARY requirement specification from a feature description.
Generates a new requirement ID with aspect-based format (CBIN-<ASPECT>-XXX),
creates a spec directory, and populates it with a specification template.`,
Args: cobra.MinimumNArgs(1),
RunE: func(cmd *cobra.Command, args []string) error {
// All positional args joined form the human-readable feature description.
featureDesc := strings.Join(args, " ")
aspect, _ := cmd.Flags().GetString("aspect")
// Validate aspect
if err := reqid.ValidateAspect(aspect); err != nil {
return fmt.Errorf("invalid aspect: %w", err)
}
// Normalize aspect to canonical form
aspect = reqid.NormalizeAspect(aspect)
// Generate next requirement ID for this aspect
generatedID, err := reqid.GenerateNextID("CBIN", aspect)
if err != nil {
return fmt.Errorf("generate requirement ID: %w", err)
}
// Create sanitized feature name for the directory: every character
// outside [A-Za-z0-9] becomes '-', capped at 50 chars, then trimmed.
featureName := strings.Map(func(r rune) rune {
if (r >= 'a' && r <= 'z') || (r >= 'A' && r <= 'Z') || (r >= '0' && r <= '9') {
return r
}
return '-'
}, featureDesc)
if len(featureName) > 50 {
featureName = featureName[:50]
}
featureName = strings.Trim(featureName, "-")
specsDir := ".canary/specs"
specDir := filepath.Join(specsDir, fmt.Sprintf("%s-%s", generatedID, featureName))
specFile := filepath.Join(specDir, "spec.md")
// Create directory
if err := os.MkdirAll(specDir, 0755); err != nil {
return fmt.Errorf("create spec directory: %w", err)
}
// Read and populate template
templateContent, err := readEmbeddedFile("base/templates/spec-template.md")
if err != nil {
return fmt.Errorf("read spec template: %w", err)
}
// Fill the template placeholders with concrete values.
content := string(templateContent)
content = strings.ReplaceAll(content, "CBIN-XXX", generatedID)
content = strings.ReplaceAll(content, "[FEATURE NAME]", featureDesc)
content = strings.ReplaceAll(content, "YYYY-MM-DD", time.Now().UTC().Format("2006-01-02"))
content = strings.ReplaceAll(content, "<ASPECT>", aspect)
if err := os.WriteFile(specFile, []byte(content), 0644); err != nil {
return fmt.Errorf("write spec file: %w", err)
}
fmt.Printf("β
Created specification: %s\n", specFile)
fmt.Printf("\nRequirement ID: %s\n", generatedID)
fmt.Printf("Aspect: %s\n", aspect)
fmt.Printf("Feature: %s\n", featureDesc)
fmt.Println("\nNext steps:")
fmt.Printf(" 1. Edit %s to complete the specification\n", specFile)
fmt.Printf(" 2. Run: canary plan %s\n", generatedID)
return nil
},
}
// CANARY: REQ=CBIN-133; FEATURE="ImplementCmd"; ASPECT=CLI; STATUS=TESTED; TEST=TestCANARY_CBIN_133_CLI_ExactMatch; OWNER=canary; DOC=user:docs/user/implement-command-guide.md; DOC_HASH=ed68fb1d97cf0562; UPDATED=2025-10-17
// implementCmd resolves a requirement (exact ID, feature name, or fuzzy
// query) to its specification and prints a full implementation prompt.
// With --list it instead prints every unimplemented requirement.
var implementCmd = &cobra.Command{
Use: "implement <query>",
Short: "Generate implementation guidance for a requirement",
Long: `Generate comprehensive implementation guidance for a requirement specification.
This command:
- Accepts requirement by ID (CBIN-XXX), name, or fuzzy search query
- Uses fuzzy matching with auto-selection for strong matches
- Generates complete implementation prompt including:
- Specification details
- Implementation plan
- Constitutional principles
- Implementation checklist
- Progress tracking
- Test-first guidance
Examples:
canary implement CBIN-105 # Exact ID match
canary implement "user auth" # Fuzzy search
canary implement UserAuthentication # Feature name match
canary implement --list # List all unimplemented requirements`,
Args: cobra.MaximumNArgs(1),
RunE: func(cmd *cobra.Command, args []string) error {
	listFlag, _ := cmd.Flags().GetBool("list")
	promptFlag, _ := cmd.Flags().GetBool("prompt")

	// --list short-circuits: show unimplemented requirements and exit.
	if listFlag {
		return listUnimplemented()
	}
	// Without --list, a query argument is mandatory.
	if len(args) == 0 {
		return fmt.Errorf("requirement query is required (use --list to show all unimplemented)")
	}

	// Resolve the query (exact ID, feature name, or fuzzy match) to a spec.
	spec, err := findRequirement(args[0])
	if err != nil {
		return fmt.Errorf("find requirement: %w", err)
	}

	// Render the full implementation prompt and emit it on stdout.
	prompt, err := renderImplementPrompt(spec, &ImplementFlags{Prompt: promptFlag})
	if err != nil {
		return fmt.Errorf("generate prompt: %w", err)
	}
	fmt.Println(prompt)
	return nil
},
}
// loadProjectConfig loads the .canary/project.yaml configuration by
// delegating to the config package, rooted at the current working directory.
func loadProjectConfig() (*config.ProjectConfig, error) {
return config.Load(".")
}
// extractField extracts the value of a single field (e.g. "REQ", "FEATURE")
// from a raw CANARY token string.
//
// Two syntaxes are supported, tried in order:
//  1. quoted:   FIELD="some value"
//  2. unquoted: FIELD=value (terminated by ';' or whitespace)
//
// Returns "" when the field is not present in either form.
func extractField(token, field string) string {
	// Escape the field name so regex metacharacters in a caller-supplied
	// field can never corrupt the pattern (previously interpolated raw).
	name := regexp.QuoteMeta(field)

	// Quoted form first: FIELD="value"
	if m := regexp.MustCompile(name + `="([^"]+)"`).FindStringSubmatch(token); len(m) > 1 {
		return m[1]
	}
	// Fall back to the unquoted form: FIELD=value
	if m := regexp.MustCompile(name + `=([^;\s]+)`).FindStringSubmatch(token); len(m) > 1 {
		return m[1]
	}
	return ""
}
// CANARY: REQ=CBIN-121; FEATURE="PlanCmd"; ASPECT=CLI; STATUS=IMPL; OWNER=canary; UPDATED=2025-10-16
// planCmd generates plan.md inside an existing spec directory. The aspect is
// taken from --aspect, falling back to the value parsed from spec.md, then to
// "Engine". Fails if the spec is missing or a plan already exists.
var planCmd = &cobra.Command{
Use: "plan <CBIN-XXX> [tech-stack]",
Short: "Generate technical implementation plan for a requirement",
Long: `Generate a technical implementation plan from a requirement specification.
Creates a plan.md file in the spec directory with implementation details,
tech stack decisions, and CANARY token placement instructions.`,
Args: cobra.MinimumNArgs(1),
RunE: func(cmd *cobra.Command, args []string) error {
reqID := args[0]
// Remaining args (if any) describe the tech stack free-form.
techStack := ""
if len(args) > 1 {
techStack = strings.Join(args[1:], " ")
}
// Get aspect flag
aspect, _ := cmd.Flags().GetString("aspect")
// Find spec directory: named "<REQ-ID>-<feature>" under .canary/specs.
specsDir := ".canary/specs"
entries, err := os.ReadDir(specsDir)
if err != nil {
return fmt.Errorf("read specs directory: %w", err)
}
var specDir string
for _, entry := range entries {
if strings.HasPrefix(entry.Name(), reqID) && entry.IsDir() {
specDir = filepath.Join(specsDir, entry.Name())
break
}
}
if specDir == "" {
return fmt.Errorf("specification not found for %s", reqID)
}
// Refuse to clobber an existing plan.
planFile := filepath.Join(specDir, "plan.md")
if _, err := os.Stat(planFile); err == nil {
return fmt.Errorf("plan already exists: %s", planFile)
}
// Read template
templateContent, err := readEmbeddedFile("base/templates/plan-template.md")
if err != nil {
return fmt.Errorf("read plan template: %w", err)
}
// Read spec to get feature name and aspect if not provided
specFile := filepath.Join(specDir, "spec.md")
specContent, err := os.ReadFile(specFile)
if err != nil {
return fmt.Errorf("read spec file: %w", err)
}
// Extract feature name and aspect from spec
featureName := "Feature"
specAspect := ""
for _, line := range strings.Split(string(specContent), "\n") {
if strings.HasPrefix(line, "# Feature Specification:") {
featureName = strings.TrimPrefix(line, "# Feature Specification: ")
featureName = strings.TrimSpace(featureName)
}
if strings.HasPrefix(line, "**Aspect:**") {
// Extract aspect from markdown like "**Aspect:** API" or "**Aspect:** [API|CLI|...]"
// NOTE(review): SplitN at the FIRST ':' leaves the trailing "**" of
// "**Aspect:**" in the value (e.g. "** API"); verify downstream
// ValidateAspect tolerates this, or that specs use a different form.
parts := strings.SplitN(line, ":", 2)
if len(parts) == 2 {
aspectVal := strings.TrimSpace(parts[1])
// Remove brackets and extract first option if it's a list
aspectVal = strings.TrimPrefix(aspectVal, "[")
aspectVal = strings.Split(aspectVal, "|")[0]
aspectVal = strings.TrimSpace(aspectVal)
if aspectVal != "" {
specAspect = aspectVal
}
}
}
}
// Use aspect from flag, or fall back to spec, or default to "Engine"
if aspect == "" {
if specAspect != "" {
aspect = specAspect
} else {
aspect = "Engine"
}
}
// Validate and normalize aspect
if err := reqid.ValidateAspect(aspect); err != nil {
return fmt.Errorf("invalid aspect: %w", err)
}
aspect = reqid.NormalizeAspect(aspect)
// Fill template placeholders with concrete values.
content := string(templateContent)
content = strings.ReplaceAll(content, "CBIN-XXX", reqID)
content = strings.ReplaceAll(content, "[FEATURE NAME]", featureName)
content = strings.ReplaceAll(content, "YYYY-MM-DD", time.Now().UTC().Format("2006-01-02"))
content = strings.ReplaceAll(content, "<ASPECT>", aspect)
if techStack != "" {
content = strings.ReplaceAll(content, "[Go/Python/JavaScript/etc.]", techStack)
}
// CANARY: REQ=CBIN-140; FEATURE="PlanGapInjection"; ASPECT=CLI; STATUS=IMPL; UPDATED=2025-10-17
// Inject gap analysis if available; all failures here are best-effort
// and silently skipped so plan generation still succeeds without a DB.
dbPath := ".canary/canary.db"
if _, err := os.Stat(dbPath); err == nil {
db, err := storage.Open(dbPath)
if err == nil {
defer db.Close()
repo := storage.NewGapRepository(db)
service := gap.NewService(repo)
gapContent, err := service.FormatGapsForInjection(reqID)
if err == nil && gapContent != "" {
// Inject gaps at the end of the plan content
content += "\n" + gapContent
}
}
}
if err := os.WriteFile(planFile, []byte(content), 0644); err != nil {
return fmt.Errorf("write plan file: %w", err)
}
fmt.Printf("β
Created implementation plan: %s\n", planFile)
fmt.Printf("\nRequirement: %s\n", reqID)
fmt.Println("\nNext steps:")
fmt.Printf(" 1. Edit %s to complete the plan\n", planFile)
fmt.Println(" 2. Implement following TDD (test-first)")
fmt.Println(" 3. Add CANARY tokens to source code")
fmt.Println(" 4. Run: canary scan")
return nil
},
}
// CANARY: REQ=CBIN-124; FEATURE="IndexCmd"; ASPECT=CLI; STATUS=IMPL; OWNER=canary; UPDATED=2025-10-16
// indexCmd scans the tree with external grep for "CANARY:" markers, parses
// each hit's fields, and upserts them into the SQLite token database.
// NOTE(review): relies on a `grep` (and optionally `git`) binary on PATH —
// confirm this is acceptable on all supported platforms.
var indexCmd = &cobra.Command{
Use: "index [flags]",
Short: "Build or rebuild the CANARY token database",
Long: `Scan the codebase for CANARY tokens and store metadata in SQLite database.
This enables advanced features like priority ordering, keyword search, and checkpoints.
The database is stored at .canary/canary.db by default.`,
RunE: func(cmd *cobra.Command, args []string) error {
dbPath, _ := cmd.Flags().GetString("db")
rootPath, _ := cmd.Flags().GetString("root")
fmt.Printf("Indexing CANARY tokens from: %s\n", rootPath)
// Open or create database
db, err := storage.Open(dbPath)
if err != nil {
return fmt.Errorf("open database: %w", err)
}
defer db.Close()
// Get git info if in a repo; failures leave commitHash/branch empty.
// NOTE(review): gitCmd.Dir is always "" on a freshly built exec.Cmd, so
// the condition is effectively constant-true — presumably only used to
// scope gitCmd; confirm intent.
var commitHash, branch string
if gitCmd := exec.Command("git", "rev-parse", "HEAD"); gitCmd.Dir == "" {
if output, err := gitCmd.Output(); err == nil {
commitHash = strings.TrimSpace(string(output))
}
}
if gitCmd := exec.Command("git", "rev-parse", "--abbrev-ref", "HEAD"); gitCmd.Dir == "" {
if output, err := gitCmd.Output(); err == nil {
branch = strings.TrimSpace(string(output))
}
}
// Scan for all CANARY tokens across known source/doc extensions.
grepCmd := exec.Command("grep",
"-rn",
"--include=*.go", "--include=*.md", "--include=*.py",
"--include=*.js", "--include=*.ts", "--include=*.java",
"--include=*.rb", "--include=*.rs", "--include=*.c",
"--include=*.cpp", "--include=*.h", "--include=*.sql",
"CANARY:",
rootPath,
)
output, err := grepCmd.CombinedOutput()
if err != nil && len(output) == 0 {
// grep exits non-zero when nothing matches; treat as empty result.
fmt.Println("No CANARY tokens found")
return nil
}
// Parse and store tokens
indexed := 0
lines := strings.Split(string(output), "\n")
for _, line := range lines {
if line == "" {
continue
}
// Parse grep output: file:line:content
parts := strings.SplitN(line, ":", 3)
if len(parts) < 3 {
continue
}
file := parts[0]
lineNum := 0
//nolint:errcheck // Best-effort parse, default to 0 on failure
fmt.Sscanf(parts[1], "%d", &lineNum)
content := parts[2]
// Extract all CANARY fields
reqID := extractField(content, "REQ")
feature := extractField(content, "FEATURE")
aspect := extractField(content, "ASPECT")
status := extractField(content, "STATUS")
if reqID == "" || feature == "" {
continue // Skip malformed tokens
}
// Build token struct
docPath := extractField(content, "DOC")
docType := extractField(content, "DOC_TYPE")
// Auto-infer DOC_TYPE from type prefix if not explicitly set
if docPath != "" && docType == "" {
// Extract type from first doc path (e.g., "user:docs/file.md" -> "user")
firstPath := strings.Split(docPath, ",")[0]
if strings.Contains(firstPath, ":") {
docType = strings.Split(firstPath, ":")[0]
}
}
token := &storage.Token{
ReqID: reqID,
Feature: feature,
Aspect: aspect,
Status: status,
FilePath: file,
LineNumber: lineNum,
Test: extractField(content, "TEST"),
Bench: extractField(content, "BENCH"),
Owner: extractField(content, "OWNER"),
Phase: extractField(content, "PHASE"),
Keywords: extractField(content, "KEYWORDS"),
SpecStatus: extractField(content, "SPEC_STATUS"),
UpdatedAt: extractField(content, "UPDATED"),
CreatedAt: extractField(content, "CREATED"),
StartedAt: extractField(content, "STARTED"),
CompletedAt: extractField(content, "COMPLETED"),
CommitHash: commitHash,
Branch: branch,
DependsOn: extractField(content, "DEPENDS_ON"),
Blocks: extractField(content, "BLOCKS"),
RelatedTo: extractField(content, "RELATED_TO"),
DocPath: docPath,
DocHash: extractField(content, "DOC_HASH"),
DocType: docType,
RawToken: content,
IndexedAt: time.Now().UTC().Format(time.RFC3339),
}
// Parse priority; any absent or non-numeric value defaults to 5.
if priorityStr := extractField(content, "PRIORITY"); priorityStr != "" {
if p, err := strconv.Atoi(priorityStr); err == nil {
token.Priority = p
} else {
token.Priority = 5 // default
}
} else {
token.Priority = 5 // default
}
// Set defaults
if token.UpdatedAt == "" {
token.UpdatedAt = time.Now().UTC().Format("2006-01-02")
}
if token.SpecStatus == "" {
token.SpecStatus = "draft"
}
// Store in database; a single bad token is reported but does not
// abort the whole indexing run.
if err := db.UpsertToken(token); err != nil {
fmt.Fprintf(os.Stderr, "Warning: failed to store token %s/%s: %v\n", reqID, feature, err)
continue
}
indexed++
}
fmt.Printf("\nβ
Indexed %d CANARY tokens\n", indexed)
fmt.Printf("Database: %s\n", dbPath)
if commitHash != "" {
fmt.Printf("Commit: %s\n", commitHash[:8])
}
if branch != "" {
fmt.Printf("Branch: %s\n", branch)
}
return nil
},
}
// CANARY: REQ=CBIN-125; FEATURE="ListCmd"; ASPECT=CLI; STATUS=IMPL; OWNER=canary; UPDATED=2025-10-16

// listCmd prints tokens from the database, either as indented JSON or as a
// human-readable table, after applying the flag-driven filters.
var listCmd = &cobra.Command{
	Use:   "list [flags]",
	Short: "List CANARY tokens with filtering and ordering",
	Long: `List tokens from the database with priority ordering and filtering.
Supports filtering by status, aspect, phase, owner, and spec status.
Results are ordered by priority (1=highest) and updated date by default.
By default, hides requirements from:
- Test files (*_test.go, /tests/, /test/)
- Template directories (.canary/templates/, /base/, /embedded/)
- Documentation examples (IMPLEMENTATION_SUMMARY, FINAL_SUMMARY, etc.)
- AI agent directories (.claude/, .cursor/, .github/prompts/, etc.)
Use --include-hidden to show all requirements including hidden ones.`,
	RunE: func(cmd *cobra.Command, args []string) error {
		// Flag lookup errors are ignored: every flag is registered in init()
		// with a default, so the lookups cannot fail.
		dbPath, _ := cmd.Flags().GetString("db")
		filterStatus, _ := cmd.Flags().GetString("status")
		filterAspect, _ := cmd.Flags().GetString("aspect")
		filterPhase, _ := cmd.Flags().GetString("phase")
		filterOwner, _ := cmd.Flags().GetString("owner")
		filterSpecStatus, _ := cmd.Flags().GetString("spec-status")
		priorityMin, _ := cmd.Flags().GetInt("priority-min")
		priorityMax, _ := cmd.Flags().GetInt("priority-max")
		orderBy, _ := cmd.Flags().GetString("order-by")
		limit, _ := cmd.Flags().GetInt("limit")
		jsonOutput, _ := cmd.Flags().GetBool("json")
		includeHidden, _ := cmd.Flags().GetBool("include-hidden")
		db, err := storage.Open(dbPath)
		if err != nil {
			return fmt.Errorf("open database: %w", err)
		}
		defer db.Close()
		// Load project config for the requirement ID pattern. A missing
		// config is non-fatal; pattern filtering is simply skipped.
		cfg, err := loadProjectConfig()
		if err != nil {
			fmt.Fprintf(os.Stderr, "Warning: could not load project config: %v\n", err)
		}
		idPattern := ""
		if cfg != nil && cfg.Requirements.IDPattern != "" {
			idPattern = cfg.Requirements.IDPattern
		}
		// Translate non-empty flags into query filters.
		filters := make(map[string]string)
		if filterStatus != "" {
			filters["status"] = filterStatus
		}
		if filterAspect != "" {
			filters["aspect"] = filterAspect
		}
		if filterPhase != "" {
			filters["phase"] = filterPhase
		}
		if filterOwner != "" {
			filters["owner"] = filterOwner
		}
		if filterSpecStatus != "" {
			filters["spec_status"] = filterSpecStatus
		}
		if priorityMin > 0 {
			filters["priority_min"] = strconv.Itoa(priorityMin)
		}
		if priorityMax > 0 {
			filters["priority_max"] = strconv.Itoa(priorityMax)
		}
		if includeHidden {
			filters["include_hidden"] = "true"
		}
		tokens, err := db.ListTokens(filters, idPattern, orderBy, limit)
		if err != nil {
			return fmt.Errorf("list tokens: %w", err)
		}
		if len(tokens) == 0 {
			fmt.Println("No tokens found")
			return nil
		}
		if jsonOutput {
			enc := json.NewEncoder(os.Stdout)
			enc.SetIndent("", " ")
			return enc.Encode(tokens)
		}
		// Human-readable table output.
		fmt.Printf("Found %d tokens:\n\n", len(tokens))
		for _, token := range tokens {
			fmt.Printf("📋 %s - %s\n", token.ReqID, token.Feature)
			fmt.Printf(" Status: %s | Aspect: %s | Priority: %d", token.Status, token.Aspect, token.Priority)
			if token.Phase != "" {
				fmt.Printf(" | Phase: %s", token.Phase)
			}
			fmt.Println()
			fmt.Printf(" Location: %s:%d\n", token.FilePath, token.LineNumber)
			if token.Test != "" {
				fmt.Printf(" Test: %s\n", token.Test)
			}
			if token.Owner != "" {
				fmt.Printf(" Owner: %s\n", token.Owner)
			}
			if token.Keywords != "" {
				fmt.Printf(" Keywords: %s\n", token.Keywords)
			}
			fmt.Println()
		}
		return nil
	},
}
// CANARY: REQ=CBIN-126; FEATURE="SearchCmd"; ASPECT=CLI; STATUS=IMPL; OWNER=canary; UPDATED=2025-10-16

// searchCmd performs a keyword search over the token database and prints the
// matches as JSON or a short table.
var searchCmd = &cobra.Command{
	Use:   "search <keywords>",
	Short: "Search CANARY tokens by keywords",
	Long: `Search tokens by keywords in feature names, requirement IDs, and keyword tags.
Keywords are matched case-insensitively using LIKE queries.`,
	Args: cobra.MinimumNArgs(1),
	RunE: func(cmd *cobra.Command, args []string) error {
		dbPath, _ := cmd.Flags().GetString("db")
		jsonOutput, _ := cmd.Flags().GetBool("json")
		// All positional args form one search phrase.
		keywords := strings.Join(args, " ")
		db, err := storage.Open(dbPath)
		if err != nil {
			return fmt.Errorf("open database: %w", err)
		}
		defer db.Close()
		tokens, err := db.SearchTokens(keywords)
		if err != nil {
			return fmt.Errorf("search tokens: %w", err)
		}
		if len(tokens) == 0 {
			fmt.Printf("No tokens found for: %s\n", keywords)
			return nil
		}
		if jsonOutput {
			enc := json.NewEncoder(os.Stdout)
			enc.SetIndent("", " ")
			return enc.Encode(tokens)
		}
		fmt.Printf("Search results for '%s' (%d tokens):\n\n", keywords, len(tokens))
		for _, token := range tokens {
			fmt.Printf("📋 %s - %s\n", token.ReqID, token.Feature)
			fmt.Printf(" Status: %s | Priority: %d | %s:%d\n",
				token.Status, token.Priority, token.FilePath, token.LineNumber)
			if token.Keywords != "" {
				fmt.Printf(" Tags: %s\n", token.Keywords)
			}
			fmt.Println()
		}
		return nil
	},
}
// CANARY: REQ=CBIN-127; FEATURE="PrioritizeCmd"; ASPECT=CLI; STATUS=IMPL; OWNER=canary; UPDATED=2025-10-16
var prioritizeCmd = &cobra.Command{
Use: "prioritize <REQ-ID> <feature> <priority>",
Short: "Update priority of a CANARY token",
Long: `Update the priority of a specific token (1=highest, 10=lowest).
Priority affects ordering in list and search results.`,
Args: cobra.ExactArgs(3),
RunE: func(cmd *cobra.Command, args []string) error {
dbPath, _ := cmd.Flags().GetString("db")
reqID := args[0]
feature := args[1]
priority, err := strconv.Atoi(args[2])
if err != nil {
return fmt.Errorf("invalid priority: %s (must be 1-10)", args[2])
}
if priority < 1 || priority > 10 {
return fmt.Errorf("priority must be between 1 (highest) and 10 (lowest)")
}
db, err := storage.Open(dbPath)
if err != nil {
return fmt.Errorf("open database: %w", err)
}
defer db.Close()
if err := db.UpdatePriority(reqID, feature, priority); err != nil {
return fmt.Errorf("update priority: %w", err)
}
fmt.Printf("β
Updated priority for %s/%s to %d\n", reqID, feature, priority)
return nil
},
}
// CANARY: REQ=CBIN-128; FEATURE="CheckpointCmd"; ASPECT=CLI; STATUS=IMPL; OWNER=canary; UPDATED=2025-10-16
var checkpointCmd = &cobra.Command{
Use: "checkpoint <name> [description]",
Short: "Create a state snapshot checkpoint",
Long: `Create a checkpoint to capture current state of all tokens.
Checkpoints include:
- Counts by status (STUB, IMPL, TESTED, BENCHED)
- Commit hash and timestamp
- Full JSON snapshot of all tokens
Useful for tracking progress over time.`,
Args: cobra.MinimumNArgs(1),
RunE: func(cmd *cobra.Command, args []string) error {
dbPath, _ := cmd.Flags().GetString("db")
name := args[0]
description := ""
if len(args) > 1 {
description = strings.Join(args[1:], " ")
}
db, err := storage.Open(dbPath)
if err != nil {
return fmt.Errorf("open database: %w", err)
}
defer db.Close()
// Get current commit hash
commitHash := ""
if gitCmd := exec.Command("git", "rev-parse", "HEAD"); gitCmd.Dir == "" {
if output, err := gitCmd.Output(); err == nil {
commitHash = strings.TrimSpace(string(output))
}
}
// Load project config for ID pattern filtering
cfg, _ := loadProjectConfig()
idPattern := ""
if cfg != nil && cfg.Requirements.IDPattern != "" {
idPattern = cfg.Requirements.IDPattern
}
// Get all tokens for snapshot
tokens, err := db.ListTokens(nil, idPattern, "", 0)
if err != nil {
return fmt.Errorf("get tokens: %w", err)
}
snapshotJSON, err := json.Marshal(tokens)
if err != nil {
return fmt.Errorf("marshal snapshot: %w", err)
}
if err := db.CreateCheckpoint(name, description, commitHash, string(snapshotJSON)); err != nil {
return fmt.Errorf("create checkpoint: %w", err)
}
fmt.Printf("β
Created checkpoint: %s\n", name)
if commitHash != "" {
fmt.Printf("Commit: %s\n", commitHash[:8])
}
fmt.Printf("Tokens: %d\n", len(tokens))
return nil
},
}
// CANARY: REQ=CBIN-129; FEATURE="MigrateCmd"; ASPECT=CLI; STATUS=IMPL; OWNER=canary; UPDATED=2025-10-16
var migrateCmd = &cobra.Command{
Use: "migrate <steps>",
Short: "Run database migrations",
Long: `Apply database migrations to the CANARY database.
Steps can be:
- "all" to migrate to the latest version
- A positive integer to migrate forward by that many steps
- A negative integer to roll back by that many steps`,
Args: cobra.ExactArgs(1),
RunE: func(cmd *cobra.Command, args []string) error {
dbPath, _ := cmd.Flags().GetString("db")
steps := args[0]
fmt.Printf("Running migrations on: %s\n", dbPath)
if err := storage.MigrateDB(dbPath, steps); err != nil {
return fmt.Errorf("migration failed: %w", err)
}
fmt.Println("β
Migrations completed successfully")
return nil
},
}
// CANARY: REQ=CBIN-129; FEATURE="RollbackCmd"; ASPECT=CLI; STATUS=IMPL; OWNER=canary; UPDATED=2025-10-16
var rollbackCmd = &cobra.Command{
Use: "rollback <steps>",
Short: "Roll back database migrations",
Long: `Roll back database migrations.
Steps can be:
- "all" to roll back all migrations
- A positive integer to roll back by that many steps`,
Args: cobra.ExactArgs(1),
RunE: func(cmd *cobra.Command, args []string) error {
dbPath, _ := cmd.Flags().GetString("db")
steps := args[0]
fmt.Printf("Rolling back migrations on: %s\n", dbPath)
if err := storage.TeardownDB(dbPath, steps); err != nil {
return fmt.Errorf("rollback failed: %w", err)
}
fmt.Println("β
Rollback completed successfully")
return nil
},
}
// CANARY: REQ=CBIN-131; FEATURE="DetectCmd"; ASPECT=CLI; STATUS=IMPL; OWNER=canary; UPDATED=2025-10-16
var detectCmd = &cobra.Command{
Use: "detect [directory]",
Short: "Detect what type of system exists (spec-kit or legacy canary)",
Long: `Analyze a directory to determine if it contains a spec-kit or legacy canary system.
This helps determine the best migration strategy.`,
RunE: func(cmd *cobra.Command, args []string) error {
rootDir := "."
if len(args) > 0 {
rootDir = args[0]
}
systemType, details := migrate.DetectSystemType(rootDir)
fmt.Printf("π Analyzing: %s\n\n", rootDir)
fmt.Printf("System Type: %s\n", systemType)
fmt.Printf("Details: %s\n\n", details)
if systemType == migrate.SystemTypeUnknown {
fmt.Println("No recognized system found. You can initialize a new CANARY project with:")
fmt.Println(" canary init")
return nil
}
if systemType == migrate.SystemTypeMigrated {
fmt.Println("β
This system is already using the unified CANARY system!")
fmt.Println("\nAvailable commands:")
fmt.Println(" canary index # Build/rebuild token database")
fmt.Println(" canary list # List tokens")
fmt.Println(" canary scan # Scan for CANARY tokens")
fmt.Println(" canary implement # Show implementation locations")
return nil
}
fmt.Println("To migrate this system, run:")
fmt.Printf(" canary migrate-from %s\n", systemType)
fmt.Println("\nFor a dry run (preview changes):")
fmt.Printf(" canary migrate-from %s --dry-run\n", systemType)
return nil
},
}
// CANARY: REQ=CBIN-132; FEATURE="NextCmd"; ASPECT=CLI; STATUS=BENCHED; TEST=TestCANARY_CBIN_132_CLI_NextPrioritySelection; BENCH=BenchmarkCANARY_CBIN_132_CLI_PriorityQuery; OWNER=canary; DOC=user:docs/user/next-priority-guide.md; DOC_HASH=17524f7a14d2c410; UPDATED=2025-10-17

// nextCmd selects the highest-priority unimplemented requirement and prints
// either a short summary (--dry-run) or a full implementation prompt.
var nextCmd = &cobra.Command{
	Use:   "next [flags]",
	Short: "Identify and implement next highest priority requirement",
	Long: `Identify the next highest priority unimplemented requirement and generate
comprehensive implementation guidance.
This command automatically:
- Queries database or scans filesystem for CANARY tokens
- Identifies highest priority STUB or IMPL requirement
- Excludes hidden requirements (test files, templates, examples)
- Verifies dependencies are satisfied
- Generates comprehensive implementation prompt with:
- Specification details
- Constitutional principles
- Test-first guidance
- Token placement examples
Priority determination factors:
1. PRIORITY field (1=highest, 10=lowest)
2. STATUS (STUB > IMPL > TESTED)
3. DEPENDS_ON (dependencies must be TESTED/BENCHED)
4. UPDATED field (older tokens get priority boost)`,
	RunE: func(cmd *cobra.Command, args []string) error {
		dbPath, _ := cmd.Flags().GetString("db")
		promptFlag, _ := cmd.Flags().GetBool("prompt")
		jsonOutput, _ := cmd.Flags().GetBool("json")
		dryRun, _ := cmd.Flags().GetBool("dry-run")
		filterStatus, _ := cmd.Flags().GetString("status")
		filterAspect, _ := cmd.Flags().GetString("aspect")
		// Translate non-empty flags into selection filters.
		filters := make(map[string]string)
		if filterStatus != "" {
			filters["status"] = filterStatus
		}
		if filterAspect != "" {
			filters["aspect"] = filterAspect
		}
		// Pick the next candidate; a nil token means nothing is left to do.
		token, err := selectNextPriority(dbPath, filters)
		if err != nil {
			return fmt.Errorf("select next priority: %w", err)
		}
		if token == nil {
			fmt.Println("🎉 All requirements completed! No work available.")
			fmt.Println("\nSuggestions:")
			fmt.Println(" • Run: canary scan --verify GAP_ANALYSIS.md")
			fmt.Println(" • Review completed requirements")
			fmt.Println(" • Consider creating new specifications")
			return nil
		}
		if dryRun {
			fmt.Printf("Next priority (dry run): %s - %s\n", token.ReqID, token.Feature)
			fmt.Printf("Priority: %d | Status: %s | Aspect: %s\n", token.Priority, token.Status, token.Aspect)
			fmt.Printf("Location: %s\n", token.FilePath)
			return nil
		}
		// Render the implementation prompt for the selected token.
		output, err := renderPrompt(token, promptFlag)
		if err != nil {
			return fmt.Errorf("render prompt: %w", err)
		}
		if jsonOutput {
			// TODO: Implement JSON output format
			fmt.Println("{\"error\": \"JSON output not yet implemented\"}")
			return nil
		}
		fmt.Println(output)
		return nil
	},
}
// CANARY: REQ=CBIN-131; FEATURE="MigrateFromCmd"; ASPECT=CLI; STATUS=IMPL; OWNER=canary; UPDATED=2025-10-16
var migrateFromCmd = &cobra.Command{
Use: "migrate-from <system-type> [directory]",
Short: "Migrate from spec-kit or legacy canary to unified canary system",
Long: `Migrate an existing spec-kit or legacy canary project to the new unified system.
System types:
spec-kit - Migrate from spec-kit specification system
legacy-canary - Migrate from legacy CANARY token system
The migration will:
- Create .canary/ directory structure
- Copy/merge existing templates and configurations
- Preserve existing CANARY tokens and documentation
- Create missing files (constitution, slash commands, etc.)
Use --dry-run to preview changes before applying them.`,
Args: cobra.RangeArgs(1, 2),
RunE: func(cmd *cobra.Command, args []string) error {
systemTypeStr := args[0]
rootDir := "."
if len(args) > 1 {
rootDir = args[1]
}
dryRun, _ := cmd.Flags().GetBool("dry-run")
force, _ := cmd.Flags().GetBool("force")
// Parse system type
var systemType migrate.SystemType
switch systemTypeStr {
case "spec-kit":
systemType = migrate.SystemTypeSpecKit
case "legacy-canary":
systemType = migrate.SystemTypeLegacyCanary
default:
return fmt.Errorf("unsupported system type: %s (use 'spec-kit' or 'legacy-canary')", systemTypeStr)
}
// Detect actual system type
detectedType, details := migrate.DetectSystemType(rootDir)
// Check if already migrated
if detectedType == migrate.SystemTypeMigrated {
fmt.Printf("β
System already migrated!\n\n")
fmt.Printf("Details: %s\n\n", details)
fmt.Println("This system is already using the unified CANARY system.")
fmt.Println("No migration needed.")
fmt.Println("\nAvailable commands:")
fmt.Println(" canary index # Build/rebuild token database")
fmt.Println(" canary list # List tokens")
fmt.Println(" canary scan # Scan for CANARY tokens")
fmt.Println(" canary implement # Show implementation locations")
return nil
}
if !force && detectedType != systemType {
if detectedType == migrate.SystemTypeUnknown {
fmt.Printf("β οΈ Warning: No %s system detected in %s\n", systemType, rootDir)
fmt.Printf("Details: %s\n", details)
fmt.Println("\nUse --force to proceed anyway, or run 'canary detect' to identify the system type.")
return fmt.Errorf("system type mismatch")
}
fmt.Printf("β οΈ Warning: Detected %s but trying to migrate as %s\n", detectedType, systemType)
fmt.Println("Use --force to override detection, or specify the correct system type.")
return fmt.Errorf("system type mismatch")
}
// Create migration plan
fmt.Printf("π Planning migration from %s...\n\n", systemType)
plan, err := migrate.PlanMigration(rootDir, systemType, dryRun)
if err != nil {
return fmt.Errorf("failed to create migration plan: %w", err)
}
// Show summary
fmt.Println(migrate.GetMigrationSummary(plan))
// Execute migration
fmt.Printf("\nπ Executing migration...\n\n")
if err := migrate.ExecuteMigration(rootDir, plan, dryRun); err != nil {
return fmt.Errorf("migration failed: %w", err)
}
if dryRun {
fmt.Println("\nβ
Dry run complete - no changes were made")
fmt.Println("Run without --dry-run to apply changes")
} else {
fmt.Println("\nβ
Migration complete!")
fmt.Println("\nNext steps:")
fmt.Println(" 1. Review migrated files in .canary/")
fmt.Println(" 2. Update slash commands in .canary/templates/commands/ for your workflow")
fmt.Println(" 3. Run: canary index")
fmt.Println(" 4. Run: canary scan --root . --out status.json")
}
return nil
},
}
// init configures default logging, wires every subcommand onto rootCmd, and
// registers all command flags before Execute runs.
func init() {
	// Configure slog to use ERROR level by default to reduce noise
	opts := &slog.HandlerOptions{
		Level: slog.LevelError,
	}
	handler := slog.NewTextHandler(os.Stderr, opts)
	slog.SetDefault(slog.New(handler))
	// Subcommand registration.
	rootCmd.AddCommand(scanCmd)
	rootCmd.AddCommand(initCmd)
	rootCmd.AddCommand(createCmd)
	rootCmd.AddCommand(constitutionCmd)
	rootCmd.AddCommand(specifyCmd)
	rootCmd.AddCommand(planCmd)
	rootCmd.AddCommand(implementCmd)
	rootCmd.AddCommand(nextCmd)
	rootCmd.AddCommand(indexCmd)
	rootCmd.AddCommand(listCmd)
	rootCmd.AddCommand(searchCmd)
	rootCmd.AddCommand(prioritizeCmd)
	rootCmd.AddCommand(checkpointCmd)
	rootCmd.AddCommand(migrateCmd)
	rootCmd.AddCommand(rollbackCmd)
	rootCmd.AddCommand(detectCmd)
	rootCmd.AddCommand(migrateFromCmd)
	rootCmd.AddCommand(orphanCmd)
	rootCmd.AddCommand(docCmd)
	// CANARY: REQ=CBIN-CLI-001; FEATURE="ShowCmd"; ASPECT=CLI; STATUS=TESTED; TEST=TestCANARY_CBIN_CLI_001_CLI_ShowCmd; UPDATED=2025-10-16
	rootCmd.AddCommand(showCmd)
	// CANARY: REQ=CBIN-CLI-001; FEATURE="FilesCmd"; ASPECT=CLI; STATUS=TESTED; TEST=TestCANARY_CBIN_CLI_001_CLI_FilesCmd; UPDATED=2025-10-16
	rootCmd.AddCommand(filesCmd)
	// CANARY: REQ=CBIN-CLI-001; FEATURE="StatusCmd"; ASPECT=CLI; STATUS=TESTED; TEST=TestCANARY_CBIN_CLI_001_CLI_StatusCmd; UPDATED=2025-10-16
	rootCmd.AddCommand(statusCmd)
	// CANARY: REQ=CBIN-CLI-001; FEATURE="GrepCmd"; ASPECT=CLI; STATUS=TESTED; TEST=TestCANARY_CBIN_CLI_001_CLI_GrepCmd; UPDATED=2025-10-16
	rootCmd.AddCommand(grepCmd)
	// CANARY: REQ=CBIN-147; FEATURE="DepsParentCommand"; ASPECT=CLI; STATUS=TESTED; TEST=TestDepsParentCommand; UPDATED=2025-10-18
	rootCmd.AddCommand(createDepsCommand())
	// CANARY: REQ=CBIN-140; FEATURE="GapCmd"; ASPECT=CLI; STATUS=IMPL; UPDATED=2025-10-17
	rootCmd.AddCommand(gapCmd)
	// CANARY: REQ=CBIN-145; FEATURE="SpecsCmd"; ASPECT=CLI; STATUS=TESTED; TEST=TestCANARY_CBIN_145_CLI_SpecsCmd; UPDATED=2025-10-17
	rootCmd.AddCommand(specsCmd)
	// Bug tracking command for managing BUG-* CANARY tokens
	rootCmd.AddCommand(bugCmd)
	// initCmd flags
	initCmd.Flags().Bool("local", false, "install commands locally in project directory (default: global in home directory)")
	initCmd.Flags().StringSlice("agents", []string{}, "comma-separated list of agents to install for (claude,cursor,copilot,windsurf,kilocode,roo,opencode,codex,auggie,codebuddy,amazonq)")
	initCmd.Flags().Bool("all-agents", false, "install commands for all supported agents")
	initCmd.Flags().String("key", "", "project requirement ID prefix (e.g., CBIN, PROJ, ACME)")
	initCmd.Flags().String("agent-prefix", "", "agent name prefix for CANARY agents (default: project key)")
	initCmd.Flags().String("agent-model", "sonnet", "AI model for CANARY agents")
	initCmd.Flags().String("agent-color", "blue", "color for CANARY agents")
	// specifyCmd flags
	specifyCmd.Flags().String("aspect", "Engine", "requirement aspect (API, CLI, Engine, Storage, Security, Docs, Wire, Planner, Decode, Encode, RoundTrip, Bench, FrontEnd, Dist)")
	// planCmd flags
	planCmd.Flags().String("aspect", "", "requirement aspect for template substitution (API, CLI, Engine, Storage, Security, Docs, Wire, Planner, Decode, Encode, RoundTrip, Bench, FrontEnd, Dist)")
	// createCmd flags
	createCmd.Flags().String("aspect", "API", "requirement aspect/category")
	createCmd.Flags().String("status", "IMPL", "implementation status")
	createCmd.Flags().String("owner", "", "team/person responsible")
	createCmd.Flags().String("test", "", "test function name")
	createCmd.Flags().String("bench", "", "benchmark function name")
	// implementCmd flags
	implementCmd.Flags().Bool("list", false, "list all unimplemented requirements")
	implementCmd.Flags().Bool("prompt", true, "generate full implementation prompt (default: true)")
	// indexCmd flags
	indexCmd.Flags().String("db", ".canary/canary.db", "path to database file")
	indexCmd.Flags().String("root", ".", "root directory to scan")
	// listCmd flags
	listCmd.Flags().String("db", ".canary/canary.db", "path to database file")
	listCmd.Flags().String("status", "", "filter by status (STUB, IMPL, TESTED, BENCHED)")
	listCmd.Flags().String("aspect", "", "filter by aspect (API, CLI, Engine, etc.)")
	listCmd.Flags().String("phase", "", "filter by phase (Phase0, Phase1, Phase2, Phase3)")
	listCmd.Flags().String("owner", "", "filter by owner")
	listCmd.Flags().String("spec-status", "", "filter by spec status (draft, approved, in-progress, completed, archived)")
	listCmd.Flags().Int("priority-min", 0, "filter by minimum priority (0 = no minimum)")
	listCmd.Flags().Int("priority-max", 0, "filter by maximum priority (0 = no maximum)")
	listCmd.Flags().String("order-by", "", "custom ORDER BY clause (default: priority ASC, updated_at DESC)")
	listCmd.Flags().Int("limit", 0, "maximum number of results (0 = no limit)")
	listCmd.Flags().Bool("json", false, "output as JSON")
	listCmd.Flags().Bool("include-hidden", false, "include hidden requirements (test files, templates, examples)")
	// searchCmd flags
	searchCmd.Flags().String("db", ".canary/canary.db", "path to database file")
	searchCmd.Flags().Bool("json", false, "output as JSON")
	// prioritizeCmd flags
	prioritizeCmd.Flags().String("db", ".canary/canary.db", "path to database file")
	// checkpointCmd flags
	checkpointCmd.Flags().String("db", ".canary/canary.db", "path to database file")
	// migrateCmd flags
	migrateCmd.Flags().String("db", ".canary/canary.db", "path to database file")
	// rollbackCmd flags
	rollbackCmd.Flags().String("db", ".canary/canary.db", "path to database file")
	// migrateFromCmd flags
	migrateFromCmd.Flags().Bool("dry-run", false, "preview changes without applying them")
	migrateFromCmd.Flags().Bool("force", false, "force migration even if system type doesn't match detection")
	// scanCmd flags
	scanCmd.Flags().String("root", ".", "root directory to scan")
	scanCmd.Flags().String("out", "status.json", "output status.json path")
	scanCmd.Flags().String("csv", "", "optional status.csv path")
	scanCmd.Flags().String("verify", "", "GAP_ANALYSIS file to verify claims")
	scanCmd.Flags().Bool("strict", false, "enforce staleness on TESTED/BENCHED tokens (30 days)")
	scanCmd.Flags().Bool("update-stale", false, "rewrite UPDATED field for stale tokens")
	scanCmd.Flags().String("skip", "", "skip path regex (RE2)")
	scanCmd.Flags().Bool("project-only", false, "filter by project requirement ID pattern")
	// nextCmd flags
	nextCmd.Flags().String("db", ".canary/canary.db", "path to database file")
	nextCmd.Flags().Bool("prompt", false, "generate full implementation prompt (default: summary only)")
	nextCmd.Flags().Bool("json", false, "output in JSON format")
	nextCmd.Flags().Bool("dry-run", false, "show what would be selected without generating prompt")
	nextCmd.Flags().String("status", "", "filter by status (STUB, IMPL, TESTED, BENCHED)")
	nextCmd.Flags().String("aspect", "", "filter by aspect (API, CLI, Engine, Storage, etc.)")
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
// CANARY: REQ=CBIN-145; FEATURE="MigrateCommand"; ASPECT=CLI; STATUS=IMPL; UPDATED=2025-10-17
package main
import (
"fmt"
"os"
"path/filepath"
"strings"
"github.com/spf13/cobra"
"go.devnw.com/canary/internal/migrate"
"go.devnw.com/canary/internal/storage"
)
// CANARY: REQ=CBIN-145; FEATURE="MigrateParentCommand"; ASPECT=CLI; STATUS=IMPL; UPDATED=2025-10-17

// orphanCmd is the parent command for orphan-token management; the actual
// work happens in its "detect" and "run" subcommands.
var orphanCmd = &cobra.Command{
	Use:   "orphan",
	Short: "Manage orphaned CANARY tokens (tokens without specifications)",
	Long: `Generate specifications and plans for requirements with orphaned tokens.
Orphaned requirements are those that have CANARY tokens in the codebase but no
formal specification file in .canary/specs/. This typically happens when migrating
legacy code or when tokens were added without following the requirement-first workflow.
The orphan command:
1. Detects requirements with tokens but no specs
2. Generates spec.md from existing token metadata
3. Generates plan.md reflecting current implementation state
4. Filters out documentation/example tokens automatically`,
	Example: ` # Detect orphaned requirements
canary orphan detect
# Migrate a specific requirement
canary orphan run CBIN-105
# Migrate all orphaned requirements
canary orphan run --all
# Preview migration without creating files
canary orphan run --all --dry-run`,
}
// CANARY: REQ=CBIN-145; FEATURE="MigrateDetectCommand"; ASPECT=CLI; STATUS=IMPL; UPDATED=2025-10-17
var migrateDetectCmd = &cobra.Command{
Use: "detect",
Short: "Detect orphaned requirements",
Long: `List all requirements that have CANARY tokens but no specification files.
This command scans the database for tokens and checks if corresponding spec.md
files exist in .canary/specs/. Tokens in documentation directories are automatically
excluded.`,
Example: ` canary migrate detect
canary migrate detect --show-features`,
RunE: func(cmd *cobra.Command, args []string) error {
dbPath := cmd.Flag("db").Value.String()
rootDir, _ := cmd.Flags().GetString("root")
showFeatures, _ := cmd.Flags().GetBool("show-features")
// Open database
db, err := storage.Open(dbPath)
if err != nil {
return fmt.Errorf("failed to open database: %w", err)
}
defer db.Close()
// Detect orphans with path filtering
excludePaths := []string{"/docs/", "/.claude/", "/.cursor/", "/.canary/specs/"}
orphans, err := migrate.DetectOrphans(db, rootDir, excludePaths)
if err != nil {
return fmt.Errorf("failed to detect orphans: %w", err)
}
if len(orphans) == 0 {
fmt.Println("β
No orphaned requirements found!")
fmt.Println("All CANARY tokens have corresponding specifications.")
return nil
}
fmt.Printf("π Found %d orphaned requirement(s):\n\n", len(orphans))
for _, orphan := range orphans {
confidenceEmoji := "π’"
if orphan.Confidence == migrate.ConfidenceMedium {
confidenceEmoji = "π‘"
} else if orphan.Confidence == migrate.ConfidenceLow {
confidenceEmoji = "π΄"
}
fmt.Printf("%s %s (Confidence: %s)\n", confidenceEmoji, orphan.ReqID, orphan.Confidence)
fmt.Printf(" Features: %d\n", orphan.FeatureCount)
if showFeatures {
for _, token := range orphan.Features {
fmt.Printf(" - %s (%s, %s) at %s:%d\n",
token.Feature, token.Aspect, token.Status, token.FilePath, token.LineNumber)
}
}
fmt.Println()
}
fmt.Printf("π‘ To migrate these requirements:\n")
fmt.Printf(" Single: canary migrate <REQ-ID>\n")
fmt.Printf(" All: canary migrate --all\n")
fmt.Printf(" Preview: canary migrate --all --dry-run\n")
return nil
},
}
// CANARY: REQ=CBIN-145; FEATURE="MigrateRunCommand"; ASPECT=CLI; STATUS=IMPL; UPDATED=2025-10-17
var migrateRunCmd = &cobra.Command{
Use: "run [REQ-ID]",
Short: "Migrate orphaned requirements to specifications",
Long: `Generate specification and plan files for orphaned requirements.
For each orphaned requirement:
1. Creates .canary/specs/REQ-ID-name/ directory
2. Generates spec.md from existing tokens
3. Generates plan.md reflecting implementation status
4. Marks requirement as having a specification
Use --all to migrate all orphaned requirements at once.
Use --dry-run to preview changes without creating files.`,
Example: ` # Migrate single requirement
canary migrate run CBIN-105
# Migrate all orphaned requirements
canary migrate run --all
# Preview without creating files
canary migrate run --all --dry-run`,
Args: cobra.MaximumNArgs(1),
RunE: func(cmd *cobra.Command, args []string) error {
dbPath := cmd.Flag("db").Value.String()
rootDir, _ := cmd.Flags().GetString("root")
migrateAll, _ := cmd.Flags().GetBool("all")
dryRun, _ := cmd.Flags().GetBool("dry-run")
// Validate arguments
if len(args) == 0 && !migrateAll {
return fmt.Errorf("provide REQ-ID or use --all flag")
}
if len(args) > 0 && migrateAll {
return fmt.Errorf("cannot specify REQ-ID with --all flag")
}
// Open database
db, err := storage.Open(dbPath)
if err != nil {
return fmt.Errorf("failed to open database: %w", err)
}
defer db.Close()
// Detect orphans
excludePaths := []string{"/docs/", "/.claude/", "/.cursor/", "/.canary/specs/"}
orphans, err := migrate.DetectOrphans(db, rootDir, excludePaths)
if err != nil {
return fmt.Errorf("failed to detect orphans: %w", err)
}
if len(orphans) == 0 {
fmt.Println("β
No orphaned requirements to migrate!")
return nil
}
// Filter to specific requirement if provided
var toMigrate []*migrate.OrphanedRequirement
if len(args) == 1 {
reqID := strings.ToUpper(args[0])
found := false
for _, orphan := range orphans {
if orphan.ReqID == reqID {
toMigrate = append(toMigrate, orphan)
found = true
break
}
}
if !found {
return fmt.Errorf("requirement %s is not orphaned or does not exist", reqID)
}
} else {
toMigrate = orphans
}
// Dry run mode
if dryRun {
fmt.Printf("π Dry run: would migrate %d requirement(s)\n\n", len(toMigrate))
for _, orphan := range toMigrate {
dirName := orphan.ReqID + "-" + slugify(orphan.Features[0].Feature)
fmt.Printf("Would create:\n")
fmt.Printf(" π .canary/specs/%s/\n", dirName)
fmt.Printf(" π .canary/specs/%s/spec.md (Confidence: %s)\n", dirName, orphan.Confidence)
fmt.Printf(" π .canary/specs/%s/plan.md\n", dirName)
fmt.Println()
}
fmt.Println("β
Dry run complete (no files created)")
return nil
}
// Perform migration
specsDir := filepath.Join(rootDir, ".canary", "specs")
if err := os.MkdirAll(specsDir, 0755); err != nil {
return fmt.Errorf("failed to create specs directory: %w", err)
}
migratedCount := 0
for _, orphan := range toMigrate {
fmt.Printf("π Migrating %s...\n", orphan.ReqID)
// Generate spec
specContent, err := migrate.GenerateSpec(orphan)
if err != nil {
fmt.Printf("β οΈ Failed to generate spec for %s: %v\n", orphan.ReqID, err)
continue
}
// Generate plan
planContent, err := migrate.GeneratePlan(orphan)
if err != nil {
fmt.Printf("β οΈ Failed to generate plan for %s: %v\n", orphan.ReqID, err)
continue
}
// Create directory
primaryFeature := orphan.Features[0].Feature
dirName := orphan.ReqID + "-" + slugify(primaryFeature)
specDir := filepath.Join(specsDir, dirName)
if err := os.MkdirAll(specDir, 0755); err != nil {
fmt.Printf("β οΈ Failed to create directory for %s: %v\n", orphan.ReqID, err)
continue
}
// Write spec
specPath := filepath.Join(specDir, "spec.md")
if err := os.WriteFile(specPath, []byte(specContent), 0644); err != nil {
fmt.Printf("β οΈ Failed to write spec for %s: %v\n", orphan.ReqID, err)
continue
}
// Write plan
planPath := filepath.Join(specDir, "plan.md")
if err := os.WriteFile(planPath, []byte(planContent), 0644); err != nil {
fmt.Printf("β οΈ Failed to write plan for %s: %v\n", orphan.ReqID, err)
continue
}
fmt.Printf("β
Migrated %s (Confidence: %s)\n", orphan.ReqID, orphan.Confidence)
fmt.Printf(" π %s\n", specPath)
fmt.Printf(" π %s\n", planPath)
if orphan.Confidence == migrate.ConfidenceLow {
fmt.Printf(" β οΈ Low confidence - please review and update manually\n")
}
fmt.Println()
migratedCount++
}
// Summary
fmt.Printf("\nβ
Successfully migrated %d requirement(s)\n", migratedCount)
if migratedCount > 0 {
fmt.Println("\nπ‘ Next steps:")
fmt.Println(" 1. Review generated specifications for accuracy")
fmt.Println(" 2. Update spec.md files with detailed requirements")
fmt.Println(" 3. Update plan.md files with implementation details")
fmt.Println(" 4. Run 'canary scan' to reindex the database")
}
return nil
},
}
// slugify converts a string to a slug (lowercase, alphanumeric + hyphens)
func slugify(s string) string {
result := ""
for _, c := range s {
if (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') {
result += string(c)
} else if c == ' ' || c == '-' || c == '_' {
result += "-"
}
}
// Convert to lowercase
lower := ""
for _, c := range result {
if c >= 'A' && c <= 'Z' {
lower += string(c + 32)
} else {
lower += string(c)
}
}
// Limit length
if len(lower) > 40 {
return lower[:40]
}
return lower
}
func init() {
// Add subcommands
orphanCmd.AddCommand(migrateDetectCmd)
orphanCmd.AddCommand(migrateRunCmd)
// Global flags
migrateDetectCmd.Flags().String("db", ".canary/canary.db", "Path to database file")
migrateDetectCmd.Flags().String("root", ".", "Root directory for project")
migrateDetectCmd.Flags().Bool("show-features", false, "Show feature details for each orphan")
migrateRunCmd.Flags().String("db", ".canary/canary.db", "Path to database file")
migrateRunCmd.Flags().String("root", ".", "Root directory for project")
migrateRunCmd.Flags().Bool("all", false, "Migrate all orphaned requirements")
migrateRunCmd.Flags().Bool("dry-run", false, "Preview migration without creating files")
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
// CANARY: REQ=CBIN-132; FEATURE="NextPriorityCommand"; ASPECT=CLI; STATUS=BENCHED; TEST=TestCANARY_CBIN_132_CLI_NextPrioritySelection; BENCH=BenchmarkCANARY_CBIN_132_CLI_PriorityQuery; OWNER=canary; UPDATED=2025-10-16
package main
import (
"fmt"
"os"
"os/exec"
"path/filepath"
"regexp"
"strings"
"text/template"
"time"
"go.devnw.com/canary/internal/config"
"go.devnw.com/canary/internal/storage"
)
// PromptData holds template variables for prompt generation. It is the
// data model rendered through .canary/templates/next-prompt-template.md
// by renderPrompt.
type PromptData struct {
	ReqID             string           // requirement ID from the CANARY token
	Feature           string           // feature name from the CANARY token
	Aspect            string           // aspect (e.g. CLI, API, Storage)
	Status            string           // current implementation status
	Priority          int              // numeric priority (lower = more urgent)
	SpecFile          string           // path to the matched spec.md, if found
	SpecContent       string           // full text of the spec file, if readable
	Constitution      string           // contents of .canary/memory/constitution.md, if present
	RelatedSpecs      []RelatedSpec    // cross-references to related specifications
	Dependencies      []*storage.Token // resolved tokens for each DependsOn entry
	SuggestedFiles    []string         // file-location hints derived from the aspect
	TestGuidance      string           // boilerplate test-first guidance text
	TokenExample      string           // example CANARY token/stub for the requirement
	SuccessCriteria   []string         // criteria extracted from the spec (or defaults)
	Today             string           // today's date, UTC, formatted YYYY-MM-DD
	SuggestedTestFile string           // suggested path for the feature's test file
	PackageName       string           // Go package name guessed from the aspect
}
// RelatedSpec represents a related specification reference, used to link
// a prompt to other requirements' spec documents.
type RelatedSpec struct {
	ReqID    string // requirement ID of the related spec
	Feature  string // feature name of the related spec
	SpecFile string // path to the related spec.md
}
// selectNextPriority identifies the highest-priority unimplemented
// requirement. It prefers the database at dbPath and silently degrades to
// a filesystem scan when the database is missing or fails to open.
func selectNextPriority(dbPath string, filters map[string]string) (*storage.Token, error) {
	// No database file at all -> filesystem scan.
	if _, statErr := os.Stat(dbPath); os.IsNotExist(statErr) {
		return selectFromFilesystem(filters)
	}
	// Database exists but cannot be opened -> filesystem scan as well.
	db, openErr := storage.Open(dbPath)
	if openErr != nil {
		return selectFromFilesystem(filters)
	}
	defer db.Close()
	return selectFromDatabase(db, filters)
}
// selectFromDatabase queries the database for next priority
func selectFromDatabase(db *storage.DB, filters map[string]string) (*storage.Token, error) {
// Build filters for incomplete requirements
if filters == nil {
filters = make(map[string]string)
}
// Load project config for ID pattern filtering
cfg, _ := config.Load(".")
idPattern := ""
if cfg != nil && cfg.Requirements.IDPattern != "" {
idPattern = cfg.Requirements.IDPattern
}
// If no status filter, only select STUB or IMPL by default
if _, hasStatusFilter := filters["status"]; !hasStatusFilter {
// Query separately for STUB and IMPL, prioritizing STUB
stubFilters := make(map[string]string)
for k, v := range filters {
stubFilters[k] = v
}
stubFilters["status"] = "STUB"
// Try STUB first
tokens, err := db.ListTokens(stubFilters, idPattern, "priority ASC, updated_at DESC", 50)
if err != nil {
return nil, fmt.Errorf("query STUB tokens: %w", err)
}
// Filter out blocked tokens
for _, token := range tokens {
if !hasUnresolvedDependencies(db, token) {
return token, nil
}
}
// Try IMPL if no STUB available
implFilters := make(map[string]string)
for k, v := range filters {
implFilters[k] = v
}
implFilters["status"] = "IMPL"
tokens, err = db.ListTokens(implFilters, idPattern, "priority ASC, updated_at DESC", 50)
if err != nil {
return nil, fmt.Errorf("query IMPL tokens: %w", err)
}
for _, token := range tokens {
if !hasUnresolvedDependencies(db, token) {
return token, nil
}
}
return nil, nil // No work available
}
// Use provided filters
tokens, err := db.ListTokens(filters, idPattern, "priority ASC, updated_at DESC", 50)
if err != nil {
return nil, fmt.Errorf("query tokens: %w", err)
}
// Find first unblocked token
for _, token := range tokens {
if !hasUnresolvedDependencies(db, token) {
return token, nil
}
}
return nil, nil // No unblocked work available
}
// hasUnresolvedDependencies checks if a token has blocking dependencies
func hasUnresolvedDependencies(db *storage.DB, token *storage.Token) bool {
if token.DependsOn == "" {
return false
}
// Parse comma-separated dependencies
deps := strings.Split(token.DependsOn, ",")
for _, dep := range deps {
dep = strings.TrimSpace(dep)
if dep == "" {
continue
}
// Query dependency status
depTokens, err := db.GetTokensByReqID(dep)
if err != nil || len(depTokens) == 0 {
return true // Dependency not found = blocking
}
// Check if any token for this requirement is incomplete
allComplete := true
for _, depToken := range depTokens {
if depToken.Status != "TESTED" && depToken.Status != "BENCHED" {
allComplete = false
break
}
}
if !allComplete {
return true // Dependency incomplete = blocking
}
}
return false
}
// isHiddenPath determines if a token should be hidden based on its file path
func isHiddenPath(filePath string) bool {
hiddenPatterns := []string{
// Test files
"_test.go", "Test.", "/tests/", "/test/",
// Template directories
".canary/templates/", "/templates/", "/base/", "/embedded/",
// Documentation examples
"IMPLEMENTATION_SUMMARY", "FINAL_SUMMARY", "README_CANARY.md", "GAP_ANALYSIS.md",
// AI agent directories
".claude/", ".cursor/", ".github/prompts/", ".windsurf/", ".kilocode/",
".roo/", ".opencode/", ".codex/", ".augment/", ".codebuddy/", ".amazonq/",
}
for _, pattern := range hiddenPatterns {
if strings.Contains(filePath, pattern) {
return true
}
}
return false
}
// selectFromFilesystem scans filesystem for CANARY tokens when database unavailable
func selectFromFilesystem(filters map[string]string) (*storage.Token, error) {
// Use grep to find all CANARY tokens
grepCmd := exec.Command("grep",
"-rn",
"--include=*.go", "--include=*.md", "--include=*.py",
"--include=*.js", "--include=*.ts", "--include=*.java",
"--include=*.rb", "--include=*.rs",
"CANARY:",
".",
)
output, err := grepCmd.CombinedOutput()
if err != nil && len(output) == 0 {
return nil, nil // No tokens found
}
// Parse tokens from grep output
var candidates []*storage.Token
lines := strings.Split(string(output), "\n")
for _, line := range lines {
if line == "" {
continue
}
// Parse grep output: file:line:content
parts := strings.SplitN(line, ":", 3)
if len(parts) < 3 {
continue
}
file := parts[0]
content := parts[2]
// Extract CANARY fields
reqID := extractField(content, "REQ")
feature := extractField(content, "FEATURE")
aspect := extractField(content, "ASPECT")
status := extractField(content, "STATUS")
priorityStr := extractField(content, "PRIORITY")
if reqID == "" || feature == "" {
continue
}
// Apply filters
if filterStatus, ok := filters["status"]; ok && status != filterStatus {
continue
}
if filterAspect, ok := filters["aspect"]; ok && aspect != filterAspect {
continue
}
// Parse priority
priority := 5 // default
if priorityStr != "" {
//nolint:errcheck // Best-effort parse, default to 5 on failure
fmt.Sscanf(priorityStr, "%d", &priority)
}
// Only include STUB or IMPL unless filtered
if _, hasFilter := filters["status"]; !hasFilter {
if status != "STUB" && status != "IMPL" {
continue
}
}
// Skip hidden paths unless include_hidden is set
if includeHidden, ok := filters["include_hidden"]; !ok || includeHidden != "true" {
if isHiddenPath(file) {
continue
}
}
token := &storage.Token{
ReqID: reqID,
Feature: feature,
Aspect: aspect,
Status: status,
Priority: priority,
FilePath: file,
RawToken: content,
}
candidates = append(candidates, token)
}
if len(candidates) == 0 {
return nil, nil
}
// Sort by priority (1=highest), then by status (STUB > IMPL)
var best *storage.Token
for _, candidate := range candidates {
if best == nil {
best = candidate
continue
}
// Prefer higher priority (lower number)
if candidate.Priority < best.Priority {
best = candidate
continue
}
if candidate.Priority > best.Priority {
continue
}
// Same priority: prefer STUB over IMPL
if candidate.Status == "STUB" && best.Status == "IMPL" {
best = candidate
}
}
return best, nil
}
// renderPrompt generates implementation prompt from template
func renderPrompt(token *storage.Token, promptFlag bool) (string, error) {
if !promptFlag {
// Simple summary output
return fmt.Sprintf("Next: %s - %s (Priority: %d, Status: %s)\n"+
"Run with --prompt for full implementation guidance.",
token.ReqID, token.Feature, token.Priority, token.Status), nil
}
// Load template
templatePath := ".canary/templates/next-prompt-template.md"
templateContent, err := os.ReadFile(templatePath)
if err != nil {
return "", fmt.Errorf("read template: %w", err)
}
tmpl, err := template.New("next-prompt").Parse(string(templateContent))
if err != nil {
return "", fmt.Errorf("parse template: %w", err)
}
// Load prompt data
data, err := loadPromptData(token)
if err != nil {
return "", fmt.Errorf("load prompt data: %w", err)
}
// Render template
var buf strings.Builder
if err := tmpl.Execute(&buf, data); err != nil {
return "", fmt.Errorf("execute template: %w", err)
}
return buf.String(), nil
}
// loadPromptData loads all data needed for template rendering
func loadPromptData(token *storage.Token) (*PromptData, error) {
data := &PromptData{
ReqID: token.ReqID,
Feature: token.Feature,
Aspect: token.Aspect,
Status: token.Status,
Priority: token.Priority,
Today: time.Now().UTC().Format("2006-01-02"),
}
// Load specification file
specPattern := fmt.Sprintf(".canary/specs/%s-*/spec.md", token.ReqID)
matches, err := filepath.Glob(specPattern)
if err == nil && len(matches) > 0 {
data.SpecFile = matches[0]
specContent, err := os.ReadFile(matches[0])
if err == nil {
data.SpecContent = string(specContent)
// Extract success criteria from spec
data.SuccessCriteria = extractSuccessCriteria(data.SpecContent)
}
}
// Load constitution
constitutionPath := ".canary/memory/constitution.md"
constitutionContent, err := os.ReadFile(constitutionPath)
if err == nil {
data.Constitution = string(constitutionContent)
}
// Generate suggested files based on aspect
data.SuggestedFiles = suggestFileLocations(token.Aspect)
// Generate test guidance
data.TestGuidance = generateTestGuidance(token)
// Generate token example
data.TokenExample = generateTokenExample(token)
// Determine package name and test file
data.PackageName = guessPackageName(token.Aspect)
data.SuggestedTestFile = fmt.Sprintf("cmd/canary/%s_test.go", strings.ToLower(token.Feature))
// Load dependencies if in database
dbPath := ".canary/canary.db"
if db, err := storage.Open(dbPath); err == nil {
defer db.Close()
if token.DependsOn != "" {
deps := strings.Split(token.DependsOn, ",")
for _, dep := range deps {
dep = strings.TrimSpace(dep)
if dep == "" {
continue
}
depTokens, err := db.GetTokensByReqID(dep)
if err == nil && len(depTokens) > 0 {
data.Dependencies = append(data.Dependencies, depTokens[0])
}
}
}
}
return data, nil
}
// extractSuccessCriteria extracts success criteria from specification
func extractSuccessCriteria(specContent string) []string {
var criteria []string
inSection := false
lines := strings.Split(specContent, "\n")
for _, line := range lines {
line = strings.TrimSpace(line)
// Look for success criteria section
if strings.Contains(strings.ToLower(line), "success criteria") {
inSection = true
continue
}
// Stop at next major section
if inSection && strings.HasPrefix(line, "##") {
break
}
// Extract list items
if inSection && (strings.HasPrefix(line, "-") || strings.HasPrefix(line, "*")) {
criterion := strings.TrimLeft(line, "-* \t")
if criterion != "" {
criteria = append(criteria, criterion)
}
}
}
if len(criteria) == 0 {
criteria = []string{
"Implementation meets specification requirements",
"All tests pass",
"Code follows project conventions",
}
}
return criteria
}
// suggestFileLocations suggests file locations based on aspect
func suggestFileLocations(aspect string) []string {
suggestions := map[string][]string{
"CLI": {"cmd/canary/main.go", "cmd/canary/*.go"},
"API": {"internal/*/api.go", "pkg/*/api.go"},
"Engine": {"internal/engine/*.go", "pkg/engine/*.go"},
"Storage": {"internal/storage/*.go"},
"Security": {"internal/security/*.go", "pkg/security/*.go"},
}
if files, ok := suggestions[aspect]; ok {
return files
}
return []string{"cmd/", "internal/", "pkg/"}
}
// generateTestGuidance creates test-first guidance
func generateTestGuidance(token *storage.Token) string {
return fmt.Sprintf(`Create tests that verify the %s functionality:
- Test happy path with valid inputs
- Test error cases with invalid inputs
- Test edge cases and boundary conditions
- Test integration with existing components
Use table-driven tests where appropriate for multiple scenarios.`, token.Feature)
}
// generateTokenExample creates CANARY token placement example
func generateTokenExample(token *storage.Token) string {
today := time.Now().UTC().Format("2006-01-02")
return fmt.Sprintf(`// CANARY: REQ=%s; FEATURE="%s"; ASPECT=%s; STATUS=STUB; UPDATED=%s
func %s() error {
// TODO: implement
return nil
}`, token.ReqID, token.Feature, token.Aspect, today, token.Feature)
}
// guessPackageName guesses package name from aspect
func guessPackageName(aspect string) string {
names := map[string]string{
"CLI": "main",
"API": "api",
"Engine": "engine",
"Storage": "storage",
"Security": "security",
}
if name, ok := names[aspect]; ok {
return name
}
return "main"
}
// extractField extracts a field value from a CANARY token string (already defined in main.go)
// This is a duplicate for use in next.go - consider moving to shared utility
func extractFieldInternal(token, field string) string {
// Look for FIELD="value" or FIELD=value
pattern := field + `="([^"]+)"`
re := regexp.MustCompile(pattern)
matches := re.FindStringSubmatch(token)
if len(matches) > 1 {
return matches[1]
}
// Try without quotes
pattern = field + `=([^;\s]+)`
re = regexp.MustCompile(pattern)
matches = re.FindStringSubmatch(token)
if len(matches) > 1 {
return matches[1]
}
return ""
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
// CANARY: REQ=CBIN-146; FEATURE="ProjectCLI"; ASPECT=CLI; STATUS=IMPL; UPDATED=2025-10-18
package main
import (
"fmt"
"github.com/spf13/cobra"
"go.devnw.com/canary/internal/storage"
)
// dbCmd manages database initialization
var dbCmd = &cobra.Command{
Use: "db",
Short: "Database management commands",
Long: `Manage the CANARY database (global or local modes).`,
}
// dbInitCmd initializes a database
var dbInitCmd = &cobra.Command{
Use: "init",
Short: "Initialize database",
Long: `Initialize the CANARY database in global or local mode.
Modes:
Global (default): ~/.canary/canary.db - shared across all projects
Local (--local): ./.canary/canary.db - project-specific database
Examples:
canary db init # Initialize global database
canary db init --global # Initialize global database (explicit)
canary db init --local # Initialize local database`,
RunE: func(cmd *cobra.Command, args []string) error {
local, _ := cmd.Flags().GetBool("local")
global, _ := cmd.Flags().GetBool("global")
// Determine mode (default to global)
mode := storage.GlobalMode
modeStr := "global"
if local {
mode = storage.LocalMode
modeStr = "local"
} else if global {
mode = storage.GlobalMode
modeStr = "global"
}
// Initialize database
manager := storage.NewDatabaseManager()
if err := manager.Initialize(mode); err != nil {
return fmt.Errorf("initialize database: %w", err)
}
defer manager.Close()
// Success message
fmt.Fprintf(cmd.OutOrStdout(), "β
Initialized %s database at: %s\n", modeStr, manager.Location())
return nil
},
}
// projectCmd manages projects
var projectCmd = &cobra.Command{
Use: "project",
Short: "Project management commands",
Long: `Register, list, remove, and switch between projects.`,
}
// projectRegisterCmd registers a new project
var projectRegisterCmd = &cobra.Command{
Use: "register <name> <path>",
Short: "Register a new project",
Long: `Register a new project in the global database.
The project will be assigned a unique slug-based ID generated from the name.
Examples:
canary project register "My Project" /path/to/project
canary project register Backend ./backend`,
Args: cobra.ExactArgs(2),
RunE: func(cmd *cobra.Command, args []string) error {
name := args[0]
path := args[1]
// Open global database
manager := storage.NewDatabaseManager()
if err := manager.Discover(); err != nil {
return fmt.Errorf("database not found: %w (run 'canary db init' first)", err)
}
defer manager.Close()
// Register project
registry := storage.NewProjectRegistry(manager)
project := &storage.Project{
Name: name,
Path: path,
}
if err := registry.Register(project); err != nil {
return fmt.Errorf("register project: %w", err)
}
fmt.Fprintf(cmd.OutOrStdout(), "β
Registered project: %s (ID: %s)\n", project.Name, project.ID)
fmt.Fprintf(cmd.OutOrStdout(), " Path: %s\n", project.Path)
return nil
},
}
// projectListCmd lists all registered projects
var projectListCmd = &cobra.Command{
Use: "list",
Short: "List all registered projects",
Long: `List all projects registered in the database.`,
RunE: func(cmd *cobra.Command, args []string) error {
// Open database
manager := storage.NewDatabaseManager()
if err := manager.Discover(); err != nil {
return fmt.Errorf("database not found: %w (run 'canary db init' first)", err)
}
defer manager.Close()
// List projects
registry := storage.NewProjectRegistry(manager)
projects, err := registry.List()
if err != nil {
return fmt.Errorf("list projects: %w", err)
}
if len(projects) == 0 {
fmt.Fprintln(cmd.OutOrStdout(), "No projects registered.")
return nil
}
fmt.Fprintf(cmd.OutOrStdout(), "Registered Projects (%d):\n\n", len(projects))
for _, p := range projects {
active := ""
if p.Active {
active = " (active)"
}
fmt.Fprintf(cmd.OutOrStdout(), " %s: %s%s\n", p.ID, p.Name, active)
fmt.Fprintf(cmd.OutOrStdout(), " Path: %s\n", p.Path)
fmt.Fprintln(cmd.OutOrStdout())
}
return nil
},
}
// projectRemoveCmd removes a project
var projectRemoveCmd = &cobra.Command{
Use: "remove <project-id>",
Short: "Remove a project",
Long: `Remove a project from the registry.
Examples:
canary project remove my-project
canary project remove backend-api`,
Args: cobra.ExactArgs(1),
RunE: func(cmd *cobra.Command, args []string) error {
projectID := args[0]
// Open database
manager := storage.NewDatabaseManager()
if err := manager.Discover(); err != nil {
return fmt.Errorf("database not found: %w", err)
}
defer manager.Close()
// Remove project
registry := storage.NewProjectRegistry(manager)
if err := registry.Remove(projectID); err != nil {
return fmt.Errorf("remove project: %w", err)
}
fmt.Fprintf(cmd.OutOrStdout(), "β
Project removed: %s\n", projectID)
return nil
},
}
// projectSwitchCmd switches active project context
var projectSwitchCmd = &cobra.Command{
Use: "switch <project-id>",
Short: "Switch to a different project",
Long: `Switch the active project context.
Only one project can be active at a time.
Examples:
canary project switch my-project
canary project switch backend-api`,
Args: cobra.ExactArgs(1),
RunE: func(cmd *cobra.Command, args []string) error {
projectID := args[0]
// Open database
manager := storage.NewDatabaseManager()
if err := manager.Discover(); err != nil {
return fmt.Errorf("database not found: %w", err)
}
defer manager.Close()
// Switch context
ctx := storage.NewContextManager(manager)
if err := ctx.SwitchTo(projectID); err != nil {
return fmt.Errorf("switch project: %w", err)
}
// Get project details
registry := storage.NewProjectRegistry(manager)
project, err := registry.GetByID(projectID)
if err != nil {
return fmt.Errorf("get project: %w", err)
}
fmt.Fprintf(cmd.OutOrStdout(), "β
Switched to project: %s\n", project.Name)
fmt.Fprintf(cmd.OutOrStdout(), " Path: %s\n", project.Path)
return nil
},
}
// projectCurrentCmd shows the current active project
var projectCurrentCmd = &cobra.Command{
Use: "current",
Short: "Show current active project",
Long: `Display the currently active project context.`,
RunE: func(cmd *cobra.Command, args []string) error {
// Open database
manager := storage.NewDatabaseManager()
if err := manager.Discover(); err != nil {
return fmt.Errorf("database not found: %w", err)
}
defer manager.Close()
// Get current project
ctx := storage.NewContextManager(manager)
current, err := ctx.GetCurrent()
if err != nil {
fmt.Fprintln(cmd.OutOrStdout(), "No active project context set.")
return nil
}
fmt.Fprintf(cmd.OutOrStdout(), "Current Project: %s\n", current.Name)
fmt.Fprintf(cmd.OutOrStdout(), " ID: %s\n", current.ID)
fmt.Fprintf(cmd.OutOrStdout(), " Path: %s\n", current.Path)
return nil
},
}
func init() {
	// Mode flags for `canary db init`.
	dbInitCmd.Flags().Bool("global", false, "Initialize global database (default)")
	dbInitCmd.Flags().Bool("local", false, "Initialize local database in current directory")

	// Assemble the command trees.
	dbCmd.AddCommand(dbInitCmd)
	projectCmd.AddCommand(
		projectRegisterCmd,
		projectListCmd,
		projectRemoveCmd,
		projectSwitchCmd,
		projectCurrentCmd,
	)

	// Expose both trees on the root command.
	rootCmd.AddCommand(dbCmd, projectCmd)
}
// Constructor functions for testing
// newDBCmd constructs a fresh `db` command tree for testing, reusing the
// production RunE handler from dbInitCmd.
//
// Fixed: the local command variable was named `init`, shadowing the
// special init identifier — legal for a local, but needlessly confusing.
func newDBCmd() *cobra.Command {
	cmd := &cobra.Command{
		Use:   "db",
		Short: "Database management commands",
	}
	initCmd := &cobra.Command{
		Use:   "init",
		Short: "Initialize database",
		RunE:  dbInitCmd.RunE,
	}
	initCmd.Flags().Bool("global", false, "Initialize global database (default)")
	initCmd.Flags().Bool("local", false, "Initialize local database")
	cmd.AddCommand(initCmd)
	return cmd
}
// newProjectCmd constructs a fresh `project` command tree for testing,
// reusing the production RunE handlers from the package-level commands.
func newProjectCmd() *cobra.Command {
	root := &cobra.Command{
		Use:   "project",
		Short: "Project management commands",
	}
	root.AddCommand(
		&cobra.Command{
			Use:   "register <name> <path>",
			Short: "Register a new project",
			Args:  cobra.ExactArgs(2),
			RunE:  projectRegisterCmd.RunE,
		},
		&cobra.Command{
			Use:   "list",
			Short: "List all registered projects",
			RunE:  projectListCmd.RunE,
		},
		&cobra.Command{
			Use:   "remove <project-id>",
			Short: "Remove a project",
			Args:  cobra.ExactArgs(1),
			RunE:  projectRemoveCmd.RunE,
		},
		&cobra.Command{
			Use:   "switch <project-id>",
			Short: "Switch to a different project",
			Args:  cobra.ExactArgs(1),
			RunE:  projectSwitchCmd.RunE,
		},
		&cobra.Command{
			Use:   "current",
			Short: "Show current active project",
			RunE:  projectCurrentCmd.RunE,
		},
	)
	return root
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
package main
import (
	"encoding/json"
	"fmt"
	"os"
	"sort"
	"strings"

	"github.com/spf13/cobra"

	"go.devnw.com/canary/internal/storage"
)
// CANARY: REQ=CBIN-CLI-001; FEATURE="ShowCmd"; ASPECT=CLI; STATUS=TESTED; TEST=TestCANARY_CBIN_CLI_001_CLI_ShowCmd; UPDATED=2025-10-16
// showCmd displays all CANARY tokens for a single requirement ID, grouped
// by aspect (default) or status, with optional JSON output.
var showCmd = &cobra.Command{
	Use:   "show <REQ-ID>",
	Short: "Display all CANARY tokens for a requirement",
	Long: `Show displays all CANARY tokens for a specific requirement ID.
Displays:
- Feature name, aspect, status
- File location and line number
- Test and benchmark references
- Owner and priority
Grouping:
- By default, groups by aspect (CLI, API, Engine, etc.)
- Use --group-by status to group by implementation status
- Use --json for machine-readable output
Examples:
canary show CBIN-133
canary show CBIN-133 --group-by status
canary show CBIN-133 --json`,
	Args: cobra.ExactArgs(1),
	RunE: func(cmd *cobra.Command, args []string) error {
		reqID := args[0]
		groupBy, _ := cmd.Flags().GetString("group-by")
		jsonOutput, _ := cmd.Flags().GetBool("json")
		noColor, _ := cmd.Flags().GetBool("no-color")
		dbPath, _ := cmd.Flags().GetString("db")
		// Open database. Fixed: the old warning claimed "using filesystem
		// search (slower)", but this command has no filesystem fallback —
		// it returns the error. The message no longer promises a fallback.
		db, err := storage.Open(dbPath)
		if err != nil {
			fmt.Fprintf(os.Stderr, "⚠️  Database not found\n")
			fmt.Fprintf(os.Stderr, "   Suggestion: Run 'canary index' to build database\n\n")
			return fmt.Errorf("open database: %w", err)
		}
		defer db.Close()
		// Query tokens for the requirement.
		tokens, err := db.GetTokensByReqID(reqID)
		if err != nil {
			return fmt.Errorf("query tokens: %w", err)
		}
		if len(tokens) == 0 {
			fmt.Printf("No tokens found for %s\n", reqID)
			fmt.Println("\nSuggestions:")
			fmt.Println(" • Run: canary list")
			fmt.Println(" • Check requirement ID format (e.g., CBIN-XXX)")
			return fmt.Errorf("requirement not found")
		}
		// Format output.
		if jsonOutput {
			return outputTokensJSON(tokens)
		}
		fmt.Printf("Tokens for %s:\n\n", reqID)
		output := formatTokensTable(tokens, groupBy, !noColor)
		fmt.Println(output)
		return nil
	},
}
// outputTokensJSON outputs tokens as JSON
func outputTokensJSON(tokens []*storage.Token) error {
enc := json.NewEncoder(os.Stdout)
enc.SetIndent("", " ")
return enc.Encode(tokens)
}
// formatTokensTable formats tokens as a grouped table
func formatTokensTable(tokens []*storage.Token, groupBy string, useColor bool) string {
var buf strings.Builder
// Group tokens
groups := groupTokens(tokens, groupBy)
// Format each group
for groupName, groupTokens := range groups {
buf.WriteString(fmt.Sprintf("## %s\n\n", groupName))
for _, token := range groupTokens {
buf.WriteString(fmt.Sprintf("π %s - %s\n", token.ReqID, token.Feature))
// Status with optional color
statusLine := fmt.Sprintf(" Status: %s | Aspect: %s", token.Status, token.Aspect)
if token.Priority > 0 {
statusLine += fmt.Sprintf(" | Priority: %d", token.Priority)
}
buf.WriteString(statusLine + "\n")
buf.WriteString(fmt.Sprintf(" Location: %s:%d\n", token.FilePath, token.LineNumber))
if token.Test != "" {
buf.WriteString(fmt.Sprintf(" Test: %s\n", token.Test))
}
if token.Bench != "" {
buf.WriteString(fmt.Sprintf(" Bench: %s\n", token.Bench))
}
if token.Owner != "" {
buf.WriteString(fmt.Sprintf(" Owner: %s\n", token.Owner))
}
buf.WriteString("\n")
}
}
return buf.String()
}
// groupTokens groups tokens by specified field
func groupTokens(tokens []*storage.Token, groupBy string) map[string][]*storage.Token {
groups := make(map[string][]*storage.Token)
for _, token := range tokens {
var key string
switch groupBy {
case "status":
key = token.Status
case "aspect":
key = token.Aspect
default:
// Default: group by aspect
key = token.Aspect
}
if key == "" {
key = "Ungrouped"
}
groups[key] = append(groups[key], token)
}
return groups
}
func init() {
	// Output and formatting flags for `canary show`.
	showCmd.Flags().String("db", ".canary/canary.db", "Path to database file")
	showCmd.Flags().String("group-by", "aspect", "Group tokens by field (aspect, status)")
	showCmd.Flags().Bool("json", false, "Output in JSON format")
	showCmd.Flags().Bool("no-color", false, "Disable colored output")
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
// CANARY: REQ=CBIN-134; FEATURE="SpecModification"; ASPECT=CLI; STATUS=IMPL; DOC=user:docs/user/spec-modification-guide.md; DOC_HASH=676eb2a18c9d002a; UPDATED=2025-10-17
package main
import (
"fmt"
"os"
"path/filepath"
"github.com/spf13/cobra"
"go.devnw.com/canary/internal/specs"
"go.devnw.com/canary/internal/storage"
)
var updateCmd = &cobra.Command{
Use: "update <REQ-ID or search-query>",
Short: "Update an existing requirement specification",
Long: `Locate and update an existing CANARY requirement specification.
Supports exact ID lookup, fuzzy text search, and section-specific loading
to minimize context usage for AI agents.
Examples:
canary specify update CBIN-134 # Exact ID lookup
canary specify update --search "spec mod" # Fuzzy search
canary specify update CBIN-134 --sections overview # Load specific sections`,
Args: cobra.ExactArgs(1),
RunE: func(cmd *cobra.Command, args []string) error {
query := args[0]
searchFlag, _ := cmd.Flags().GetBool("search")
sectionsFlag, _ := cmd.Flags().GetStringSlice("sections")
var specPath string
var err error
// Determine lookup method
if searchFlag {
// Fuzzy search mode
matches, err := specs.FindSpecBySearch(query, 5)
if err != nil {
return fmt.Errorf("search specs: %w", err)
}
if len(matches) == 0 {
return fmt.Errorf("no specs found matching: %s", query)
}
// Show matches
fmt.Printf("Found %d matching specs:\n\n", len(matches))
for i, match := range matches {
fmt.Printf(" %d. %s - %s (Score: %d%%)\n",
i+1, match.ReqID, match.FeatureName, match.Score)
}
// Auto-select if single strong match (>90%)
if len(matches) == 1 || (matches[0].Score > 90 && (len(matches) == 1 || matches[0].Score-matches[1].Score > 20)) {
specPath = filepath.Join(matches[0].SpecPath, "spec.md")
fmt.Printf("\nAuto-selected: %s\n\n", matches[0].ReqID)
} else {
return fmt.Errorf("multiple matches found - please use exact REQ-ID for precision")
}
} else {
// Exact ID lookup
specPath, err = specs.FindSpecByID(query)
if err != nil {
// Try database fallback
dbPath := ".canary/canary.db"
if db, dbErr := storage.Open(dbPath); dbErr == nil {
defer db.Close()
specPath, err = specs.FindSpecInDB(db, query)
}
}
if err != nil {
return fmt.Errorf("spec not found: %w\n\nHint: Try fuzzy search with --search flag:\n canary specify update --search \"%s\"", err, query)
}
}
// Read spec content
content, err := os.ReadFile(specPath)
if err != nil {
return fmt.Errorf("read spec: %w", err)
}
specContent := string(content)
// Apply section filtering if requested
if len(sectionsFlag) > 0 {
specContent, err = specs.ParseSections(specContent, sectionsFlag)
if err != nil {
return fmt.Errorf("parse sections: %w\n\nHint: Use --sections with valid section names like: overview, user-stories, requirements", err)
}
}
// Check for plan.md
planPath := filepath.Join(filepath.Dir(specPath), "plan.md")
hasPlan := false
if _, err := os.Stat(planPath); err == nil {
hasPlan = true
}
// Output results
fmt.Printf("β
Found specification: %s\n", specPath)
if hasPlan {
fmt.Printf("π Plan exists: %s\n", planPath)
}
// If sections were requested, show what was included
if len(sectionsFlag) > 0 {
fmt.Printf("π Sections: %v\n", sectionsFlag)
}
fmt.Printf("\n--- Spec Content ---\n\n")
fmt.Println(specContent)
if hasPlan && len(sectionsFlag) == 0 {
fmt.Printf("\nπ‘ Tip: View plan with: cat %s\n", planPath)
}
return nil
},
}
// init wires up the flags for the `specify update` subcommand and attaches
// it to specifyCmd (declared in main.go).
func init() {
	flags := updateCmd.Flags()
	flags.Bool("search", false, "use fuzzy search instead of exact ID lookup")
	flags.StringSlice("sections", []string{}, "load only specific sections (comma-separated)")

	specifyCmd.AddCommand(updateCmd)
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
package main
import (
	"encoding/json"
	"fmt"
	"os"
	"path/filepath"
	"sort"
	"strings"

	"github.com/spf13/cobra"
)
// CANARY: REQ=CBIN-145; FEATURE="SpecsCmd"; ASPECT=CLI; STATUS=TESTED; TEST=TestCANARY_CBIN_145_CLI_SpecsCmd; UPDATED=2025-10-17
// specsCmd implements `canary specs`: it scans the specs directory and lists
// every specification directory with its requirement ID, feature name, and
// whether spec.md / plan.md exist.
var specsCmd = &cobra.Command{
	Use:   "specs [flags]",
	Short: "List all requirement specification directories",
	Long: `Specs lists all requirement specification directories in .canary/specs/.
Shows requirement ID, feature name (extracted from directory name), and paths
to spec.md and plan.md files if they exist.
Examples:
canary specs
canary specs --path .canary/specs
canary specs --json`,
	RunE: func(cmd *cobra.Command, args []string) error {
		specsPath, _ := cmd.Flags().GetString("path")
		jsonOutput, _ := cmd.Flags().GetBool("json")

		// Fail early with a clear message when the directory is absent.
		if _, err := os.Stat(specsPath); os.IsNotExist(err) {
			return fmt.Errorf("specs directory not found: %s", specsPath)
		}
		entries, err := os.ReadDir(specsPath)
		if err != nil {
			return fmt.Errorf("read specs directory: %w", err)
		}

		// SpecInfo describes one spec directory for listing/JSON output.
		type SpecInfo struct {
			ReqID       string `json:"req_id"`
			FeatureName string `json:"feature_name"`
			Directory   string `json:"directory"`
			HasSpec     bool   `json:"has_spec"`
			HasPlan     bool   `json:"has_plan"`
		}

		var specs []SpecInfo
		for _, entry := range entries {
			if !entry.IsDir() {
				continue
			}
			dirName := entry.Name()
			dirPath := filepath.Join(specsPath, dirName)

			// Directory names follow CBIN-XXX-feature-name; when the pattern
			// doesn't match, fall back to the raw directory name as the ID.
			parts := strings.SplitN(dirName, "-", 3)
			reqID := dirName
			featureName := ""
			if len(parts) >= 3 {
				reqID = parts[0] + "-" + parts[1]
				featureName = specTitleWords(strings.ReplaceAll(parts[2], "-", " "))
			}

			specs = append(specs, SpecInfo{
				ReqID:       reqID,
				FeatureName: featureName,
				Directory:   dirPath,
				HasSpec:     specFileExists(filepath.Join(dirPath, "spec.md")),
				HasPlan:     specFileExists(filepath.Join(dirPath, "plan.md")),
			})
		}

		// Sort by requirement ID for stable output.
		sort.Slice(specs, func(i, j int) bool {
			return specs[i].ReqID < specs[j].ReqID
		})

		if len(specs) == 0 {
			fmt.Printf("No specification directories found in %s\n", specsPath)
			return nil
		}

		if jsonOutput {
			// Use encoding/json so directory paths containing quotes or
			// backslashes (e.g. Windows paths) are escaped correctly; the
			// previous hand-built JSON was not escaped.
			data, err := json.MarshalIndent(specs, "", "  ")
			if err != nil {
				return fmt.Errorf("marshal specs: %w", err)
			}
			fmt.Println(string(data))
			return nil
		}

		// Human-readable output.
		fmt.Printf("Found %d specification directories:\n\n", len(specs))
		for _, spec := range specs {
			fmt.Printf("π %s", spec.ReqID)
			if spec.FeatureName != "" {
				fmt.Printf(" - %s", spec.FeatureName)
			}
			fmt.Println()
			fmt.Printf(" %s\n", spec.Directory)
			files := []string{}
			if spec.HasSpec {
				files = append(files, "spec.md")
			}
			if spec.HasPlan {
				files = append(files, "plan.md")
			}
			if len(files) > 0 {
				fmt.Printf(" Files: %s\n", strings.Join(files, ", "))
			} else {
				fmt.Printf(" (no spec or plan files)\n")
			}
			fmt.Println()
		}
		fmt.Printf("Total: %d specifications\n", len(specs))
		return nil
	},
}

// specFileExists reports whether path exists; any stat error counts as
// absent for listing purposes.
func specFileExists(path string) bool {
	_, err := os.Stat(path)
	return err == nil
}

// specTitleWords upper-cases the first byte of each space-separated word,
// preserving spacing. Replaces the deprecated strings.Title (ASCII-oriented;
// adequate for directory-name-derived feature names).
func specTitleWords(s string) string {
	words := strings.Split(s, " ")
	for i, w := range words {
		if w == "" {
			continue
		}
		words[i] = strings.ToUpper(w[:1]) + w[1:]
	}
	return strings.Join(words, " ")
}
// init registers the flags accepted by `canary specs`.
func init() {
	f := specsCmd.Flags()
	f.String("path", ".canary/specs", "Path to specs directory")
	f.Bool("json", false, "Output as JSON")
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
package main
import (
"fmt"
"os"
"github.com/fatih/color"
"github.com/spf13/cobra"
"go.devnw.com/canary/internal/storage"
)
// CANARY: REQ=CBIN-CLI-001; FEATURE="StatusCmd"; ASPECT=CLI; STATUS=TESTED; TEST=TestCANARY_CBIN_CLI_001_CLI_StatusCmd; UPDATED=2025-10-16
// statusCmd implements `canary status <REQ-ID>`: it loads every CANARY token
// recorded for the requirement from the SQLite database and prints a
// progress summary (counts, percentage, incomplete work).
var statusCmd = &cobra.Command{
Use: "status <REQ-ID>",
Short: "Show implementation progress for a requirement",
Long: `Status displays implementation progress summary for a requirement.
Shows:
- Total token count
- Breakdown by status (STUB, IMPL, TESTED, BENCHED)
- Completion percentage
- List of incomplete work
Examples:
canary status CBIN-133
canary status CBIN-133 --no-color`,
Args: cobra.ExactArgs(1),
RunE: func(cmd *cobra.Command, args []string) error {
reqID := args[0]
noColor, _ := cmd.Flags().GetBool("no-color")
dbPath, _ := cmd.Flags().GetString("db")
// Disable colors globally for the fatih/color package if requested.
if noColor {
color.NoColor = true
}
// Open database. Unlike some other subcommands, status has no filesystem
// fallback: a missing database is an error, with a hint to run `canary index`.
db, err := storage.Open(dbPath)
if err != nil {
fmt.Fprintf(os.Stderr, "β οΈ Database not found\n")
fmt.Fprintf(os.Stderr, " Suggestion: Run 'canary index' to build database\n\n")
return fmt.Errorf("open database: %w", err)
}
defer db.Close()
// Query all tokens recorded for the requested requirement ID.
tokens, err := db.GetTokensByReqID(reqID)
if err != nil {
return fmt.Errorf("query tokens: %w", err)
}
if len(tokens) == 0 {
fmt.Printf("No tokens found for %s\n", reqID)
return fmt.Errorf("requirement not found")
}
// Aggregate per-status counts, then render the colored summary.
stats := calculateStats(tokens)
displayStatusSummary(reqID, stats, tokens)
return nil
},
}
// StatusStats holds progress statistics
// StatusStats holds progress statistics aggregated over a requirement's
// CANARY tokens (populated by calculateStats).
type StatusStats struct {
Total int // total number of tokens for the requirement
Stub int // tokens with STATUS=STUB
Impl int // tokens with STATUS=IMPL
Tested int // tokens with STATUS=TESTED
Benched int // tokens with STATUS=BENCHED
Completed int // Tested + Benched: tokens considered done
}
// calculateStats computes statistics from tokens
// calculateStats tallies tokens by status. TESTED and BENCHED tokens both
// count toward Completed; any other status only contributes to Total.
func calculateStats(tokens []*storage.Token) *StatusStats {
	stats := &StatusStats{Total: len(tokens)}
	for _, tok := range tokens {
		done := false
		switch tok.Status {
		case "STUB":
			stats.Stub++
		case "IMPL":
			stats.Impl++
		case "TESTED":
			stats.Tested++
			done = true
		case "BENCHED":
			stats.Benched++
			done = true
		}
		if done {
			stats.Completed++
		}
	}
	return stats
}
// displayStatusSummary shows formatted progress summary
func displayStatusSummary(reqID string, stats *StatusStats, tokens []*storage.Token) {
green := color.New(color.FgGreen).SprintFunc()
yellow := color.New(color.FgYellow).SprintFunc()
red := color.New(color.FgRed).SprintFunc()
cyan := color.New(color.FgCyan).SprintFunc()
fmt.Printf("Implementation Status for %s:\n\n", cyan(reqID))
// Progress bar
completionPct := 0
if stats.Total > 0 {
completionPct = (stats.Completed * 100) / stats.Total
}
fmt.Printf("Progress: %s\n\n", progressBar(completionPct, 40))
// Statistics
fmt.Printf("Total: %d tokens\n", stats.Total)
fmt.Printf("Completed: %s (%d%%)\n", green(fmt.Sprintf("%d", stats.Completed)), completionPct)
fmt.Printf("In Progress:\n")
fmt.Printf(" β’ IMPL: %s\n", yellow(fmt.Sprintf("%d", stats.Impl)))
fmt.Printf(" β’ STUB: %s\n", red(fmt.Sprintf("%d", stats.Stub)))
fmt.Printf("Status Breakdown:\n")
fmt.Printf(" β’ TESTED: %s\n", green(fmt.Sprintf("%d", stats.Tested)))
fmt.Printf(" β’ BENCHED: %s\n", green(fmt.Sprintf("%d", stats.Benched)))
fmt.Println()
// List incomplete work
if stats.Stub > 0 || stats.Impl > 0 {
fmt.Println("Incomplete Work:")
for _, token := range tokens {
if token.Status == "STUB" || token.Status == "IMPL" {
statusColor := red
if token.Status == "IMPL" {
statusColor = yellow
}
fmt.Printf(" %s %s - %s\n",
statusColor(token.Status),
token.Feature,
token.FilePath)
}
}
} else {
fmt.Println(green("β
All features completed!"))
}
}
// progressBar generates a text progress bar
// progressBar renders a fixed-width ASCII progress bar such as
// "[=====>    ] 50%". pct is clamped to [0, 100]; the bracketed section is
// always exactly width characters wide.
func progressBar(pct int, width int) string {
	switch {
	case pct < 0:
		pct = 0
	case pct > 100:
		pct = 100
	}
	filled := (pct * width) / 100

	cells := make([]byte, 0, width+8)
	cells = append(cells, '[')
	for i := 0; i < filled; i++ {
		cells = append(cells, '=')
	}
	// The arrow head occupies one cell unless the bar is completely full.
	if filled < width {
		cells = append(cells, '>')
	}
	for i := filled + 1; i < width; i++ {
		cells = append(cells, ' ')
	}
	return fmt.Sprintf("%s] %d%%", cells, pct)
}
// init registers the flags accepted by `canary status`.
func init() {
	f := statusCmd.Flags()
	f.Bool("no-color", false, "Disable colored output")
	f.String("db", ".canary/canary.db", "Path to database file")
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
// CANARY: REQ=CBIN-140; FEATURE="ProjectConfig"; ASPECT=Storage; STATUS=IMPL; UPDATED=2025-10-16
package config
import (
"fmt"
"os"
"path/filepath"
"gopkg.in/yaml.v3"
)
// ProjectConfig represents the .canary/project.yaml configuration
// ProjectConfig represents the .canary/project.yaml configuration.
// Field meanings beyond the YAML mapping are inferred from field names;
// confirm against the consumers of this struct.
type ProjectConfig struct {
Project struct {
Name string `yaml:"name"` // project display name
Description string `yaml:"description"` // short project summary
} `yaml:"project"`
Requirements struct {
IDPattern string `yaml:"id_pattern"` // pattern for requirement IDs (presumably a regexp — verify)
} `yaml:"requirements"`
Scanner struct {
ExcludePaths []string `yaml:"exclude_paths"` // paths the scanner should skip
} `yaml:"scanner"`
Verification struct {
RequireTestField bool `yaml:"require_test_field"` // require a TEST field on tokens (name-derived)
RequireBenchField bool `yaml:"require_bench_field"` // require a BENCH field on tokens (name-derived)
StalenessDays int `yaml:"staleness_days"` // age threshold in days for staleness checks
} `yaml:"verification"`
Agent struct {
DefaultModel string `yaml:"default_model"` // default model identifier for agent runs
} `yaml:"agent"`
}
// Load reads and parses the project.yaml configuration file
// Load reads and parses .canary/project.yaml under rootDir. A missing file
// is not an error: the zero-value configuration is returned so callers fall
// back to defaults.
func Load(rootDir string) (*ProjectConfig, error) {
	configPath := filepath.Join(rootDir, ".canary", "project.yaml")
	raw, err := os.ReadFile(configPath)
	switch {
	case os.IsNotExist(err):
		return &ProjectConfig{}, nil
	case err != nil:
		return nil, fmt.Errorf("read config file: %w", err)
	}
	cfg := new(ProjectConfig)
	if err := yaml.Unmarshal(raw, cfg); err != nil {
		return nil, fmt.Errorf("parse config file: %w", err)
	}
	return cfg, nil
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
// CANARY: REQ=CBIN-136; FEATURE="DocStalenessDetection"; ASPECT=Engine; STATUS=TESTED; TEST=TestCANARY_CBIN_136_Engine_StalenessDetection; DOC=architecture:docs/architecture/adr-001-documentation-tracking.md; DOC_HASH=9c40f77ae6604be5; UPDATED=2025-10-16
package docs
import (
"os"
"strings"
"go.devnw.com/canary/internal/storage"
)
// CheckStaleness compares documentation file hash to token DOC_HASH field.
// Returns one of: DOC_CURRENT, DOC_STALE, DOC_MISSING, or DOC_UNHASHED
//
// Status meanings:
// - DOC_CURRENT: File hash matches token DOC_HASH (documentation is up-to-date)
// - DOC_STALE: File hash differs from token DOC_HASH (documentation needs updating)
// - DOC_MISSING: Documentation file does not exist at specified path
// - DOC_UNHASHED: Token has no DOC_HASH field (hash tracking not enabled)
//
// Example:
//
// status, err := docs.CheckStaleness(token)
// if status == "DOC_STALE" {
// fmt.Printf("Documentation for %s is outdated\n", token.ReqID)
// }
// CheckStaleness compares the on-disk documentation hash against the token's
// recorded DOC_HASH and classifies the result.
//
// Returned status values:
//   - "DOC_UNHASHED": token carries no DOC_HASH (hash tracking not enabled)
//   - "DOC_MISSING":  the documentation file does not exist at DocPath
//   - "DOC_CURRENT":  current file hash matches the recorded hash
//   - "DOC_STALE":    hashes differ, documentation needs updating
func CheckStaleness(token *storage.Token) (string, error) {
	if token.DocHash == "" {
		return "DOC_UNHASHED", nil
	}
	if _, statErr := os.Stat(token.DocPath); os.IsNotExist(statErr) {
		return "DOC_MISSING", nil
	}
	current, err := CalculateHash(token.DocPath)
	if err != nil {
		return "", err
	}
	// Tokens may store a longer hash; compare only the 16-character
	// abbreviation that CalculateHash produces.
	recorded := token.DocHash
	if len(recorded) > 16 {
		recorded = recorded[:16]
	}
	if recorded == current {
		return "DOC_CURRENT", nil
	}
	return "DOC_STALE", nil
}
// CheckMultipleDocumentation handles tokens with multiple DOC paths (comma-separated).
// Returns a map of doc path to status for each documentation file.
//
// Example:
//
// // Token with: DOC=user:docs/user.md,api:docs/api.md
// results, err := docs.CheckMultipleDocumentation(token)
// // Returns: {"docs/user.md": "DOC_CURRENT", "docs/api.md": "DOC_STALE"}
// CheckMultipleDocumentation evaluates staleness for every comma-separated
// path in the token's DocPath, pairing each path with the hash at the same
// position in DocHash (positions without a hash are reported as
// "DOC_UNHASHED" by CheckStaleness).
//
// Path entries may carry a type prefix ("user:docs/file.md"), which is
// stripped before checking. Returns a map of cleaned path to status.
func CheckMultipleDocumentation(token *storage.Token) (map[string]string, error) {
	paths := strings.Split(token.DocPath, ",")
	hashes := strings.Split(token.DocHash, ",")

	results := make(map[string]string, len(paths))
	for i, raw := range paths {
		p := strings.TrimSpace(raw)
		// Drop a leading "type:" prefix when present.
		if _, rest, found := strings.Cut(p, ":"); found {
			p = rest
		}
		// Probe token carrying just this path/hash pair.
		probe := &storage.Token{DocPath: p}
		if i < len(hashes) {
			probe.DocHash = strings.TrimSpace(hashes[i])
		}
		status, err := CheckStaleness(probe)
		if err != nil {
			return nil, err
		}
		results[p] = status
	}
	return results, nil
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
// CANARY: REQ=CBIN-136; FEATURE="DocHashCalculation"; ASPECT=Engine; STATUS=TESTED; TEST=TestCANARY_CBIN_136_Engine_HashCalculation; UPDATED=2025-10-16
package docs
import (
"crypto/sha256"
"encoding/hex"
"os"
"strings"
)
// CalculateHash computes SHA256 hash of documentation file with line ending normalization.
// Returns first 16 characters (64 bits) for abbreviated display in CANARY tokens.
//
// Line endings are normalized to LF (\n) before hashing to ensure cross-platform consistency
// between Windows (CRLF) and Unix/Mac (LF) systems.
//
// Example:
//
// hash, err := docs.CalculateHash("docs/user/auth.md")
// // Returns: "8f434346648f6b96" (first 16 chars of SHA256)
func CalculateHash(filePath string) (string, error) {
// Read file content
content, err := os.ReadFile(filePath)
if err != nil {
return "", err
}
// Normalize line endings: Convert CRLF to LF
// This ensures deterministic hashing across platforms
normalized := strings.ReplaceAll(string(content), "\r\n", "\n")
// Calculate SHA256
hash := sha256.Sum256([]byte(normalized))
// Encode to hex string
fullHash := hex.EncodeToString(hash[:])
// Return abbreviated hash (first 16 characters)
// 16 hex chars = 64 bits, sufficient collision resistance for doc tracking
return fullHash[:16], nil
}
// CalculateFullHash returns full 64-character SHA256 hash.
// Use this for database storage if full hash precision is needed.
func CalculateFullHash(filePath string) (string, error) {
content, err := os.ReadFile(filePath)
if err != nil {
return "", err
}
normalized := strings.ReplaceAll(string(content), "\r\n", "\n")
hash := sha256.Sum256([]byte(normalized))
return hex.EncodeToString(hash[:]), nil
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
// CANARY: REQ=CBIN-140; FEATURE="GapService"; ASPECT=Engine; STATUS=IMPL; UPDATED=2025-10-17
package gap
import (
"fmt"
"strings"
"time"
"go.devnw.com/canary/internal/storage"
)
// Service provides gap analysis business logic
type Service struct {
// repo is the persistence layer used for all gap entry and config access.
repo *storage.GapRepository
}
// NewService creates a new gap analysis service backed by the given
// repository.
func NewService(repo *storage.GapRepository) *Service {
return &Service{repo: repo}
}
// MarkGap records a new gap analysis entry
// MarkGap validates inputs, generates the next gap ID for the requirement,
// and records a new gap analysis entry. It returns the generated gap ID
// (GAP-{REQ_ID}-NNN).
//
// reqID, feature, category, and description are mandatory; category must be
// one of the names returned by the repository's GetCategories. aspect,
// correctiveAction, and createdBy are stored as given.
func (s *Service) MarkGap(reqID, feature, aspect, category, description, correctiveAction, createdBy string) (string, error) {
	// Reject missing mandatory fields up front (same order/messages as the
	// individual checks: req_id, feature, category, description).
	required := []struct{ name, value string }{
		{"req_id", reqID},
		{"feature", feature},
		{"category", category},
		{"description", description},
	}
	for _, r := range required {
		if r.value == "" {
			return "", fmt.Errorf("%s is required", r.name)
		}
	}

	// The category must match one of the configured category names.
	categories, err := s.repo.GetCategories()
	if err != nil {
		return "", fmt.Errorf("get categories: %w", err)
	}
	known := false
	for _, cat := range categories {
		if cat.Name == category {
			known = true
			break
		}
	}
	if !known {
		return "", fmt.Errorf("invalid category: %s", category)
	}

	gapID, err := s.generateGapID(reqID)
	if err != nil {
		return "", fmt.Errorf("generate gap ID: %w", err)
	}

	entry := &storage.GapEntry{
		GapID:            gapID,
		ReqID:            reqID,
		Feature:          feature,
		Aspect:           aspect,
		Category:         category,
		Description:      description,
		CorrectiveAction: correctiveAction,
		CreatedAt:        time.Now(),
		CreatedBy:        createdBy,
	}
	if err := s.repo.CreateEntry(entry); err != nil {
		return "", fmt.Errorf("create entry: %w", err)
	}
	return gapID, nil
}
// QueryGaps queries gap entries with filters
// QueryGaps returns gap entries matching the given filter values; the
// filters are passed straight through to the repository's QueryEntries.
func (s *Service) QueryGaps(reqID, feature, aspect, category string, limit int) ([]*storage.GapEntry, error) {
	entries, err := s.repo.QueryEntries(storage.GapQueryFilter{
		ReqID:    reqID,
		Feature:  feature,
		Aspect:   aspect,
		Category: category,
		Limit:    limit,
	})
	if err != nil {
		return nil, fmt.Errorf("query entries: %w", err)
	}
	return entries, nil
}
// GenerateReport generates a gap analysis report for a requirement
// GenerateReport builds the repository's gap analysis report for reqID.
// The requirement ID must be non-empty.
func (s *Service) GenerateReport(reqID string) (string, error) {
	if reqID == "" {
		return "", fmt.Errorf("req_id is required")
	}
	report, err := s.repo.GenerateGapReport(reqID)
	if err != nil {
		return "", fmt.Errorf("generate report: %w", err)
	}
	return report, nil
}
// MarkHelpful marks a gap entry as helpful
// MarkHelpful marks the gap entry with gapID as helpful. It returns an
// error when gapID is empty or no such entry exists.
func (s *Service) MarkHelpful(gapID string) error {
	if gapID == "" {
		return fmt.Errorf("gap_id is required")
	}
	// Verify the gap exists so callers get a clear not-found error. Wrap the
	// underlying lookup error with %w instead of discarding it, so callers
	// can still inspect the cause with errors.Is/As.
	if _, err := s.repo.GetEntryByGapID(gapID); err != nil {
		return fmt.Errorf("gap not found: %s: %w", gapID, err)
	}
	if err := s.repo.MarkHelpful(gapID); err != nil {
		return fmt.Errorf("mark helpful: %w", err)
	}
	return nil
}
// MarkUnhelpful marks a gap entry as unhelpful
// MarkUnhelpful marks the gap entry with gapID as unhelpful. It returns an
// error when gapID is empty or no such entry exists.
func (s *Service) MarkUnhelpful(gapID string) error {
	if gapID == "" {
		return fmt.Errorf("gap_id is required")
	}
	// Verify the gap exists; wrap the underlying lookup error with %w
	// instead of discarding it (matches MarkHelpful).
	if _, err := s.repo.GetEntryByGapID(gapID); err != nil {
		return fmt.Errorf("gap not found: %s: %w", gapID, err)
	}
	if err := s.repo.MarkUnhelpful(gapID); err != nil {
		return fmt.Errorf("mark unhelpful: %w", err)
	}
	return nil
}
// GetCategories retrieves all available gap categories
// GetCategories retrieves all available gap categories from the repository.
func (s *Service) GetCategories() ([]*storage.GapCategory, error) {
	cats, err := s.repo.GetCategories()
	if err != nil {
		return nil, fmt.Errorf("get categories: %w", err)
	}
	return cats, nil
}
// GetTopGapsForPlan retrieves top gaps for a requirement to inject into planning
// GetTopGapsForPlan returns the top-ranked gaps for reqID according to the
// stored gap-analysis configuration, intended for injection into planning.
func (s *Service) GetTopGapsForPlan(reqID string) ([]*storage.GapEntry, error) {
	if reqID == "" {
		return nil, fmt.Errorf("req_id is required")
	}
	cfg, err := s.repo.GetConfig()
	if err != nil {
		return nil, fmt.Errorf("get config: %w", err)
	}
	gaps, err := s.repo.GetTopGaps(reqID, cfg)
	if err != nil {
		return nil, fmt.Errorf("get top gaps: %w", err)
	}
	return gaps, nil
}
// FormatGapsForInjection formats gaps for injection into plan command prompt
// FormatGapsForInjection renders the top gaps for reqID as a markdown
// section suitable for appending to the plan command prompt. It returns an
// empty string when the requirement has no recorded gaps.
func (s *Service) FormatGapsForInjection(reqID string) (string, error) {
	gaps, err := s.GetTopGapsForPlan(reqID)
	if err != nil {
		return "", fmt.Errorf("get gaps for plan: %w", err)
	}
	if len(gaps) == 0 {
		// Nothing to inject.
		return "", nil
	}

	var b strings.Builder
	b.WriteString("\n\n## Past Implementation Gaps\n\n")
	fmt.Fprintf(&b, "The following gaps were identified in previous implementations of %s:\n\n", reqID)
	for i, g := range gaps {
		fmt.Fprintf(&b, "%d. **%s** (%s)\n", i+1, g.Feature, g.Category)
		fmt.Fprintf(&b, " - **Problem:** %s\n", g.Description)
		if g.CorrectiveAction != "" {
			fmt.Fprintf(&b, " - **Solution:** %s\n", g.CorrectiveAction)
		}
		fmt.Fprintf(&b, " - **Helpfulness:** %d helpful, %d unhelpful\n", g.HelpfulCount, g.UnhelpfulCount)
		b.WriteString("\n")
	}
	b.WriteString("**Action:** Review these gaps and ensure your implementation avoids similar mistakes.\n\n")
	return b.String(), nil
}
// UpdateConfig updates gap analysis configuration
// UpdateConfig validates and persists the gap-analysis configuration.
// rankingStrategy must be one of helpful_desc, recency_desc, or weighted;
// both numeric limits must be non-negative.
func (s *Service) UpdateConfig(maxGapInjection, minHelpfulThreshold int, rankingStrategy string) error {
	switch rankingStrategy {
	case "helpful_desc", "recency_desc", "weighted":
		// valid strategy
	default:
		return fmt.Errorf("invalid ranking strategy: %s (must be helpful_desc, recency_desc, or weighted)", rankingStrategy)
	}
	if maxGapInjection < 0 {
		return fmt.Errorf("max_gap_injection must be >= 0")
	}
	if minHelpfulThreshold < 0 {
		return fmt.Errorf("min_helpful_threshold must be >= 0")
	}

	cfg := &storage.GapConfig{
		MaxGapInjection:     maxGapInjection,
		MinHelpfulThreshold: minHelpfulThreshold,
		RankingStrategy:     rankingStrategy,
		UpdatedAt:           time.Now(),
	}
	if err := s.repo.UpdateConfig(cfg); err != nil {
		return fmt.Errorf("update config: %w", err)
	}
	return nil
}
// GetConfig retrieves current gap analysis configuration
// GetConfig retrieves the current gap analysis configuration.
func (s *Service) GetConfig() (*storage.GapConfig, error) {
	cfg, err := s.repo.GetConfig()
	if err != nil {
		return nil, fmt.Errorf("get config: %w", err)
	}
	return cfg, nil
}
// generateGapID generates a unique gap ID for a requirement
// generateGapID produces the next sequential gap ID for a requirement, in
// the form GAP-{REQ_ID}-{NNN} (zero-padded to three digits). It scans the
// requirement's existing entries and picks one past the highest numeric
// suffix found.
func (s *Service) generateGapID(reqID string) (string, error) {
	entries, err := s.repo.GetEntriesByReqID(reqID)
	if err != nil {
		return "", err
	}

	next := 1
	for _, e := range entries {
		// IDs look like GAP-CBIN-XXX-NNN; the numeric suffix is the fourth
		// dash-separated field.
		fields := strings.Split(e.GapID, "-")
		if len(fields) < 4 {
			continue
		}
		var n int
		// Sscanf leaves n at 0 on a non-numeric suffix, so malformed IDs
		// are effectively ignored by the comparison below.
		fmt.Sscanf(fields[3], "%d", &n)
		if n >= next {
			next = n + 1
		}
	}
	return fmt.Sprintf("GAP-%s-%03d", reqID, next), nil
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
// CANARY: REQ=CBIN-133; FEATURE="FuzzyMatcher"; ASPECT=Engine; STATUS=TESTED; TEST=TestCANARY_CBIN_133_Engine_Levenshtein; OWNER=canary; UPDATED=2025-10-16
package matcher
import (
"os"
"path/filepath"
"sort"
"strings"
"unicode"
)
// CalculateLevenshtein computes edit distance between two strings
// CalculateLevenshtein computes the case-insensitive edit distance between
// two strings. Comparison is byte-by-byte, so multi-byte runes count as
// multiple units (adequate for the ASCII IDs/names this matcher handles).
func CalculateLevenshtein(s1, s2 string) int {
	s1 = strings.ToLower(s1)
	s2 = strings.ToLower(s2)
	if s1 == s2 {
		return 0
	}

	// Rolling two-row dynamic program: prev holds the distances for the
	// previous character of s1, curr is filled for the current one.
	prev := make([]int, len(s2)+1)
	curr := make([]int, len(s2)+1)
	for j := range prev {
		prev[j] = j
	}
	for i := 1; i <= len(s1); i++ {
		curr[0] = i
		for j := 1; j <= len(s2); j++ {
			cost := 1
			if s1[i-1] == s2[j-1] {
				cost = 0
			}
			curr[j] = min(
				prev[j]+1,    // deletion
				curr[j-1]+1,  // insertion
				prev[j-1]+cost, // substitution
			)
		}
		prev, curr = curr, prev
	}
	return prev[len(s2)]
}
// ScoreMatch calculates similarity score (0-100) between query and candidate
// ScoreMatch calculates similarity score (0-100) between query and candidate.
//
// Heuristics are tried in order, first hit wins: exact match (100),
// hyphen-insensitive match for IDs (95), substring containment (80-100,
// scaled by length ratio), all-words-present multi-word match (70-90),
// abbreviation match when it scores >= 75, and finally Levenshtein
// similarity — which can still lose to a lower abbreviation score.
func ScoreMatch(query, candidate string) int {
// Save original candidate for abbreviation matching (needs capitals)
origCandidate := candidate
query = strings.ToLower(query)
candidate = strings.ToLower(candidate)
// Exact match
if query == candidate {
return 100
}
// Try removing hyphens for ID matching (e.g., "cbin105" vs "cbin-105")
queryNoHyphen := strings.ReplaceAll(query, "-", "")
candidateNoHyphen := strings.ReplaceAll(candidate, "-", "")
if queryNoHyphen == candidateNoHyphen {
return 95 // Very close match, just formatting difference
}
// Substring match gets high score, scaled by how much of the candidate
// the query covers.
if strings.Contains(candidate, query) {
ratio := float64(len(query)) / float64(len(candidate))
return int(80 + (ratio * 20)) // 80-100 range
}
// Multi-word fuzzy match (e.g., "user auth" vs "UserAuthentication"):
// every query word must appear somewhere in the candidate.
if strings.Contains(query, " ") {
words := strings.Fields(query)
allMatch := true
for _, word := range words {
if !strings.Contains(candidate, word) {
allMatch = false
break
}
}
if allMatch {
// All words found as substrings; ratio discounts the separators.
ratio := float64(len(query)-len(words)+1) / float64(len(candidate))
return int(70 + (ratio * 20)) // 70-90 range for multi-word matches
}
}
// Abbreviation match (e.g., "ua" matches "UserAuthentication").
// Pass original candidate (not lowercased) to preserve capitals.
abbrevScore := abbreviationScore(query, origCandidate)
if abbrevScore >= 75 {
return abbrevScore
}
// Levenshtein distance as the fallback similarity measure.
distance := CalculateLevenshtein(query, candidate)
maxLen := max(len(query), len(candidate))
if maxLen == 0 {
return 0
}
// Convert distance to similarity percentage
similarity := float64(maxLen-distance) / float64(maxLen)
score := int(similarity * 100)
// A sub-75 abbreviation score can still beat a weak Levenshtein score.
if abbrevScore > score {
return abbrevScore
}
// Defensive clamp; the DP should not produce distance > maxLen.
if score < 0 {
return 0
}
return score
}
// abbreviationScore calculates score based on abbreviation matching
// abbreviationScore scores query against the abbreviation formed from
// candidate's first character plus every uppercase rune, all lowercased
// (e.g. "UserAuthentication" -> "ua"). Returns 0 when no abbreviation rule
// applies.
func abbreviationScore(query, candidate string) int {
	var b strings.Builder
	for i, r := range candidate {
		if i == 0 || unicode.IsUpper(r) {
			b.WriteRune(unicode.ToLower(r))
		}
	}
	abbrev := b.String()

	// Exact abbreviation hit.
	if query == abbrev {
		return 75
	}
	// Query covers part of the abbreviation: 70-90 scaled by coverage.
	if strings.Contains(abbrev, query) {
		return int(70 + (float64(len(query))/float64(len(abbrev)))*20)
	}
	if len(abbrev) == 0 {
		return 0
	}
	// Near-miss: a single edit against the abbreviation may still qualify,
	// capped at 80 and only accepted when the result stays >= 70.
	dist := CalculateLevenshtein(query, abbrev)
	longest := max(len(query), len(abbrev))
	if dist <= 1 && longest > 0 {
		if score := int(float64(longest-dist) / float64(longest) * 80); score >= 70 {
			return score
		}
	}
	return 0
}
// matchesAbbreviation checks if query matches first letters of words in candidate
// matchesAbbreviation reports whether query (expected lowercase) occurs
// within the abbreviation built from candidate's first character and its
// uppercase runes.
func matchesAbbreviation(query, candidate string) bool {
	abbrev := make([]rune, 0, len(candidate))
	for i, r := range candidate {
		if i == 0 || unicode.IsUpper(r) {
			abbrev = append(abbrev, unicode.ToLower(r))
		}
	}
	return strings.Contains(string(abbrev), query)
}
// min returns the smallest of three ints (used by the Levenshtein DP).
func min(a, b, c int) int {
	smallest := a
	if b < smallest {
		smallest = b
	}
	if c < smallest {
		smallest = c
	}
	return smallest
}
// max returns the larger of two ints.
func max(a, b int) int {
	if b > a {
		return b
	}
	return a
}
// Match represents a fuzzy match result
// Match represents a fuzzy match result
type Match struct {
ReqID string // requirement ID parsed from the directory name (e.g. CBIN-105)
FeatureName string // remainder of the directory name after the ID
Score int // similarity score 0-100 (100 = exact/equal rank)
SpecPath string // path to the spec directory this match came from
}
// FindBestMatches returns top N matches for query
// FindBestMatches scores query against every spec directory in specsDir and
// returns up to limit matches, best first. Directory names must follow the
// CBIN-XXX-feature-name pattern; others are skipped. An empty query returns
// every spec with a uniform score of 100; otherwise only matches scoring at
// least 60 are kept.
func FindBestMatches(query string, specsDir string, limit int) ([]Match, error) {
	entries, err := os.ReadDir(specsDir)
	if err != nil {
		return nil, err
	}

	var results []Match
	for _, e := range entries {
		if !e.IsDir() {
			continue
		}
		name := e.Name()

		// Split CBIN-XXX-feature-name: ID is the first two segments, the
		// feature name is everything after.
		segments := strings.Split(name, "-")
		if len(segments) < 3 {
			continue
		}
		reqID := segments[0] + "-" + segments[1]
		feature := strings.Join(segments[2:], "-")
		specPath := filepath.Join(specsDir, name)

		if query == "" {
			// No query: list everything with equal rank.
			results = append(results, Match{
				ReqID:       reqID,
				FeatureName: feature,
				Score:       100,
				SpecPath:    specPath,
			})
			continue
		}

		// Take the better of ID-based and name-based similarity.
		score := max(ScoreMatch(query, reqID), ScoreMatch(query, feature))
		if score >= 60 {
			results = append(results, Match{
				ReqID:       reqID,
				FeatureName: feature,
				Score:       score,
				SpecPath:    specPath,
			})
		}
	}

	// Highest score first.
	sort.Slice(results, func(i, j int) bool {
		return results[i].Score > results[j].Score
	})

	if limit > 0 && len(results) > limit {
		results = results[:limit]
	}
	return results, nil
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
// CANARY: REQ=CBIN-131; FEATURE="MigrateFrom"; ASPECT=CLI; STATUS=IMPL; OWNER=canary; UPDATED=2025-10-16
package migrate
import (
"fmt"
"io"
"os"
"path/filepath"
"strings"
"go.devnw.com/canary/embedded"
)
// SystemType represents the type of system being migrated from
type SystemType string
const (
SystemTypeSpecKit SystemType = "spec-kit" // spec-kit layout (memory/, templates/, slash-command templates)
SystemTypeLegacyCanary SystemType = "legacy-canary" // old layout with tools/canary, status.json, GAP_ANALYSIS.md
SystemTypeMigrated SystemType = "migrated" // already has modern .canary/ database or templates
SystemTypeUnknown SystemType = "unknown" // no recognized indicators found
)
// DetectSystemType identifies what type of system exists in a directory
// DetectSystemType inspects rootDir and classifies which system layout it
// contains, returning the type plus a human-readable explanation.
//
// Precedence: an existing .canary/ database or templates directory wins
// (already migrated), then spec-kit (at least 3 of 5 indicator files), then
// legacy CANARY (at least 2 of 4 indicators), otherwise unknown.
func DetectSystemType(rootDir string) (SystemType, string) {
	exists := func(rel string) bool {
		_, err := os.Stat(filepath.Join(rootDir, rel))
		return err == nil
	}

	// Already-migrated check: modern .canary/ contents take priority.
	hasDB := exists(".canary/canary.db")
	hasTemplates := exists(".canary/templates")
	if hasDB || hasTemplates {
		details := "System already migrated to unified CANARY"
		if hasDB {
			details += " (has .canary/canary.db)"
		}
		if hasTemplates {
			details += " (has .canary/templates/)"
		}
		return SystemTypeMigrated, details
	}

	countPresent := func(paths []string) int {
		n := 0
		for _, p := range paths {
			if exists(p) {
				n++
			}
		}
		return n
	}

	// spec-kit indicator files.
	specKitScore := countPresent([]string{
		"memory/constitution.md",
		"templates/spec-template.md",
		"templates/plan-template.md",
		"templates/commands/specify.md",
		"templates/commands/plan.md",
	})
	if specKitScore >= 3 {
		return SystemTypeSpecKit, fmt.Sprintf("Detected spec-kit system (%d/5 indicators found)", specKitScore)
	}

	// Legacy CANARY indicator files.
	legacyScore := countPresent([]string{
		"tools/canary",
		"tools/canary/main.go",
		"status.json",
		"GAP_ANALYSIS.md",
	})
	if legacyScore >= 2 {
		return SystemTypeLegacyCanary, fmt.Sprintf("Detected legacy CANARY system (%d/4 indicators found)", legacyScore)
	}

	return SystemTypeUnknown, "No recognized system type detected"
}
// MigrationPlan describes what will be migrated
type MigrationPlan struct {
SystemType SystemType
FilesToCopy []FileCopy
FilesToMerge []FileMerge
FilesToCreate []string
Warnings []string
}
// FileCopy represents a file to be copied
type FileCopy struct {
Source string
Dest string
}
// FileMerge represents a file that needs merging
type FileMerge struct {
Source string
Dest string
Description string
}
// PlanMigration creates a migration plan
// PlanMigration builds a MigrationPlan for the detected system type by
// delegating to the type-specific planner. Migrated and unknown systems
// cannot be planned. The dryRun flag is currently unused by planning itself
// (planning never writes anything).
func PlanMigration(rootDir string, systemType SystemType, dryRun bool) (*MigrationPlan, error) {
	base := &MigrationPlan{
		SystemType:    systemType,
		FilesToCopy:   []FileCopy{},
		FilesToMerge:  []FileMerge{},
		FilesToCreate: []string{},
		Warnings:      []string{},
	}
	switch systemType {
	case SystemTypeSpecKit:
		return planSpecKitMigration(rootDir, base)
	case SystemTypeLegacyCanary:
		return planLegacyCanaryMigration(rootDir, base)
	case SystemTypeMigrated, SystemTypeUnknown:
		return nil, fmt.Errorf("cannot migrate from type: %s", systemType)
	default:
		return nil, fmt.Errorf("unsupported system type: %s", systemType)
	}
}
// planSpecKitMigration plans migration from spec-kit
// planSpecKitMigration fills in the plan for a spec-kit source tree:
// constitution and template files plus slash-command templates are copied
// into .canary/, README.md is flagged for merging, and a scripts/ directory
// triggers a manual-review warning. Nothing is created outright — post-
// migration files come from slash commands run by the user.
func planSpecKitMigration(rootDir string, plan *MigrationPlan) (*MigrationPlan, error) {
	exists := func(rel string) bool {
		_, err := os.Stat(filepath.Join(rootDir, rel))
		return err == nil
	}

	if exists(".canary") {
		plan.Warnings = append(plan.Warnings, ".canary/ directory already exists - will merge content")
	}

	// Straight copies: memory file plus the core templates, when present.
	for _, c := range []FileCopy{
		{Source: "memory/constitution.md", Dest: ".canary/memory/constitution.md"},
		{Source: "templates/spec-template.md", Dest: ".canary/templates/spec-template.md"},
		{Source: "templates/plan-template.md", Dest: ".canary/templates/plan-template.md"},
		{Source: "templates/tasks-template.md", Dest: ".canary/templates/tasks-template.md"},
		{Source: "templates/checklist-template.md", Dest: ".canary/templates/checklist-template.md"},
	} {
		if exists(c.Source) {
			plan.FilesToCopy = append(plan.FilesToCopy, c)
		}
	}

	// Slash-command templates, copied individually when present.
	for _, cmd := range []string{"specify", "plan", "tasks", "implement", "clarify", "analyze", "checklist", "constitution"} {
		src := filepath.Join("templates", "commands", cmd+".md")
		if exists(src) {
			plan.FilesToCopy = append(plan.FilesToCopy, FileCopy{
				Source: src,
				Dest:   filepath.Join(".canary", "templates", "commands", cmd+".md"),
			})
		}
	}

	// Scripts cannot be migrated automatically; flag for manual review.
	if exists("scripts") {
		plan.Warnings = append(plan.Warnings, "scripts/ directory found - will need manual review for compatibility")
	}

	// README needs a content merge rather than a copy.
	if exists("README.md") {
		plan.FilesToMerge = append(plan.FilesToMerge, FileMerge{
			Source:      "README.md",
			Dest:        "README.md",
			Description: "Merge spec-kit README with CANARY token documentation",
		})
	}

	plan.FilesToCreate = []string{}
	return plan, nil
}
// planLegacyCanaryMigration plans migration from legacy canary.
// Legacy status artifacts are preserved in place, and the .canary/ scaffold
// is scheduled for creation from embedded templates.
func planLegacyCanaryMigration(rootDir string, plan *MigrationPlan) (*MigrationPlan, error) {
	exists := func(rel string) bool {
		_, err := os.Stat(filepath.Join(rootDir, rel))
		return err == nil
	}
	if exists(".canary") {
		plan.Warnings = append(plan.Warnings, ".canary/ directory already exists - will merge content")
	}
	// Keep legacy status artifacts exactly where they are (Dest == Source).
	for _, rel := range []string{"status.json", "status.csv", "GAP_ANALYSIS.md"} {
		if exists(rel) {
			plan.FilesToCopy = append(plan.FilesToCopy, FileCopy{Source: rel, Dest: rel})
		}
	}
	// Scaffold files to materialize from the embedded template FS.
	plan.FilesToCreate = []string{
		".canary/memory/constitution.md",
		".canary/templates/spec-template.md",
		".canary/templates/plan-template.md",
		".canary/templates/commands/constitution.md",
		".canary/templates/commands/plan.md",
		".canary/templates/commands/scan.md",
		".canary/templates/commands/specify.md",
		".canary/templates/commands/update-stale.md",
		".canary/templates/commands/verify.md",
	}
	// The old standalone scanner is superseded by the built-in one.
	if exists("tools/canary") {
		plan.Warnings = append(plan.Warnings, "tools/canary/ scanner found - can be removed after migration (new binary has built-in scanner)")
	}
	return plan, nil
}
// ExecuteMigration performs the migration
func ExecuteMigration(rootDir string, plan *MigrationPlan, dryRun bool) error {
if dryRun {
fmt.Println("\nπ DRY RUN MODE - No changes will be made")
}
// Create directories
dirsToCreate := []string{
".canary",
".canary/memory",
".canary/templates",
".canary/templates/commands",
".canary/scripts",
".canary/specs",
}
for _, dir := range dirsToCreate {
dirPath := filepath.Join(rootDir, dir)
if dryRun {
fmt.Printf("Would create: %s\n", dir)
} else {
if err := os.MkdirAll(dirPath, 0755); err != nil {
return fmt.Errorf("failed to create directory %s: %w", dir, err)
}
fmt.Printf("β
Created: %s\n", dir)
}
}
// Copy files
for _, fc := range plan.FilesToCopy {
srcPath := filepath.Join(rootDir, fc.Source)
destPath := filepath.Join(rootDir, fc.Dest)
if dryRun {
fmt.Printf("Would copy: %s -> %s\n", fc.Source, fc.Dest)
} else {
if err := os.MkdirAll(filepath.Dir(destPath), 0755); err != nil {
return fmt.Errorf("failed to create destination directory for %s: %w", fc.Dest, err)
}
if err := copyFile(srcPath, destPath); err != nil {
return fmt.Errorf("failed to copy %s to %s: %w", fc.Source, fc.Dest, err)
}
fmt.Printf("β
Copied: %s -> %s\n", fc.Source, fc.Dest)
}
}
// Create files from templates
for _, filename := range plan.FilesToCreate {
destPath := filepath.Join(rootDir, filename)
if dryRun {
fmt.Printf("Would create: %s\n", filename)
} else {
// Map file path to embedded template path
embeddedPath := filepath.Join("base", filename)
content, err := embedded.CanaryFS.ReadFile(embeddedPath)
if err != nil {
return fmt.Errorf("failed to read embedded template %s: %w", embeddedPath, err)
}
if err := os.MkdirAll(filepath.Dir(destPath), 0755); err != nil {
return fmt.Errorf("failed to create directory for %s: %w", filename, err)
}
if err := os.WriteFile(destPath, content, 0644); err != nil {
return fmt.Errorf("failed to create file %s: %w", filename, err)
}
fmt.Printf("β
Created: %s\n", filename)
}
}
// Show merge requirements
if len(plan.FilesToMerge) > 0 {
fmt.Println("\nβ οΈ Files requiring manual merge:")
for _, fm := range plan.FilesToMerge {
fmt.Printf(" - %s: %s\n", fm.Source, fm.Description)
}
}
// Show warnings
if len(plan.Warnings) > 0 {
fmt.Println("\nβ οΈ Warnings:")
for _, warning := range plan.Warnings {
fmt.Printf(" - %s\n", warning)
}
}
return nil
}
// copyFile copies a file from src to dst, preserving the source's
// permission bits on the destination.
//
// FIX: the original closed the destination with a bare defer, discarding the
// Close error — on many filesystems write errors only surface at Close, so a
// truncated copy could be reported as success. The destination is now closed
// explicitly and its error returned.
func copyFile(src, dst string) error {
	sourceFile, err := os.Open(src)
	if err != nil {
		return err
	}
	defer sourceFile.Close() // read side: Close error is not actionable

	// Stat via the open handle (same file, avoids a second path lookup).
	sourceInfo, err := sourceFile.Stat()
	if err != nil {
		return err
	}

	destFile, err := os.Create(dst)
	if err != nil {
		return err
	}
	if _, err := io.Copy(destFile, sourceFile); err != nil {
		destFile.Close()
		return err
	}
	// Close errors on the write side can indicate unflushed data — report them.
	if err := destFile.Close(); err != nil {
		return err
	}

	// Preserve file permissions
	return os.Chmod(dst, sourceInfo.Mode())
}
// GetMigrationSummary returns a human-readable summary of the plan: one line
// per category with the count of affected files, plus the warning count.
func GetMigrationSummary(plan *MigrationPlan) string {
	lines := []string{
		fmt.Sprintf("Migration Plan for %s:\n", plan.SystemType),
		fmt.Sprintf("Files to copy: %d", len(plan.FilesToCopy)),
		fmt.Sprintf("Files to merge: %d", len(plan.FilesToMerge)),
		fmt.Sprintf("Files to create: %d", len(plan.FilesToCreate)),
		fmt.Sprintf("Warnings: %d", len(plan.Warnings)),
	}
	return strings.Join(lines, "\n") + "\n"
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
// CANARY: REQ=CBIN-145; FEATURE="OrphanDetection"; ASPECT=Engine; STATUS=IMPL; UPDATED=2025-10-17
package migrate
import (
"fmt"
"os"
"path/filepath"
"strings"
"go.devnw.com/canary/internal/storage"
)
// DetectOrphans finds all requirements with tokens but no specification.
// Tokens under excluded paths are dropped before grouping; a requirement is
// orphaned when no matching spec directory contains a spec.md.
func DetectOrphans(db *storage.DB, rootDir string, excludePaths []string) ([]*OrphanedRequirement, error) {
	tokens, err := db.ListTokens(map[string]string{}, "", "req_id ASC", 0)
	if err != nil {
		return nil, fmt.Errorf("failed to list tokens: %w", err)
	}
	// Drop tokens living under excluded paths.
	kept := make([]*storage.Token, 0, len(tokens))
	for _, tok := range tokens {
		if shouldExcludePath(tok.FilePath, excludePaths) {
			continue
		}
		kept = append(kept, tok)
	}
	// A requirement is orphaned when it has tokens but no spec on disk.
	orphans := []*OrphanedRequirement{}
	for reqID, group := range groupByRequirement(kept) {
		if specExists(rootDir, reqID) {
			continue
		}
		orphans = append(orphans, createOrphanRequirement(reqID, group))
	}
	return orphans, nil
}
// DryRun simulates migration without creating files: it detects orphans and
// wraps them in an OrphanPlan describing what a real run would process.
func DryRun(db *storage.DB, rootDir string, excludePaths []string) (*OrphanPlan, error) {
	found, err := DetectOrphans(db, rootDir, excludePaths)
	if err != nil {
		return nil, err
	}
	return &OrphanPlan{
		Orphans:      found,
		TotalOrphans: len(found),
		Excluded:     excludePaths,
	}, nil
}
// CalculateConfidence determines confidence level based on orphan
// characteristics. Points accrue from feature count (up to 3), presence of
// tests (2) and benchmarks (1), and the most advanced status reached
// (BENCHED: 2, TESTED: 1, IMPL: 0). 5+ points is high, 2+ is medium.
func CalculateConfidence(orphan *OrphanedRequirement) string {
	points := 0

	// More features implies a more substantial implementation.
	switch {
	case orphan.FeatureCount >= 5:
		points += 3
	case orphan.FeatureCount >= 3:
		points += 2
	case orphan.FeatureCount >= 2:
		points++
	}

	// One pass over the features collects every signal we score on.
	var hasTest, hasBench, statusTested, statusBenched bool
	for _, tok := range orphan.Features {
		hasTest = hasTest || tok.Test != ""
		hasBench = hasBench || tok.Bench != ""
		statusTested = statusTested || tok.Status == "TESTED"
		statusBenched = statusBenched || tok.Status == "BENCHED"
	}
	if hasTest {
		points += 2
	}
	if hasBench {
		points++
	}

	// Only the highest status tier counts; IMPL alone adds nothing
	// (matching the original scoring table).
	switch {
	case statusBenched:
		points += 2
	case statusTested:
		points++
	}

	switch {
	case points >= 5:
		return ConfidenceHigh
	case points >= 2:
		return ConfidenceMedium
	default:
		return ConfidenceLow
	}
}
// shouldExcludePath checks if a file path should be excluded.
//
// A path is excluded when the (slash-trimmed) pattern matches a complete
// leading path segment or any interior path segment of filePath.
//
// FIX 1: the original's strings.HasPrefix check was dead code — it is fully
// subsumed by strings.Contains with the same argument.
// FIX 2: plain substring matching caused false positives: excluding "vendor"
// also excluded "myvendor/foo.go". Matching is now anchored to path-segment
// boundaries.
// FIX 3: a pattern that trims to the empty string (e.g. "/") previously
// matched every path containing a slash; such patterns are now skipped.
func shouldExcludePath(filePath string, excludePaths []string) bool {
	for _, exclude := range excludePaths {
		// Remove leading/trailing slashes for consistent matching.
		exclude = strings.Trim(exclude, "/")
		if exclude == "" {
			continue
		}
		// Match only whole path segments: either at the start of the path
		// or bounded by slashes somewhere inside it.
		if strings.HasPrefix(filePath, exclude+"/") ||
			strings.Contains(filePath, "/"+exclude+"/") {
			return true
		}
	}
	return false
}
// groupByRequirement groups tokens by requirement ID, preserving each
// group's original token order.
func groupByRequirement(tokens []*storage.Token) map[string][]*storage.Token {
	byReq := map[string][]*storage.Token{}
	for _, tok := range tokens {
		byReq[tok.ReqID] = append(byReq[tok.ReqID], tok)
	}
	return byReq
}
// specExists checks if a specification exists for the given requirement ID:
// a directory under .canary/specs named "<reqID>-..." that contains spec.md.
func specExists(rootDir string, reqID string) bool {
	specsDir := filepath.Join(rootDir, ".canary", "specs")
	entries, err := os.ReadDir(specsDir)
	if err != nil {
		// Missing or unreadable specs dir means no spec can exist.
		return false
	}
	want := reqID + "-"
	for _, e := range entries {
		if !e.IsDir() || !strings.HasPrefix(e.Name(), want) {
			continue
		}
		// The directory only counts if it actually holds a spec.md.
		if _, err := os.Stat(filepath.Join(specsDir, e.Name(), "spec.md")); err == nil {
			return true
		}
	}
	return false
}
// createOrphanRequirement creates an OrphanedRequirement from tokens and
// stamps it with a confidence level derived from those tokens.
func createOrphanRequirement(reqID string, tokens []*storage.Token) *OrphanedRequirement {
	o := &OrphanedRequirement{
		ReqID:        reqID,
		Features:     tokens,
		FeatureCount: len(tokens),
	}
	o.Confidence = CalculateConfidence(o)
	return o
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
// CANARY: REQ=CBIN-145; FEATURE="PlanGeneration"; ASPECT=Engine; STATUS=IMPL; UPDATED=2025-10-17
package migrate
import (
"fmt"
"strings"
"time"
)
// GeneratePlan creates an implementation plan from an orphaned requirement
func GeneratePlan(orphan *OrphanedRequirement) (string, error) {
if len(orphan.Features) == 0 {
return "", fmt.Errorf("cannot generate plan for orphan with no features")
}
today := time.Now().Format("2006-01-02")
primaryFeature := orphan.Features[0].Feature
var plan strings.Builder
// Header
plan.WriteString(fmt.Sprintf("# Implementation Plan: %s\n\n", primaryFeature))
plan.WriteString(fmt.Sprintf("**Requirement ID:** %s\n", orphan.ReqID))
plan.WriteString(fmt.Sprintf("**Generated:** %s (Auto-generated from legacy tokens)\n", today))
plan.WriteString(fmt.Sprintf("**Confidence:** %s\n\n", orphan.Confidence))
if orphan.Confidence == ConfidenceLow || orphan.Confidence == ConfidenceMedium {
plan.WriteString("> **β οΈ MIGRATION NOTICE:** This plan was generated from existing code.\n")
plan.WriteString("> Please review and update to reflect the actual implementation approach.\n\n")
}
// Overview
plan.WriteString("## Overview\n\n")
plan.WriteString(fmt.Sprintf("This plan documents the existing implementation of %s, which was detected in the codebase with %d features.\n\n", primaryFeature, orphan.FeatureCount))
// Current Implementation Status
plan.WriteString("## Current Implementation Status\n\n")
statusCounts := make(map[string]int)
for _, token := range orphan.Features {
statusCounts[token.Status]++
}
plan.WriteString("**Status Breakdown:**\n")
for _, status := range []string{"STUB", "IMPL", "TESTED", "BENCHED"} {
count := statusCounts[status]
if count > 0 {
plan.WriteString(fmt.Sprintf("- %s: %d features\n", status, count))
}
}
plan.WriteString("\n")
// Architecture
plan.WriteString("## Architecture\n\n")
plan.WriteString("**Components:**\n\n")
aspectGroups := make(map[string][]string)
aspectFiles := make(map[string]map[string]bool)
for _, token := range orphan.Features {
aspectGroups[token.Aspect] = append(aspectGroups[token.Aspect], token.Feature)
if aspectFiles[token.Aspect] == nil {
aspectFiles[token.Aspect] = make(map[string]bool)
}
aspectFiles[token.Aspect][token.FilePath] = true
}
for aspect, features := range aspectGroups {
plan.WriteString(fmt.Sprintf("### %s Layer\n", aspect))
plan.WriteString(fmt.Sprintf("**Features:** %d\n", len(features)))
plan.WriteString("**Files:**\n")
for file := range aspectFiles[aspect] {
plan.WriteString(fmt.Sprintf("- `%s`\n", file))
}
plan.WriteString("\n")
}
// Implementation Phases
plan.WriteString("## Implementation Phases\n\n")
plan.WriteString("_Note: These phases reflect the current state of the codebase._\n\n")
// Phase 1: Stubbed features
if statusCounts["STUB"] > 0 {
plan.WriteString("### Phase 1: Stub Features (TODO)\n\n")
plan.WriteString("The following features are stubbed and need implementation:\n\n")
for _, token := range orphan.Features {
if token.Status == "STUB" {
plan.WriteString(fmt.Sprintf("- **%s** (%s) - `%s:%d`\n", token.Feature, token.Aspect, token.FilePath, token.LineNumber))
}
}
plan.WriteString("\n")
}
// Phase 2: Implemented features
if statusCounts["IMPL"] > 0 {
plan.WriteString("### Phase 2: Implemented Features (COMPLETE)\n\n")
plan.WriteString("The following features are implemented:\n\n")
for _, token := range orphan.Features {
if token.Status == "IMPL" {
tokenStr := fmt.Sprintf("<!-- CANARY: REQ=%s; FEATURE=\"%s\"; ASPECT=%s; STATUS=%s; UPDATED=%s -->",
orphan.ReqID, token.Feature, token.Aspect, token.Status, today)
plan.WriteString(tokenStr + "\n")
plan.WriteString(fmt.Sprintf("- **%s** (%s) - `%s:%d`\n", token.Feature, token.Aspect, token.FilePath, token.LineNumber))
}
}
plan.WriteString("\n")
}
// Phase 3: Tested features
if statusCounts["TESTED"] > 0 {
plan.WriteString("### Phase 3: Tested Features (COMPLETE)\n\n")
plan.WriteString("The following features have tests:\n\n")
for _, token := range orphan.Features {
if token.Status == "TESTED" {
tokenStr := fmt.Sprintf("<!-- CANARY: REQ=%s; FEATURE=\"%s\"; ASPECT=%s; STATUS=%s",
orphan.ReqID, token.Feature, token.Aspect, token.Status)
if token.Test != "" {
tokenStr += fmt.Sprintf("; TEST=%s", token.Test)
}
tokenStr += fmt.Sprintf("; UPDATED=%s -->", today)
plan.WriteString(tokenStr + "\n")
plan.WriteString(fmt.Sprintf("- **%s** (%s) - `%s:%d`\n", token.Feature, token.Aspect, token.FilePath, token.LineNumber))
if token.Test != "" {
plan.WriteString(fmt.Sprintf(" - Test: `%s`\n", token.Test))
}
}
}
plan.WriteString("\n")
}
// Phase 4: Benchmarked features
if statusCounts["BENCHED"] > 0 {
plan.WriteString("### Phase 4: Benchmarked Features (COMPLETE)\n\n")
plan.WriteString("The following features have benchmarks:\n\n")
for _, token := range orphan.Features {
if token.Status == "BENCHED" {
tokenStr := fmt.Sprintf("<!-- CANARY: REQ=%s; FEATURE=\"%s\"; ASPECT=%s; STATUS=%s",
orphan.ReqID, token.Feature, token.Aspect, token.Status)
if token.Bench != "" {
tokenStr += fmt.Sprintf("; BENCH=%s", token.Bench)
}
tokenStr += fmt.Sprintf("; UPDATED=%s -->", today)
plan.WriteString(tokenStr + "\n")
plan.WriteString(fmt.Sprintf("- **%s** (%s) - `%s:%d`\n", token.Feature, token.Aspect, token.FilePath, token.LineNumber))
if token.Bench != "" {
plan.WriteString(fmt.Sprintf(" - Benchmark: `%s`\n", token.Bench))
}
}
}
plan.WriteString("\n")
}
// Testing Strategy
plan.WriteString("## Testing Strategy\n\n")
hasTests := false
hasBenchmarks := false
for _, token := range orphan.Features {
if token.Test != "" {
hasTests = true
}
if token.Bench != "" {
hasBenchmarks = true
}
}
if hasTests {
plan.WriteString("**Existing Tests:**\n")
for _, token := range orphan.Features {
if token.Test != "" {
plan.WriteString(fmt.Sprintf("- `%s`\n", token.Test))
}
}
plan.WriteString("\n")
} else {
plan.WriteString("**Test Coverage:** No tests detected. Consider adding:\n")
plan.WriteString("- Unit tests for core functionality\n")
plan.WriteString("- Integration tests for end-to-end flows\n\n")
}
if hasBenchmarks {
plan.WriteString("**Existing Benchmarks:**\n")
for _, token := range orphan.Features {
if token.Bench != "" {
plan.WriteString(fmt.Sprintf("- `%s`\n", token.Bench))
}
}
plan.WriteString("\n")
}
// Next Steps
plan.WriteString("## Next Steps\n\n")
plan.WriteString("1. Review this auto-generated plan for accuracy\n")
plan.WriteString("2. Update the Overview section with implementation approach\n")
plan.WriteString("3. Document architectural decisions\n")
if statusCounts["STUB"] > 0 {
plan.WriteString(fmt.Sprintf("4. Complete %d stubbed features\n", statusCounts["STUB"]))
}
if !hasTests {
plan.WriteString("4. Add comprehensive test coverage\n")
}
plan.WriteString("5. Update CANARY tokens as implementation progresses\n")
plan.WriteString(fmt.Sprintf("6. Run `canary status %s` to track progress\n\n", orphan.ReqID))
// Constitutional Compliance
plan.WriteString("## Constitutional Compliance\n\n")
plan.WriteString("**Article I (Requirement-First):** β
Specification now exists\n")
plan.WriteString("**Article II (Specification Discipline):** β οΈ Spec needs review\n")
if hasTests {
plan.WriteString("**Article IV (Test-First):** β
Tests exist\n")
} else {
plan.WriteString("**Article IV (Test-First):** β Tests needed\n")
}
plan.WriteString("**Article VII (Documentation Currency):** β
Tokens up to date\n\n")
plan.WriteString("---\n\n")
plan.WriteString("_This plan was auto-generated by `canary migrate` on " + today + "_\n")
return plan.String(), nil
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
// CANARY: REQ=CBIN-145; FEATURE="SpecGeneration"; ASPECT=Engine; STATUS=IMPL; UPDATED=2025-10-17
package migrate
import (
"fmt"
"strings"
"time"
)
// GenerateSpec creates a specification from an orphaned requirement: a
// markdown document reconstructed from the requirement's existing CANARY
// tokens. Returns an error only when the orphan carries no features.
func GenerateSpec(orphan *OrphanedRequirement) (string, error) {
	if len(orphan.Features) == 0 {
		return "", fmt.Errorf("cannot generate spec for orphan with no features")
	}
	today := time.Now().Format("2006-01-02")
	// Extract feature name from first feature
	primaryFeature := orphan.Features[0].Feature
	// NOTE(review): strings.Title is deprecated since Go 1.18 (it does not
	// handle Unicode word boundaries correctly); behavior deliberately
	// kept as-is here.
	featureName := strings.Title(primaryFeature)
	var spec strings.Builder
	// Header
	spec.WriteString(fmt.Sprintf("# Requirement Specification: %s\n\n", featureName))
	spec.WriteString(fmt.Sprintf("**Requirement ID:** %s\n", orphan.ReqID))
	spec.WriteString(fmt.Sprintf("**Generated:** %s (Auto-generated from legacy tokens)\n", today))
	spec.WriteString(fmt.Sprintf("**CONFIDENCE:** %s\n\n", orphan.Confidence))
	// Low-confidence specs get a prominent review banner.
	// NOTE(review): "β οΈ" in the literal below looks mojibake-damaged
	// (likely "⚠️") — confirm against the original source.
	if orphan.Confidence == ConfidenceLow {
		spec.WriteString("> **β οΈ MIGRATION NOTICE:** This specification was automatically generated from existing code tokens.\n")
		spec.WriteString("> The confidence level is LOW. Please review and update manually to ensure accuracy.\n\n")
	}
	// Overview
	spec.WriteString("## Overview\n\n")
	spec.WriteString(fmt.Sprintf("**Purpose:** This specification was auto-generated from %d existing CANARY tokens found in the codebase.\n\n", orphan.FeatureCount))
	spec.WriteString("**Scope:**\n")
	spec.WriteString("- Included: Features already implemented in the codebase\n")
	spec.WriteString("- Excluded: New features not yet implemented\n\n")
	// User Stories (generated from features)
	spec.WriteString("## User Stories\n\n")
	spec.WriteString("### Primary User Stories\n\n")
	spec.WriteString("**US-1: Existing Implementation**\n")
	spec.WriteString("As a developer,\n")
	spec.WriteString(fmt.Sprintf("I want to have a specification for %s,\n", featureName))
	spec.WriteString("So that the existing implementation is properly documented.\n\n")
	spec.WriteString("**Acceptance Criteria:**\n")
	spec.WriteString("- [x] Implementation exists in codebase\n")
	spec.WriteString("- [ ] Specification updated with actual functionality\n")
	spec.WriteString("- [ ] Tests cover all features\n\n")
	// Functional Requirements (from features)
	spec.WriteString("## Functional Requirements\n\n")
	// Group features by aspect
	aspectGroups := make(map[string][]string)
	for _, token := range orphan.Features {
		aspectGroups[token.Aspect] = append(aspectGroups[token.Aspect], token.Feature)
	}
	// One FR-N section per aspect (map iteration order is random, so FR
	// numbering across aspects is not deterministic between runs).
	frNum := 1
	for aspect, features := range aspectGroups {
		spec.WriteString(fmt.Sprintf("### FR-%d: %s Implementation\n", frNum, aspect))
		spec.WriteString("**Priority:** Medium\n")
		spec.WriteString(fmt.Sprintf("**Description:** Implements %s aspect with the following features:\n", aspect))
		for _, feature := range features {
			spec.WriteString(fmt.Sprintf("- %s\n", feature))
		}
		spec.WriteString("**Acceptance:** Existing implementation validated and tests pass\n\n")
		frNum++
	}
	// Success Criteria
	spec.WriteString("## Success Criteria\n\n")
	spec.WriteString("**Quantitative Metrics:**\n")
	spec.WriteString("- [x] All existing features have CANARY tokens\n")
	spec.WriteString("- [ ] All features have unit tests\n")
	spec.WriteString("- [ ] Code coverage > 80%\n\n")
	// Test Scenarios
	spec.WriteString("## User Scenarios & Testing\n\n")
	spec.WriteString("### Scenario 1: Feature Usage\n")
	spec.WriteString("**Given:** The system is running\n")
	spec.WriteString("**When:** User invokes the feature\n")
	spec.WriteString("**Then:** Feature operates as implemented\n\n")
	spec.WriteString("_Note: Update scenarios based on actual feature behavior_\n\n")
	// Dependencies
	spec.WriteString("## Dependencies\n\n")
	spec.WriteString("- Existing codebase implementation\n")
	spec.WriteString("- Current system architecture\n\n")
	// Implementation Checklist (from actual tokens)
	spec.WriteString("## Implementation Checklist\n\n")
	spec.WriteString("The following features were detected in the codebase:\n\n")
	for _, token := range orphan.Features {
		// Generate CANARY token: TEST/BENCH fields are emitted only when
		// present on the source token; UPDATED is stamped with today.
		tokenStr := fmt.Sprintf("<!-- CANARY: REQ=%s; FEATURE=\"%s\"; ASPECT=%s; STATUS=%s",
			orphan.ReqID, token.Feature, token.Aspect, token.Status)
		if token.Test != "" {
			tokenStr += fmt.Sprintf("; TEST=%s", token.Test)
		}
		if token.Bench != "" {
			tokenStr += fmt.Sprintf("; BENCH=%s", token.Bench)
		}
		tokenStr += fmt.Sprintf("; UPDATED=%s -->", today)
		spec.WriteString(tokenStr + "\n")
		spec.WriteString(fmt.Sprintf("**%s (%s)**\n", token.Feature, token.Aspect))
		spec.WriteString(fmt.Sprintf("- [%s] Status: %s\n", getCheckbox(token.Status), token.Status))
		spec.WriteString(fmt.Sprintf("- **Location:** %s:%d\n", token.FilePath, token.LineNumber))
		if token.Test != "" {
			spec.WriteString(fmt.Sprintf("- **Test:** %s\n", token.Test))
		}
		if token.Bench != "" {
			spec.WriteString(fmt.Sprintf("- **Benchmark:** %s\n", token.Bench))
		}
		spec.WriteString("\n")
	}
	// Review Checklist
	spec.WriteString("---\n\n")
	spec.WriteString("## Review & Update Checklist\n\n")
	spec.WriteString("**Post-Migration Tasks:**\n")
	spec.WriteString("- [ ] Review and update Overview section with actual purpose\n")
	spec.WriteString("- [ ] Add detailed user stories based on actual functionality\n")
	spec.WriteString("- [ ] Update functional requirements with specific details\n")
	spec.WriteString("- [ ] Define proper success criteria\n")
	spec.WriteString("- [ ] Add realistic test scenarios\n")
	spec.WriteString("- [ ] Document dependencies and constraints\n")
	spec.WriteString("- [ ] Remove this migration notice\n\n")
	spec.WriteString("---\n\n")
	spec.WriteString("_This specification was auto-generated by `canary migrate` on " + today + "_\n")
	return spec.String(), nil
}
// getCheckbox returns the markdown checkbox mark for a token status:
// "x" for completed statuses (IMPL, TESTED, BENCHED), " " otherwise.
func getCheckbox(status string) string {
	switch status {
	case "IMPL", "TESTED", "BENCHED":
		return "x"
	default:
		return " "
	}
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
// CANARY: REQ=CBIN-139; FEATURE="AspectIDGenerator"; ASPECT=Engine; STATUS=IMPL; UPDATED=2025-10-16
package reqid
import (
"fmt"
"os"
"path/filepath"
"strconv"
"strings"
)
// GenerateNextID generates the next requirement ID for a given aspect.
// It scans .canary/specs for directories named <KEY>-<ASPECT>-<NNN>[-slug],
// finds the highest NNN for the aspect (case-insensitively), and returns
// the ID one above it, zero-padded to three digits.
func GenerateNextID(key, aspect string) (string, error) {
	if err := ValidateAspect(aspect); err != nil {
		return "", err
	}
	// Normalize aspect to canonical casing
	aspect = NormalizeAspect(aspect)

	specsDir := filepath.Join(".canary", "specs")
	if _, err := os.Stat(specsDir); os.IsNotExist(err) {
		return "", fmt.Errorf("specs directory not found: %s", specsDir)
	}
	entries, err := os.ReadDir(specsDir)
	if err != nil {
		return "", fmt.Errorf("failed to read specs directory: %w", err)
	}

	// Compare lowercased prefixes so existing directories match regardless
	// of the casing they were created with.
	prefix := strings.ToLower(fmt.Sprintf("%s-%s-", key, aspect))
	highest := 0
	for _, entry := range entries {
		if !entry.IsDir() {
			continue
		}
		name := entry.Name()
		if !strings.HasPrefix(strings.ToLower(name), prefix) {
			continue
		}
		// Layout: <KEY>-<ASPECT>-<ID>[-slug...]; the ID is the first
		// dash-separated field after the prefix. Non-numeric IDs are skipped.
		idField := strings.SplitN(name[len(prefix):], "-", 2)[0]
		if n, err := strconv.Atoi(idField); err == nil && n > highest {
			highest = n
		}
	}
	// Format as 3-digit zero-padded string
	return fmt.Sprintf("%s-%s-%03d", key, aspect, highest+1), nil
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
// CANARY: REQ=CBIN-139; FEATURE="AspectIDParser"; ASPECT=Engine; STATUS=IMPL; UPDATED=2025-10-16
package reqid
import (
"fmt"
"regexp"
)
// RequirementID represents a parsed requirement identifier.
// Produced by ParseRequirementID; round-trips through String().
type RequirementID struct {
	Key    string // e.g., "CBIN"
	Aspect string // e.g., "CLI", "API" (empty for v1 format)
	ID     string // e.g., "001", "042" (always three digits)
	Format string // "v1" (CBIN-XXX) or "v2" (CBIN-<ASPECT>-XXX)
}
var (
	// Pattern for new format: CBIN-CLI-001
	// (uppercase key, alphabetic aspect, exactly three digits).
	v2Pattern = regexp.MustCompile(`^([A-Z]+)-([A-Za-z]+)-(\d{3})$`)
	// Pattern for old format: CBIN-001 (uppercase key, three digits).
	v1Pattern = regexp.MustCompile(`^([A-Z]+)-(\d{3})$`)
)
// ParseRequirementID parses a requirement ID string into its components.
// The aspect-qualified v2 format (CBIN-CLI-001) is preferred; the legacy
// v1 format (CBIN-001) is accepted for backward compatibility. A v2 match
// with an unrecognized aspect is an error, not a fallback to v1.
func ParseRequirementID(reqID string) (*RequirementID, error) {
	if m := v2Pattern.FindStringSubmatch(reqID); m != nil {
		if err := ValidateAspect(m[2]); err != nil {
			return nil, fmt.Errorf("invalid aspect %q: %w", m[2], err)
		}
		return &RequirementID{
			Key:    m[1],
			Aspect: m[2],
			ID:     m[3],
			Format: "v2",
		}, nil
	}
	if m := v1Pattern.FindStringSubmatch(reqID); m != nil {
		return &RequirementID{
			Key:    m[1],
			ID:     m[2],
			Format: "v1",
		}, nil
	}
	return nil, fmt.Errorf("invalid requirement ID format: %q", reqID)
}
// String returns the string representation of the RequirementID:
// "KEY-ASPECT-ID" for v2 values with an aspect, "KEY-ID" otherwise.
func (r *RequirementID) String() string {
	if r.Format == "v2" && r.Aspect != "" {
		return r.Key + "-" + r.Aspect + "-" + r.ID
	}
	return r.Key + "-" + r.ID
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
// CANARY: REQ=CBIN-139; FEATURE="AspectValidator"; ASPECT=Engine; STATUS=IMPL; UPDATED=2025-10-16
package reqid
import (
"fmt"
"strings"
"go.devnw.com/canary/internal/matcher"
)
// validAspects defines all valid aspect values with their canonical casing.
// This list is the single source of truth for ValidateAspect, SuggestAspect
// and NormalizeAspect.
var validAspects = []string{
	"API",
	"CLI",
	"Engine",
	"Storage",
	"Security",
	"Docs",
	"Wire",
	"Planner",
	"Decode",
	"Encode",
	"RoundTrip",
	"Bench",
	"FrontEnd",
	"Dist",
}
// ValidateAspect checks if the given aspect is valid.
// Accepts: exact canonical form, all-lowercase, or all-uppercase.
// Rejects: partial capitalizations (e.g., "Frontend" when it should be
// "FrontEnd"); for those, the error includes a fuzzy-match suggestion
// when one is available.
func ValidateAspect(aspect string) error {
	if aspect == "" {
		return fmt.Errorf("aspect cannot be empty")
	}
	for _, canonical := range validAspects {
		// Only these three spellings are acceptable; a case-insensitive
		// comparison would wrongly accept mixed casings.
		switch aspect {
		case canonical, strings.ToLower(canonical), strings.ToUpper(canonical):
			return nil
		}
	}
	if hint := SuggestAspect(aspect); hint != "" {
		return fmt.Errorf("invalid aspect %q, did you mean: %s", aspect, hint)
	}
	return fmt.Errorf("invalid aspect %q", aspect)
}
// SuggestAspect returns the best fuzzy-match suggestion for an invalid
// aspect, or "" when no candidate scores at least 60 (the threshold for a
// decent match) or the input is empty.
func SuggestAspect(typo string) string {
	if typo == "" {
		return ""
	}
	best, bestScore := "", 0
	for _, candidate := range validAspects {
		if s := matcher.ScoreMatch(typo, candidate); s > bestScore {
			best, bestScore = candidate, s
		}
	}
	if bestScore < 60 {
		return ""
	}
	return best
}
// NormalizeAspect normalizes aspect casing to the canonical form.
// Unknown values (including the empty string) are returned unchanged.
func NormalizeAspect(input string) string {
	for _, canonical := range validAspects {
		if strings.EqualFold(input, canonical) {
			return canonical
		}
	}
	return input
}
package specs
import (
"fmt"
"strings"
)
// CANARY: REQ=CBIN-147; FEATURE="GraphGenerator"; ASPECT=Engine; STATUS=TESTED; TEST=TestBuildGraphFromSpecs,TestGetTransitiveDependencies,TestFormatASCIITree; UPDATED=2025-10-18
// SpecLoader is an interface for loading dependencies from spec files.
type SpecLoader interface {
	// LoadDependencies loads all dependencies for a given requirement ID.
	// Callers such as BuildGraph treat a returned error as "no dependencies"
	// rather than a fatal condition.
	LoadDependencies(reqID string) ([]Dependency, error)
}
// StatusCheckerInterface is implemented by checkers that report whether a
// dependency is satisfied.
// NOTE(review): the -Interface suffix is non-idiomatic Go naming, presumably
// chosen to avoid colliding with a concrete StatusChecker type elsewhere —
// renaming would break existing callers, so it is kept.
type StatusCheckerInterface interface {
	// IsDependencySatisfied checks if a dependency is satisfied.
	IsDependencySatisfied(dep Dependency) bool
}
// GraphGenerator builds and visualizes dependency graphs.
type GraphGenerator struct {
	specLoader    SpecLoader             // source of per-requirement dependency lists
	statusChecker StatusCheckerInterface // optional; nil until SetStatusChecker is called
}
// NewGraphGenerator creates a new graph generator backed by loader.
// The status checker is optional and starts nil; configure it later via
// SetStatusChecker.
func NewGraphGenerator(loader SpecLoader) *GraphGenerator {
	return &GraphGenerator{specLoader: loader}
}
// SetStatusChecker configures the generator to show dependency status in
// visualizations. Passing nil clears any previously set checker.
func (gg *GraphGenerator) SetStatusChecker(checker StatusCheckerInterface) {
	gg.statusChecker = checker
}
// BuildGraph builds a complete dependency graph from a list of requirement
// IDs. Dependencies are loaded per requirement; requirements whose
// dependencies cannot be loaded simply contribute no edges (they may have no
// spec at all).
//
// FIX: the original returned a non-nil (partially-constructed) graph together
// with a non-nil error when no spec loader was configured; by Go convention
// the result is meaningless on error, so nil is returned instead.
func (gg *GraphGenerator) BuildGraph(reqIDs []string) (*DependencyGraph, error) {
	if gg.specLoader == nil {
		return nil, fmt.Errorf("spec loader not configured")
	}
	graph := NewDependencyGraph()
	for _, reqID := range reqIDs {
		deps, err := gg.specLoader.LoadDependencies(reqID)
		if err != nil {
			// Best-effort: skip requirements that can't be loaded
			// (may not have deps).
			continue
		}
		for _, dep := range deps {
			graph.AddDependency(dep)
		}
	}
	return graph, nil
}
// GetTransitiveDependencies returns every requirement reachable from reqID in
// the dependency graph. It performs a breadth-first traversal, deduplicating
// results via a set; the returned order is unspecified.
func (gg *GraphGenerator) GetTransitiveDependencies(graph *DependencyGraph, reqID string) []string {
	expanded := make(map[string]bool)  // nodes whose dependencies were already explored
	reachable := make(map[string]bool) // dependency targets collected so far
	frontier := []string{reqID}
	for len(frontier) > 0 {
		node := frontier[0]
		frontier = frontier[1:]
		if expanded[node] {
			continue
		}
		expanded[node] = true
		for _, dep := range graph.GetDependencies(node) {
			reachable[dep.Target] = true
			if !expanded[dep.Target] {
				frontier = append(frontier, dep.Target)
			}
		}
	}
	out := make([]string, 0, len(reachable))
	for id := range reachable {
		out = append(out, id)
	}
	return out
}
// GetDependencyDepth returns the maximum depth of the dependency tree rooted
// at reqID — the longest path from the root to any leaf. A requirement with no
// dependencies has depth 0.
func (gg *GraphGenerator) GetDependencyDepth(graph *DependencyGraph, reqID string) int {
	return gg.getDependencyDepthRecursive(graph, reqID, make(map[string]bool))
}
// getDependencyDepthRecursive computes the depth of reqID's dependency subtree
// via DFS. visited tracks the current path: re-entering a node already on the
// path (a cycle) contributes depth 0, and nodes are removed on the way back up
// so sibling branches may traverse them independently.
func (gg *GraphGenerator) getDependencyDepthRecursive(graph *DependencyGraph, reqID string, visited map[string]bool) int {
	if visited[reqID] {
		// Cycle guard: do not recurse into a node already on this path.
		return 0
	}
	visited[reqID] = true
	deps := graph.GetDependencies(reqID)
	if len(deps) == 0 {
		return 0
	}
	deepest := 0
	for _, dep := range deps {
		if d := gg.getDependencyDepthRecursive(graph, dep.Target, visited); d > deepest {
			deepest = d
		}
	}
	delete(visited, reqID) // backtrack so other branches may revisit this node
	return deepest + 1
}
// FormatASCIITree generates an ASCII-art tree visualization of the dependency
// graph rooted at rootReqID, using Unicode box-drawing characters and — when a
// status checker is configured — per-dependency status indicators.
//
// Example output:
//
//	CBIN-147
//	├── CBIN-146 ✅
//	│   └── CBIN-129 ✅
//	└── CBIN-145 ❌
func (gg *GraphGenerator) FormatASCIITree(graph *DependencyGraph, rootReqID string) string {
	deps := graph.GetDependencies(rootReqID)
	if len(deps) == 0 {
		return fmt.Sprintf("%s\n No dependencies", rootReqID)
	}
	var lines []string
	lines = append(lines, rootReqID)
	// visited guards against infinite recursion on cyclic graphs.
	visited := make(map[string]bool)
	gg.formatTreeRecursive(graph, deps, "", true, &lines, visited)
	return strings.Join(lines, "\n")
}
func (gg *GraphGenerator) formatTreeRecursive(graph *DependencyGraph, deps []Dependency, prefix string, isLast bool, lines *[]string, visited map[string]bool) {
for i, dep := range deps {
isLastDep := i == len(deps)-1
// Build the tree characters
var connector, childPrefix string
if isLastDep {
connector = "βββ "
childPrefix = prefix + " "
} else {
connector = "βββ "
childPrefix = prefix + "β "
}
// Format the dependency line
line := prefix + connector + dep.Target
// Add type annotation if not full
if dep.Type == DependencyTypePartialFeatures {
line += fmt.Sprintf(":%s", strings.Join(dep.RequiredFeatures, ","))
} else if dep.Type == DependencyTypePartialAspect {
line += fmt.Sprintf(":%s", dep.RequiredAspect)
}
// Add status indicator if checker is configured
if gg.statusChecker != nil {
if gg.statusChecker.IsDependencySatisfied(dep) {
line += " β
"
} else {
line += " β"
}
}
*lines = append(*lines, line)
// Recursively process children (avoid cycles)
if !visited[dep.Target] {
visited[dep.Target] = true
childDeps := graph.GetDependencies(dep.Target)
if len(childDeps) > 0 {
gg.formatTreeRecursive(graph, childDeps, childPrefix, isLastDep, lines, visited)
}
}
}
}
// FormatDependencyChain formats a list of requirement IDs as an arrow-joined
// chain.
// Example: "CBIN-147 → CBIN-146 → CBIN-129"
//
// Fix: the arrow separator was corrupted (mojibake) in the source; restored to
// the intended " → " separator shown in the original doc comment's example.
func (gg *GraphGenerator) FormatDependencyChain(reqIDs []string) string {
	return strings.Join(reqIDs, " → ")
}
// FormatCompactList formats reqID's direct dependencies as a comma-separated
// list of target IDs, or the literal "none" when there are no dependencies.
// Example: "CBIN-146, CBIN-145, CBIN-129"
func (gg *GraphGenerator) FormatCompactList(graph *DependencyGraph, reqID string) string {
	deps := graph.GetDependencies(reqID)
	if len(deps) == 0 {
		return "none"
	}
	targets := make([]string, 0, len(deps))
	for _, dep := range deps {
		targets = append(targets, dep.Target)
	}
	return strings.Join(targets, ", ")
}
// FormatDependencySummary generates a multi-line summary for reqID covering
// the direct dependency count, transitive count, maximum tree depth, and —
// when present — an itemized list of direct dependencies annotated with any
// partial-dependency details.
func (gg *GraphGenerator) FormatDependencySummary(graph *DependencyGraph, reqID string) string {
	direct := graph.GetDependencies(reqID)
	out := []string{
		fmt.Sprintf("Dependency Summary for %s:", reqID),
		fmt.Sprintf(" Direct dependencies: %d", len(direct)),
		fmt.Sprintf(" Transitive dependencies: %d", len(gg.GetTransitiveDependencies(graph, reqID))),
		fmt.Sprintf(" Maximum depth: %d", gg.GetDependencyDepth(graph, reqID)),
	}
	if len(direct) > 0 {
		out = append(out, "\nDirect dependencies:")
		for _, dep := range direct {
			entry := fmt.Sprintf(" - %s", dep.Target)
			// Annotate partial dependencies with their scope.
			switch dep.Type {
			case DependencyTypePartialFeatures:
				entry += fmt.Sprintf(" (features: %s)", strings.Join(dep.RequiredFeatures, ", "))
			case DependencyTypePartialAspect:
				entry += fmt.Sprintf(" (aspect: %s)", dep.RequiredAspect)
			}
			out = append(out, entry)
		}
	}
	return strings.Join(out, "\n")
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
// CANARY: REQ=CBIN-134; FEATURE="ExactIDLookup"; ASPECT=Engine; STATUS=IMPL; UPDATED=2025-10-16
package specs
import (
"fmt"
"os"
"path/filepath"
"go.devnw.com/canary/internal/matcher"
"go.devnw.com/canary/internal/storage"
)
// FindSpecByID locates a spec.md file by exact requirement ID using the glob
// pattern .canary/specs/<reqID>-*/spec.md.
// It errors on an empty reqID, when no spec matches, or when more than one
// spec matches (ambiguous).
func FindSpecByID(reqID string) (string, error) {
	if reqID == "" {
		return "", fmt.Errorf("requirement ID cannot be empty")
	}
	pattern := filepath.Join(".canary/specs", reqID+"-*", "spec.md")
	matches, err := filepath.Glob(pattern)
	if err != nil {
		return "", fmt.Errorf("glob pattern error: %w", err)
	}
	switch len(matches) {
	case 0:
		return "", fmt.Errorf("spec not found for %s", reqID)
	case 1:
		return matches[0], nil
	default:
		return "", fmt.Errorf("multiple specs found for %s (ambiguous)", reqID)
	}
}
// FindSpecBySearch performs fuzzy search across the spec directories, reusing
// the CBIN-133 fuzzy matcher for scoring and ranking. Returns up to limit
// matches, or an error when the specs directory is missing or unreadable.
func FindSpecBySearch(query string, limit int) ([]matcher.Match, error) {
	const specsDir = ".canary/specs"
	// Verify the specs directory is accessible before searching.
	if _, err := os.Stat(specsDir); err != nil {
		if os.IsNotExist(err) {
			return nil, fmt.Errorf("specs directory not found: %s", specsDir)
		}
		return nil, fmt.Errorf("failed to access specs directory: %w", err)
	}
	matches, err := matcher.FindBestMatches(query, specsDir, limit)
	if err != nil {
		return nil, fmt.Errorf("fuzzy search failed: %w", err)
	}
	return matches, nil
}
// FindSpecInDB queries the token database for reqID and resolves the matching
// spec.md path on disk. When the database is unavailable an error is returned
// and the caller should fall back to FindSpecByID.
func FindSpecInDB(db *storage.DB, reqID string) (string, error) {
	switch {
	case db == nil:
		return "", fmt.Errorf("database not available")
	case reqID == "":
		return "", fmt.Errorf("requirement ID cannot be empty")
	}
	// Confirm the requirement is known to the database.
	// Note: relies on the CBIN-123 TokenStorage implementation.
	tokens, err := db.GetTokensByReqID(reqID)
	if err != nil {
		return "", fmt.Errorf("database query failed: %w", err)
	}
	if len(tokens) == 0 {
		return "", fmt.Errorf("spec not found in database: %s", reqID)
	}
	// Resolve the spec.md file from the spec directory naming convention.
	matches, err := filepath.Glob(fmt.Sprintf(".canary/specs/%s-*/spec.md", reqID))
	if err != nil {
		return "", fmt.Errorf("glob pattern error: %w", err)
	}
	if len(matches) == 0 {
		return "", fmt.Errorf("spec file not found for %s (database entry exists but no spec.md)", reqID)
	}
	return matches[0], nil
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
// CANARY: REQ=CBIN-134; FEATURE="SectionLoader"; ASPECT=Engine; STATUS=IMPL; UPDATED=2025-10-16
package specs
import (
"fmt"
"strings"
)
// ParseSections extracts the requested sections from markdown content.
// When sections is empty the full content is returned untouched. Metadata —
// everything before the first "##" header — is always preserved at the top.
// Section matching is case-insensitive and also succeeds on substring matches,
// so "user stories" matches "## User Stories".
// Returns an error when none of the requested sections are found.
func ParseSections(content string, sections []string) (string, error) {
	if len(sections) == 0 {
		return content, nil // no filter requested
	}
	lines := strings.Split(content, "\n")
	var out strings.Builder
	// Always keep the metadata block above the first section header.
	for _, line := range lines {
		if strings.HasPrefix(line, "##") {
			break
		}
		out.WriteString(line)
		out.WriteString("\n")
	}
	// Walk the document, toggling capture at each "## " header.
	matched := false
	include := false
	for _, line := range lines {
		if strings.HasPrefix(line, "## ") {
			name := strings.ToLower(strings.TrimSpace(strings.TrimPrefix(line, "## ")))
			include = false
			for _, want := range sections {
				wantLower := strings.ToLower(want)
				if name == wantLower || strings.Contains(name, wantLower) {
					include = true
					matched = true
					break
				}
			}
		}
		if include {
			out.WriteString(line)
			out.WriteString("\n")
		}
	}
	if !matched {
		return "", fmt.Errorf("no matching sections found for: %v", sections)
	}
	return out.String(), nil
}
// ListSections returns every "## "-level section header found in content, with
// the "## " prefix removed and surrounding whitespace trimmed.
// The error result is always nil; it is retained for interface stability.
func ListSections(content string) ([]string, error) {
	var headers []string
	for _, line := range strings.Split(content, "\n") {
		if strings.HasPrefix(line, "## ") {
			headers = append(headers, strings.TrimSpace(line[len("## "):]))
		}
	}
	return headers, nil
}
package specs
import (
"bufio"
"fmt"
"io"
"os"
"regexp"
"strings"
)
// CANARY: REQ=CBIN-147; FEATURE="DependencyParser"; ASPECT=Engine; STATUS=TESTED; TEST=TestParseDependencies_FullDependency,TestParseDependencies_PartialFeatures,TestParseDependencies_PartialAspect,TestParseDependencies_MixedTypes; UPDATED=2025-10-18
var (
	// fullDependencyPattern matches full dependencies with an optional
	// parenthesized description, e.g. "- CBIN-123 (Description)".
	// Capture groups: 1 = requirement ID, 2 = description (may be empty).
	fullDependencyPattern = regexp.MustCompile(`^-\s+(CBIN-\d+)\s*(?:\(([^)]+)\))?`)
	// partialDependencyPattern matches partial dependencies that carry a colon
	// qualifier, e.g. "- CBIN-123:Feature1,Feature2 (Description)" for partial
	// features or "- CBIN-123:AspectName (Description)" for a partial aspect.
	// Capture groups: 1 = requirement ID, 2 = features/aspect, 3 = description.
	partialDependencyPattern = regexp.MustCompile(`^-\s+(CBIN-\d+):([^(\s]+)\s*(?:\(([^)]+)\))?`)
)
// ParseDependenciesFromFile opens the spec.md at specPath and extracts all
// dependency declarations for sourceReqID. It returns an error when the file
// cannot be opened or read.
func ParseDependenciesFromFile(sourceReqID, specPath string) ([]Dependency, error) {
	f, err := os.Open(specPath)
	if err != nil {
		return nil, fmt.Errorf("failed to open spec file %s: %w", specPath, err)
	}
	defer f.Close()
	return ParseDependencies(sourceReqID, f)
}
// ParseDependencies parses dependency declarations from a spec.md reader.
// It scans for the "## Dependencies" section and reads "- CBIN-NNN ..." list
// items until the next "## " section header.
//
// Supported formats:
//   - Full: "- CBIN-123 (Description)"
//   - Partial Features: "- CBIN-123:Feature1,Feature2 (Description)"
//   - Partial Aspect: "- CBIN-123:AspectName (Description)"
//
// Returns an empty slice when no dependencies are found.
func ParseDependencies(sourceReqID string, reader io.Reader) ([]Dependency, error) {
	var deps []Dependency
	inSection := false
	scanner := bufio.NewScanner(reader)
	for scanner.Scan() {
		line := strings.TrimSpace(scanner.Text())
		// Entering the Dependencies section.
		if strings.HasPrefix(line, "## Dependencies") {
			inSection = true
			continue
		}
		if !inSection {
			continue
		}
		// Any other "## " header ends the section.
		if strings.HasPrefix(line, "## ") {
			break
		}
		// "###" subsection headers are informational; skip them.
		if strings.HasPrefix(line, "###") {
			continue
		}
		// Only "- ..." list items declare dependencies.
		if line == "" || !strings.HasPrefix(line, "-") {
			continue
		}
		// A partial dependency (with a colon qualifier) takes precedence.
		if m := partialDependencyPattern.FindStringSubmatch(line); m != nil {
			desc := ""
			if len(m) > 3 {
				desc = strings.TrimSpace(m[3])
			}
			deps = append(deps, parseDependency(sourceReqID, m[1], m[2], desc))
			continue
		}
		// Otherwise try to read it as a full dependency.
		if m := fullDependencyPattern.FindStringSubmatch(line); m != nil {
			desc := ""
			if len(m) > 2 {
				desc = strings.TrimSpace(m[2])
			}
			deps = append(deps, Dependency{
				Source:      sourceReqID,
				Target:      m[1],
				Type:        DependencyTypeFull,
				Description: desc,
			})
		}
	}
	if err := scanner.Err(); err != nil {
		return nil, fmt.Errorf("error reading spec file: %w", err)
	}
	return deps, nil
}
// parseDependency classifies a partial dependency based on the text after the
// colon:
//   - a comma-separated list -> PartialFeatures with each feature trimmed
//   - a known aspect name    -> PartialAspect
//   - anything else          -> PartialFeatures with that single feature
func parseDependency(sourceReqID, targetReqID, featuresOrAspect, description string) Dependency {
	// Multiple comma-separated entries are always feature names.
	if strings.Contains(featuresOrAspect, ",") {
		parts := strings.Split(featuresOrAspect, ",")
		for i, p := range parts {
			parts[i] = strings.TrimSpace(p)
		}
		return Dependency{
			Source:           sourceReqID,
			Target:           targetReqID,
			Type:             DependencyTypePartialFeatures,
			RequiredFeatures: parts,
			Description:      description,
		}
	}
	// A single token that names a known aspect becomes an aspect dependency.
	name := strings.TrimSpace(featuresOrAspect)
	if isKnownAspect(name) {
		return Dependency{
			Source:         sourceReqID,
			Target:         targetReqID,
			Type:           DependencyTypePartialAspect,
			RequiredAspect: name,
			Description:    description,
		}
	}
	// Fallback: treat the token as a single required feature.
	return Dependency{
		Source:           sourceReqID,
		Target:           targetReqID,
		Type:             DependencyTypePartialFeatures,
		RequiredFeatures: []string{name},
		Description:      description,
	}
}
// isKnownAspect reports whether s matches a known CANARY aspect name.
// Valid aspects from spec: API, CLI, Engine, Storage, Security, Docs, Wire,
// Planner, Decode, Encode, RoundTrip, Bench, FrontEnd, Dist.
//
// Uses a switch over the constant set instead of rebuilding a map literal on
// every call, avoiding a per-call map allocation. Matching remains
// case-sensitive, as before.
func isKnownAspect(s string) bool {
	switch s {
	case "API", "CLI", "Engine", "Storage", "Security", "Docs", "Wire",
		"Planner", "Decode", "Encode", "RoundTrip", "Bench", "FrontEnd", "Dist":
		return true
	}
	return false
}
package specs
import (
"fmt"
"strings"
)
// CANARY: REQ=CBIN-147; FEATURE="StatusChecker"; ASPECT=Engine; STATUS=TESTED; TEST=TestCheckDependencyStatus_FullSatisfied,TestCheckDependencyStatus_FullBlocked,TestCheckDependencyStatus_PartialFeaturesSatisfied,TestCheckDependencyStatus_PartialFeaturesBlocked; UPDATED=2025-10-18

// TokenProvider is an interface for retrieving CANARY tokens from storage.
// It decouples the status checker from any concrete storage backend.
type TokenProvider interface {
	// GetTokensByReqID returns all CANARY tokens for a given requirement ID.
	GetTokensByReqID(reqID string) []TokenInfo
}

// StatusChecker checks whether dependencies are satisfied based on CANARY
// token status (only TESTED/BENCHED tokens satisfy a dependency; see
// isStatusSatisfied).
type StatusChecker struct {
	tokenProvider TokenProvider // source of token status data
}

// NewStatusChecker creates a new status checker backed by provider.
func NewStatusChecker(provider TokenProvider) *StatusChecker {
	return &StatusChecker{
		tokenProvider: provider,
	}
}
// CheckDependency evaluates whether a single dependency is satisfied.
// Satisfaction rules:
//   - Full: all features of the target must be TESTED or BENCHED
//   - PartialFeatures: all RequiredFeatures must be TESTED or BENCHED
//   - PartialAspect: all features of RequiredAspect must be TESTED or BENCHED
//
// IMPL status is NOT sufficient — dependencies require tests.
func (sc *StatusChecker) CheckDependency(dep Dependency) DependencyStatus {
	tokens := sc.tokenProvider.GetTokensByReqID(dep.Target)
	// A target with no tokens at all is treated as missing.
	if len(tokens) == 0 {
		return DependencyStatus{
			Dependency:    dep,
			IsSatisfied:   false,
			Blocking:      true,
			Message:       fmt.Sprintf("Requirement %s not found or has no tokens", dep.Target),
			CurrentStatus: "MISSING",
		}
	}
	// Dispatch on dependency type.
	switch dep.Type {
	case DependencyTypeFull:
		return sc.checkFullDependency(dep, tokens)
	case DependencyTypePartialFeatures:
		return sc.checkPartialFeaturesDependency(dep, tokens)
	case DependencyTypePartialAspect:
		return sc.checkPartialAspectDependency(dep, tokens)
	}
	return DependencyStatus{
		Dependency:  dep,
		IsSatisfied: false,
		Blocking:    true,
		Message:     fmt.Sprintf("Unknown dependency type: %v", dep.Type),
	}
}
// checkFullDependency verifies that every feature of the target requirement is
// TESTED or BENCHED. Any feature in a lesser status blocks the dependency.
//
// Fix: removed the allStatuses map, which was populated on every call but
// never read (dead work and an unnecessary per-call allocation).
func (sc *StatusChecker) checkFullDependency(dep Dependency, tokens []TokenInfo) DependencyStatus {
	// Collect the features whose status does not satisfy the dependency.
	var missingFeatures []string
	for _, token := range tokens {
		if !isStatusSatisfied(token.Status) {
			missingFeatures = append(missingFeatures, token.Feature)
		}
	}
	if len(missingFeatures) == 0 {
		return DependencyStatus{
			Dependency:    dep,
			IsSatisfied:   true,
			Blocking:      false,
			Message:       fmt.Sprintf("All features of %s are satisfied", dep.Target),
			CurrentStatus: "SATISFIED",
		}
	}
	return DependencyStatus{
		Dependency:      dep,
		IsSatisfied:     false,
		Blocking:        true,
		Message:         fmt.Sprintf("%s has %d feature(s) not yet TESTED/BENCHED", dep.Target, len(missingFeatures)),
		MissingFeatures: missingFeatures,
		CurrentStatus:   "PARTIAL",
	}
}
// checkPartialFeaturesDependency verifies that every feature named in
// dep.RequiredFeatures exists on the target and is TESTED or BENCHED.
func (sc *StatusChecker) checkPartialFeaturesDependency(dep Dependency, tokens []TokenInfo) DependencyStatus {
	// Index the target's features by status for O(1) lookups.
	statusByFeature := make(map[string]string, len(tokens))
	for _, t := range tokens {
		statusByFeature[t.Feature] = t.Status
	}
	// A required feature is missing when absent or not yet TESTED/BENCHED.
	var missing []string
	for _, want := range dep.RequiredFeatures {
		if st, ok := statusByFeature[want]; !ok || !isStatusSatisfied(st) {
			missing = append(missing, want)
		}
	}
	if len(missing) > 0 {
		return DependencyStatus{
			Dependency:      dep,
			IsSatisfied:     false,
			Blocking:        true,
			Message:         fmt.Sprintf("%s missing features: %v", dep.Target, missing),
			MissingFeatures: missing,
			CurrentStatus:   "PARTIAL",
		}
	}
	return DependencyStatus{
		Dependency:    dep,
		IsSatisfied:   true,
		Blocking:      false,
		Message:       fmt.Sprintf("Required features %v of %s are satisfied", dep.RequiredFeatures, dep.Target),
		CurrentStatus: "SATISFIED",
	}
}
// checkPartialAspectDependency verifies that every feature belonging to the
// required aspect of the target is TESTED or BENCHED. An aspect with no
// features at all is reported as MISSING.
func (sc *StatusChecker) checkPartialAspectDependency(dep Dependency, tokens []TokenInfo) DependencyStatus {
	// Restrict to tokens within the required aspect.
	var inAspect []TokenInfo
	for _, t := range tokens {
		if t.Aspect == dep.RequiredAspect {
			inAspect = append(inAspect, t)
		}
	}
	if len(inAspect) == 0 {
		return DependencyStatus{
			Dependency:    dep,
			IsSatisfied:   false,
			Blocking:      true,
			Message:       fmt.Sprintf("No features found for aspect %s in %s", dep.RequiredAspect, dep.Target),
			CurrentStatus: "MISSING",
		}
	}
	// Collect the aspect's features that do not yet satisfy the dependency.
	var missing []string
	for _, t := range inAspect {
		if !isStatusSatisfied(t.Status) {
			missing = append(missing, t.Feature)
		}
	}
	if len(missing) > 0 {
		return DependencyStatus{
			Dependency:      dep,
			IsSatisfied:     false,
			Blocking:        true,
			Message:         fmt.Sprintf("%s aspect %s has unsatisfied features: %v", dep.Target, dep.RequiredAspect, missing),
			MissingFeatures: missing,
			CurrentStatus:   "PARTIAL",
		}
	}
	return DependencyStatus{
		Dependency:    dep,
		IsSatisfied:   true,
		Blocking:      false,
		Message:       fmt.Sprintf("All features of aspect %s in %s are satisfied", dep.RequiredAspect, dep.Target),
		CurrentStatus: "SATISFIED",
	}
}
// CheckAllDependencies evaluates each dependency and returns one status per
// entry, in the same order as deps.
func (sc *StatusChecker) CheckAllDependencies(deps []Dependency) []DependencyStatus {
	out := make([]DependencyStatus, len(deps))
	for i := range deps {
		out[i] = sc.CheckDependency(deps[i])
	}
	return out
}
// GetBlockingDependencies returns the statuses of only those dependencies that
// are currently blocking (i.e. not satisfied).
func (sc *StatusChecker) GetBlockingDependencies(deps []Dependency) []DependencyStatus {
	var out []DependencyStatus
	for _, d := range deps {
		if st := sc.CheckDependency(d); st.Blocking {
			out = append(out, st)
		}
	}
	return out
}
// FormatBlockingReport generates a human-readable report of blocking dependencies.
func (sc *StatusChecker) FormatBlockingReport(deps []Dependency) string {
blocking := sc.GetBlockingDependencies(deps)
if len(blocking) == 0 {
return "All dependencies are satisfied β
"
}
var lines []string
lines = append(lines, fmt.Sprintf("β οΈ %d blocking dependencies:", len(blocking)))
for i, status := range blocking {
lines = append(lines, fmt.Sprintf("\n%d. %s -> %s (%s)",
i+1, status.Dependency.Source, status.Dependency.Target, status.Dependency.Type))
lines = append(lines, fmt.Sprintf(" Status: %s", status.Message))
if len(status.MissingFeatures) > 0 {
lines = append(lines, fmt.Sprintf(" Missing: %s", strings.Join(status.MissingFeatures, ", ")))
}
}
return strings.Join(lines, "\n")
}
// isStatusSatisfied reports whether status satisfies dependency requirements.
// Only TESTED and BENCHED qualify; IMPL (and anything else) is insufficient.
func isStatusSatisfied(status string) bool {
	switch status {
	case "TESTED", "BENCHED":
		return true
	}
	return false
}
package specs
// CANARY: REQ=CBIN-147; FEATURE="DependencyModel"; ASPECT=Storage; STATUS=TESTED; TEST=TestDependencyCreation,TestPartialFeatureDependency,TestPartialAspectDependency; UPDATED=2025-10-18

// TokenInfo represents a CANARY token from storage.
// StatusChecker queries these to decide whether dependencies are satisfied.
type TokenInfo struct {
	ReqID   string // requirement ID the token belongs to (e.g. "CBIN-147")
	Feature string // feature name declared by the token
	Aspect  string // aspect the feature belongs to (e.g. "Engine")
	Status  string // lifecycle status (e.g. "IMPL", "TESTED", "BENCHED")
}
// DependencyType represents the type of dependency relationship between
// requirements.
type DependencyType int

const (
	// DependencyTypeFull indicates the entire target requirement must be
	// complete (all features in TESTED or BENCHED status).
	DependencyTypeFull DependencyType = iota
	// DependencyTypePartialFeatures indicates only specific features of the
	// target requirement must be complete.
	DependencyTypePartialFeatures
	// DependencyTypePartialAspect indicates all features of a specific aspect
	// of the target requirement must be complete.
	DependencyTypePartialAspect
)

// String returns a human-readable name for the DependencyType; values outside
// the defined range render as "Unknown".
func (dt DependencyType) String() string {
	names := [...]string{
		DependencyTypeFull:            "Full",
		DependencyTypePartialFeatures: "PartialFeatures",
		DependencyTypePartialAspect:   "PartialAspect",
	}
	if dt < 0 || int(dt) >= len(names) {
		return "Unknown"
	}
	return names[dt]
}
// Dependency represents a dependency relationship between two requirements.
// It captures the source requirement, target requirement, type of dependency,
// and any specific features or aspects that must be satisfied.
type Dependency struct {
	// Source is the requirement ID that has the dependency (e.g., "CBIN-147").
	Source string
	// Target is the requirement ID being depended upon (e.g., "CBIN-146").
	Target string
	// Type indicates whether this is a full, partial-feature, or aspect dependency.
	Type DependencyType
	// RequiredFeatures lists specific features needed for PartialFeatures dependencies.
	// Only populated when Type is DependencyTypePartialFeatures.
	RequiredFeatures []string
	// RequiredAspect specifies the aspect needed for PartialAspect dependencies.
	// Only populated when Type is DependencyTypePartialAspect.
	RequiredAspect string
	// Description provides human-readable context about why this dependency exists.
	// Optional; used for documentation purposes only.
	Description string
}

// DependencyStatus represents the current satisfaction status of a dependency.
// It indicates whether the dependency is satisfied and provides context.
type DependencyStatus struct {
	// Dependency is the dependency being evaluated.
	Dependency Dependency
	// IsSatisfied indicates whether the dependency requirements are met.
	// For Full: all features of target are TESTED or BENCHED.
	// For PartialFeatures: all RequiredFeatures are TESTED or BENCHED.
	// For PartialAspect: all features of RequiredAspect are TESTED or BENCHED.
	IsSatisfied bool
	// Blocking indicates whether this unsatisfied dependency blocks implementation.
	// Set to true when IsSatisfied is false.
	Blocking bool
	// Message provides a human-readable explanation of the status, e.g.
	// "All features of CBIN-146 are satisfied" or
	// "Requirement CBIN-999 not found or has no tokens".
	Message string
	// MissingFeatures lists features that are not yet in TESTED/BENCHED status.
	// NOTE(review): populated by all three check paths (full, partial-features,
	// and partial-aspect) when IsSatisfied is false — not only by
	// PartialFeatures as previously documented here.
	MissingFeatures []string
	// CurrentStatus summarizes the target's state as set by the checker
	// ("SATISFIED", "PARTIAL", or "MISSING"); used for debugging and reporting.
	CurrentStatus string
}
// DependencyGraph represents the complete dependency graph for all
// requirements, providing methods for querying, traversal, and (via
// DependencyValidator) cycle detection.
type DependencyGraph struct {
	// Nodes maps requirement IDs to their list of outgoing dependencies.
	// Key: source requirement ID (e.g., "CBIN-147").
	// Value: dependencies where that requirement is the source.
	Nodes map[string][]Dependency
}
// NewDependencyGraph returns an empty DependencyGraph ready for use.
func NewDependencyGraph() *DependencyGraph {
	g := &DependencyGraph{}
	g.Nodes = make(map[string][]Dependency)
	return g
}
// AddDependency records dep under its source requirement, lazily initializing
// the node map so a zero-value graph is still usable.
func (dg *DependencyGraph) AddDependency(dep Dependency) {
	if dg.Nodes == nil {
		dg.Nodes = map[string][]Dependency{}
	}
	dg.Nodes[dep.Source] = append(dg.Nodes[dep.Source], dep)
}
// GetDependencies returns all outgoing dependencies of reqID. When the
// requirement is not present in the graph, an empty (non-nil) slice is
// returned so callers can range over the result unconditionally.
func (dg *DependencyGraph) GetDependencies(reqID string) []Dependency {
	deps, ok := dg.Nodes[reqID]
	if !ok {
		return []Dependency{}
	}
	return deps
}
// GetReverseDependencies returns every dependency edge whose target is reqID.
// This answers: "what would be blocked if this requirement changes?"
func (dg *DependencyGraph) GetReverseDependencies(reqID string) []Dependency {
	var incoming []Dependency
	for _, edges := range dg.Nodes {
		for _, edge := range edges {
			if edge.Target == reqID {
				incoming = append(incoming, edge)
			}
		}
	}
	return incoming
}
// GetAllRequirements returns every unique requirement ID appearing in the
// graph as either a dependency source or a target. Order is unspecified.
func (dg *DependencyGraph) GetAllRequirements() []string {
	// One pass collects both sources and targets into a set.
	set := make(map[string]bool)
	for src, deps := range dg.Nodes {
		set[src] = true
		for _, dep := range deps {
			set[dep.Target] = true
		}
	}
	ids := make([]string, 0, len(set))
	for id := range set {
		ids = append(ids, id)
	}
	return ids
}
package specs
import (
"fmt"
"strings"
)
// CANARY: REQ=CBIN-147; FEATURE="DependencyValidator"; ASPECT=Engine; STATUS=TESTED; TEST=TestValidateDependencies_Valid,TestValidateDependencies_SimpleCycle,TestValidateDependencies_ComplexCycle; UPDATED=2025-10-18

// SpecFinder is an interface for finding and checking specification existence.
// It allows validation against the actual filesystem or a mock for testing.
type SpecFinder interface {
	// SpecExists checks if a specification exists for the given requirement ID.
	SpecExists(reqID string) bool
	// FindSpecPath returns the path to the spec.md file for a requirement.
	FindSpecPath(reqID string) (string, error)
}

// ValidationResult contains the results of dependency validation.
type ValidationResult struct {
	// IsValid is true if the dependency graph is valid: no cycles and, when a
	// SpecFinder is configured, no missing requirements.
	IsValid bool
	// Cycles contains all detected circular dependencies. Each cycle is a
	// slice of requirement IDs forming the cycle, e.g.
	// ["CBIN-100", "CBIN-101", "CBIN-102", "CBIN-100"].
	Cycles [][]string
	// MissingRequirements lists requirement IDs that are referenced but don't exist.
	MissingRequirements []string
	// Errors contains a human-readable message for every problem found.
	Errors []string
}
// FormatErrors returns a human-readable rendering of all validation errors,
// or "No validation errors" when the result is valid.
func (vr *ValidationResult) FormatErrors() string {
	if vr.IsValid {
		return "No validation errors"
	}
	var out []string
	if len(vr.Cycles) > 0 {
		out = append(out, "Circular dependencies detected:")
		for i, cycle := range vr.Cycles {
			out = append(out, fmt.Sprintf(" Cycle %d: %s", i+1, strings.Join(cycle, " -> ")))
		}
	}
	if len(vr.MissingRequirements) > 0 {
		out = append(out, "Missing requirements:")
		for _, id := range vr.MissingRequirements {
			out = append(out, fmt.Sprintf(" - %s", id))
		}
	}
	if len(vr.Errors) > 0 {
		out = append(out, "Other errors:")
		for _, msg := range vr.Errors {
			out = append(out, fmt.Sprintf(" - %s", msg))
		}
	}
	return strings.Join(out, "\n")
}
// DependencyValidator validates dependency graphs for cycles and missing
// requirements.
type DependencyValidator struct {
	graph      *DependencyGraph // graph under validation
	specFinder SpecFinder       // optional; enables missing-requirement checks
}

// NewDependencyValidator creates a new dependency validator for the given
// graph. Missing-requirement checking stays disabled until SetSpecFinder is
// called.
func NewDependencyValidator(graph *DependencyGraph) *DependencyValidator {
	return &DependencyValidator{
		graph:      graph,
		specFinder: nil, // Optional, can be set with SetSpecFinder
	}
}

// SetSpecFinder configures the validator to also check for requirements that
// are referenced in the graph but have no specification.
func (dv *DependencyValidator) SetSpecFinder(finder SpecFinder) {
	dv.specFinder = finder
}
// Validate performs comprehensive validation of the dependency graph:
//  1. circular dependencies (DFS with a recursion stack)
//  2. missing requirements (only when a SpecFinder is configured)
//
// The returned result is valid only when neither check finds a problem.
func (dv *DependencyValidator) Validate() ValidationResult {
	result := ValidationResult{
		IsValid:             true,
		Cycles:              [][]string{},
		MissingRequirements: []string{},
		Errors:              []string{},
	}
	// Cycle detection.
	if cycles := dv.detectCycles(); len(cycles) > 0 {
		result.IsValid = false
		result.Cycles = cycles
		for _, c := range cycles {
			result.Errors = append(result.Errors,
				fmt.Sprintf("Cycle detected: %s", strings.Join(c, " -> ")))
		}
	}
	// Missing-requirement checks, when enabled.
	if dv.specFinder != nil {
		if missing := dv.checkMissingRequirements(); len(missing) > 0 {
			result.IsValid = false
			result.MissingRequirements = missing
			for _, id := range missing {
				result.Errors = append(result.Errors,
					fmt.Sprintf("Missing requirement: %s", id))
			}
		}
	}
	return result
}
// detectCycles uses Depth-First Search (DFS) with a recursion stack to detect cycles.
// Algorithm:
//  1. For each unvisited node, start a DFS
//  2. Track visited nodes and the current recursion stack
//  3. If we reach a node already on the recursion stack, we found a cycle
//  4. Extract the cycle path from the current DFS path
//
// BUG FIX: the previous implementation returned out of dfs immediately when a
// cycle was found, skipping the backtracking step. That left stale entries in
// both path and recStack, so a later DFS from another root could see a visited
// node still flagged as "on the recursion stack" and report a false-positive
// cycle. dfs now always pops the path and clears the recStack entry before
// returning, regardless of whether a cycle was found.
//
// Time Complexity: O(V + E) where V = requirements, E = dependencies
func (dv *DependencyValidator) detectCycles() [][]string {
	var cycles [][]string
	visited := make(map[string]bool)
	recStack := make(map[string]bool)
	path := []string{}
	// DFS function that detects back edges (cycles). Returns true if a cycle
	// was found anywhere below this node.
	var dfs func(string) bool
	dfs = func(node string) bool {
		visited[node] = true
		recStack[node] = true
		path = append(path, node)
		found := false
		// Explore all dependencies of this node
		for _, dep := range dv.graph.GetDependencies(node) {
			neighbor := dep.Target
			if !visited[neighbor] {
				// Continue DFS on unvisited neighbor
				if dfs(neighbor) {
					found = true
					break
				}
			} else if recStack[neighbor] {
				// Found a back edge (cycle)! Extract the cycle from path.
				cycleStart := -1
				for i, n := range path {
					if n == neighbor {
						cycleStart = i
						break
					}
				}
				if cycleStart >= 0 {
					// Build cycle: path from cycleStart to end + neighbor to close it
					cycle := make([]string, len(path)-cycleStart+1)
					copy(cycle, path[cycleStart:])
					cycle[len(cycle)-1] = neighbor
					cycles = append(cycles, cycle)
				}
				found = true
				break
			}
		}
		// Always backtrack — even when a cycle was found — so no stale
		// path/recStack state leaks into subsequent DFS roots.
		path = path[:len(path)-1]
		recStack[node] = false
		return found
	}
	// Run DFS from each unvisited node that has outgoing edges.
	for _, node := range dv.graph.GetAllRequirements() {
		if !visited[node] && len(dv.graph.GetDependencies(node)) > 0 {
			dfs(node)
		}
	}
	return cycles
}
// checkMissingRequirements verifies that all target requirements exist.
// Returns the requirement IDs that are referenced somewhere in the graph but
// for which the configured SpecFinder reports no spec. Each target is checked
// at most once.
func (dv *DependencyValidator) checkMissingRequirements() []string {
	var missing []string
	seen := make(map[string]bool)
	for _, deps := range dv.graph.Nodes {
		for _, dep := range deps {
			t := dep.Target
			if seen[t] {
				continue // already examined this target
			}
			seen[t] = true
			if !dv.specFinder.SpecExists(t) {
				missing = append(missing, t)
			}
		}
	}
	return missing
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
// CANARY: REQ=CBIN-146; FEATURE="ContextManagement"; ASPECT=Engine; STATUS=IMPL; UPDATED=2025-10-18
package storage
import (
	"database/sql"
	"errors"
	"fmt"
	"os"
	"path/filepath"
	"strings"
)
// ContextManager manages the current project context
type ContextManager struct {
	// manager provides the underlying database connection.
	manager *DatabaseManager
	// registry is used to look up and enumerate registered projects.
	registry *ProjectRegistry
}
// NewContextManager creates a new context manager backed by the given
// database manager; a project registry is constructed on the same manager.
func NewContextManager(manager *DatabaseManager) *ContextManager {
	cm := &ContextManager{manager: manager}
	cm.registry = NewProjectRegistry(manager)
	return cm
}
// DetectProject attempts to detect the current project from the working
// directory. A project matches when the working directory equals its path or
// lies underneath it; when several match, the most specific (longest path)
// wins. Returns an error if no projects are registered or none matches.
func (cm *ContextManager) DetectProject() (*Project, error) {
	cwd, err := os.Getwd()
	if err != nil {
		return nil, fmt.Errorf("get working directory: %w", err)
	}
	projects, err := cm.registry.List()
	if err != nil {
		return nil, fmt.Errorf("list projects: %w", err)
	}
	if len(projects) == 0 {
		return nil, errors.New("no projects registered")
	}
	current := filepath.Clean(cwd)
	var (
		best    *Project
		bestLen int
	)
	for _, candidate := range projects {
		root := filepath.Clean(candidate.Path)
		// Current path must equal the project root or sit below it.
		inside := current == root ||
			strings.HasPrefix(current, root+string(filepath.Separator))
		if inside && len(root) > bestLen {
			best = candidate
			bestLen = len(root)
		}
	}
	if best == nil {
		return nil, fmt.Errorf("no project found for path: %s", cwd)
	}
	return best, nil
}
// SwitchTo switches the current project context to the specified project ID.
// The target must exist; all projects are deactivated first so exactly one
// project ends up active.
func (cm *ContextManager) SwitchTo(projectID string) error {
	// Verify the target project exists before touching any state.
	target, err := cm.registry.GetByID(projectID)
	if err != nil {
		return fmt.Errorf("get project: %w", err)
	}
	if err = cm.deactivateAll(); err != nil {
		return fmt.Errorf("deactivate projects: %w", err)
	}
	if err = cm.setActive(target.ID, true); err != nil {
		return fmt.Errorf("activate project: %w", err)
	}
	return nil
}
// GetCurrent returns the currently active project.
//
// sql.ErrNoRows means no project is marked active and is reported as
// "no active project context set". Any other query/scan failure is wrapped
// and propagated instead of being masked by that message (the previous
// version swallowed real database errors behind the "no context" error).
func (cm *ContextManager) GetCurrent() (*Project, error) {
	query := `
	SELECT id, name, path, active, created_at, COALESCE(metadata, '') as metadata
	FROM projects
	WHERE active = 1
	LIMIT 1
	`
	p := &Project{}
	err := cm.manager.conn.QueryRow(query).Scan(
		&p.ID, &p.Name, &p.Path, &p.Active, &p.CreatedAt, &p.Metadata,
	)
	if errors.Is(err, sql.ErrNoRows) {
		return nil, errors.New("no active project context set")
	}
	if err != nil {
		return nil, fmt.Errorf("query active project: %w", err)
	}
	return p, nil
}
// deactivateAll sets every project's active flag to 0.
func (cm *ContextManager) deactivateAll() error {
	if _, err := cm.manager.conn.Exec(`UPDATE projects SET active = 0`); err != nil {
		return fmt.Errorf("update projects: %w", err)
	}
	return nil
}
// setActive sets the active flag for a project, failing if the project ID
// matched no rows.
func (cm *ContextManager) setActive(projectID string, active bool) error {
	res, err := cm.manager.conn.Exec(
		`UPDATE projects SET active = ? WHERE id = ?`, active, projectID)
	if err != nil {
		return fmt.Errorf("update project: %w", err)
	}
	n, err := res.RowsAffected()
	if err != nil {
		return fmt.Errorf("get rows affected: %w", err)
	}
	if n == 0 {
		return fmt.Errorf("project %s not found", projectID)
	}
	return nil
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
// CANARY: REQ=CBIN-129; FEATURE="DatabaseMigrations"; ASPECT=Storage; STATUS=IMPL; OWNER=canary; UPDATED=2025-10-16
package storage
import (
"embed"
"errors"
"fmt"
"log/slog"
"os"
"path/filepath"
"strconv"
"github.com/golang-migrate/migrate/v4"
_ "github.com/golang-migrate/migrate/v4/database/sqlite"
"github.com/golang-migrate/migrate/v4/source/iofs"
"github.com/jmoiron/sqlx"
_ "modernc.org/sqlite" // Pure Go SQLite implementation (no CGO)
)
//go:embed migrations/*.sql
var migrationFiles embed.FS

const (
	// DBDriver is the database/sql driver name (modernc.org/sqlite, pure Go).
	DBDriver = "sqlite"
	// DBMigrationPath is the embedded directory holding *.sql migrations.
	DBMigrationPath = "migrations"
	// DBSourceName is the golang-migrate source name for embedded files.
	DBSourceName = "iofs"
	// DBURLProtocol prefixes the database path to form a migrate URL.
	DBURLProtocol = "sqlite://"
	// MigrateAll is the steps argument meaning "apply/roll back everything".
	MigrateAll = "all"
	LatestVersion = 5 // Update this when adding new migrations
)

// ErrDatabaseNotPopulated signals that the database has no applied migrations.
var ErrDatabaseNotPopulated = errors.New("database not migrated")
// InitDB initializes the database connection, creating the parent directory
// for the database file if necessary. The connection is opened lazily by
// sqlx.Open; no ping is performed here.
func InitDB(dbPath string) (*sqlx.DB, error) {
	slog.Info("Initializing database", "path", dbPath)
	dir := filepath.Dir(dbPath)
	if mkErr := os.MkdirAll(dir, 0755); mkErr != nil {
		return nil, fmt.Errorf("failed to create database directory at %s: %w", dir, mkErr)
	}
	conn, err := sqlx.Open(DBDriver, dbPath)
	if err != nil {
		return nil, fmt.Errorf("error opening database at %s: %w", dbPath, err)
	}
	slog.Info("Database connection initialized")
	return conn, nil
}
// MigrateDB applies the database migrations stored in migrations/*.sql.
// It takes a single argument which is either "all" to migrate to the latest
// version or a non-zero integer to migrate by that many steps.
//
// migrate.ErrNoChange is treated as success (nothing to do); comparisons use
// errors.Is so wrapped errors are still recognized.
func MigrateDB(dbPath string, steps string) error {
	slog.Info("Migrating database", "path", dbPath, "steps", steps)
	// Ensure the database directory exists before migrating
	if err := os.MkdirAll(filepath.Dir(dbPath), 0755); err != nil {
		return fmt.Errorf("failed to create database directory at %s: %w", filepath.Dir(dbPath), err)
	}
	driver, err := iofs.New(migrationFiles, DBMigrationPath)
	if err != nil {
		return fmt.Errorf("failed to create migration source: %w", err)
	}
	m, err := migrate.NewWithSourceInstance(DBSourceName, driver, DBURLProtocol+dbPath)
	if err != nil {
		return fmt.Errorf("error creating migration instance for database at %s: %w", dbPath, err)
	}
	defer m.Close()
	switch {
	case steps == MigrateAll:
		slog.Info("Migrating database to latest version")
		switch upErr := m.Up(); {
		case errors.Is(upErr, migrate.ErrNoChange):
			slog.Info("Database already at latest version")
		case upErr != nil:
			return fmt.Errorf("failed to migrate database: %w", upErr)
		}
	case isInt(steps):
		slog.Info("Migrating database by steps", "steps", steps)
		stepCount, convErr := strconv.Atoi(steps)
		if convErr != nil {
			return fmt.Errorf("invalid number of migration steps: %s: %w", steps, convErr)
		}
		if stepCount == 0 {
			return errors.New("migration steps cannot be zero, please specify a positive integer or 'all'")
		}
		switch stepErr := m.Steps(stepCount); {
		case errors.Is(stepErr, migrate.ErrNoChange):
			slog.Info("No migration changes to apply")
		case stepErr != nil:
			return fmt.Errorf("failed to migrate database by %d steps: %w", stepCount, stepErr)
		}
	default:
		return fmt.Errorf("invalid argument for migration steps: %s, expected 'all' or an integer", steps)
	}
	slog.Info("Database migrated successfully")
	return nil
}
// TeardownDB is the negative inverse of MigrateDB, rolling back migrations.
// It takes a single argument which is either "all" to roll back all
// migrations or a non-zero integer to roll back by that many steps.
//
// migrate.ErrNoChange is treated as success; comparisons use errors.Is so
// wrapped errors are still recognized.
func TeardownDB(dbPath string, steps string) error {
	slog.Debug("Tearing down database", "path", dbPath, "steps", steps)
	driver, err := iofs.New(migrationFiles, DBMigrationPath)
	if err != nil {
		return fmt.Errorf("failed to create migration source: %w", err)
	}
	m, err := migrate.NewWithSourceInstance(DBSourceName, driver, DBURLProtocol+dbPath)
	if err != nil {
		return fmt.Errorf("error creating migration instance: %w", err)
	}
	defer m.Close()
	switch {
	case steps == MigrateAll:
		slog.Info("Rolling back all migrations")
		if downErr := m.Down(); downErr != nil && !errors.Is(downErr, migrate.ErrNoChange) {
			return fmt.Errorf("failed to roll back all migrations: %w", downErr)
		}
	case isInt(steps):
		slog.Info("Rolling back database by steps", "steps", steps)
		stepCount, convErr := strconv.Atoi(steps)
		if convErr != nil {
			return fmt.Errorf("invalid number of migration steps: %s: %w", steps, convErr)
		}
		if stepCount == 0 {
			return errors.New("migration steps cannot be zero, please specify a positive integer or 'all'")
		}
		// Negative step count rolls migrations back.
		if stepErr := m.Steps(-stepCount); stepErr != nil && !errors.Is(stepErr, migrate.ErrNoChange) {
			return fmt.Errorf("failed to roll back database by %d steps: %w", stepCount, stepErr)
		}
	default:
		return fmt.Errorf("invalid argument for migration steps: %s, expected 'all' or an integer", steps)
	}
	slog.Info("Database teardown completed")
	return nil
}
// DatabasePopulated checks if the database is fully migrated and populated.
// An error is returned only for database failures; the bool reflects the
// actual state. When targetVersion <= 0, any applied migration counts as
// populated; otherwise the newest applied version must be >= targetVersion.
func DatabasePopulated(db *sqlx.DB, targetVersion int) (bool, error) {
	slog.Debug("Checking if database is fully migrated and populated")
	var populated bool
	if err := db.Get(&populated, "SELECT EXISTS(SELECT 1 FROM schema_migrations)"); err != nil {
		return false, fmt.Errorf("failed to check if database is populated: %w", err)
	}
	if !populated {
		slog.Warn("Database is not populated", "targetVersion", targetVersion)
		return false, nil
	}
	// No specific target version requested — population alone suffices.
	if targetVersion <= 0 {
		return true, nil
	}
	var current int
	if err := db.Get(&current, "SELECT version FROM schema_migrations ORDER BY version DESC LIMIT 1"); err != nil {
		return false, fmt.Errorf("failed to retrieve current database version: %w", err)
	}
	slog.Debug("Current database version", "version", current)
	if current < targetVersion {
		slog.Warn("Database is not fully migrated", "currentVersion", current, "targetVersion", targetVersion)
		return false, nil
	}
	slog.Debug("Database version is up to date or ahead", "version", current, "targetVersion", targetVersion)
	return true, nil
}
// isInt reports whether s parses as a base-10 integer (leading sign allowed).
func isInt(s string) bool {
	if _, err := strconv.Atoi(s); err != nil {
		return false
	}
	return true
}
// NeedsMigration checks if the database exists and needs migration
func NeedsMigration(dbPath string) (bool, int, error) {
// Check if database file exists
if _, err := os.Stat(dbPath); os.IsNotExist(err) {
return false, 0, nil // Database doesn't exist yet
}
// Open database to check version
db, err := sqlx.Open(DBDriver, dbPath)
if err != nil {
return false, 0, fmt.Errorf("failed to open database: %w", err)
}
defer db.Close()
// Check if schema_migrations table exists
var tableExists bool
err = db.Get(&tableExists, "SELECT EXISTS(SELECT 1 FROM sqlite_master WHERE type='table' AND name='schema_migrations')")
if err != nil {
return false, 0, fmt.Errorf("failed to check schema_migrations table: %w", err)
}
if !tableExists {
return true, 0, nil // Database exists but not migrated
}
// Get current version
var currentVersion int
err = db.Get(¤tVersion, "SELECT COALESCE(MAX(version), 0) FROM schema_migrations WHERE dirty = 0")
if err != nil {
return false, 0, fmt.Errorf("failed to get current version: %w", err)
}
// Check if migration needed
if currentVersion < LatestVersion {
return true, currentVersion, nil
}
return false, currentVersion, nil
}
// AutoMigrate automatically migrates the database if needed
func AutoMigrate(dbPath string) error {
// Check if database file exists
_, err := os.Stat(dbPath)
dbExists := err == nil
if dbExists {
needsMigration, currentVersion, err := NeedsMigration(dbPath)
if err != nil {
return fmt.Errorf("failed to check migration status: %w", err)
}
if !needsMigration {
slog.Debug("Database is up to date", "version", currentVersion)
return nil
}
slog.Info("Database migration needed", "currentVersion", currentVersion, "targetVersion", LatestVersion)
fmt.Printf("π Migrating database from version %d to %d...\n", currentVersion, LatestVersion)
} else {
slog.Info("Database does not exist, will create with migrations", "path", dbPath)
fmt.Printf("π Creating database with schema version %d...\n", LatestVersion)
}
if err := MigrateDB(dbPath, MigrateAll); err != nil {
return fmt.Errorf("auto-migration failed: %w", err)
}
if dbExists {
fmt.Printf("β
Database migrated to version %d\n", LatestVersion)
} else {
fmt.Printf("β
Database created at version %d\n", LatestVersion)
}
return nil
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
// CANARY: REQ=CBIN-140; FEATURE="GapRepository"; ASPECT=Storage; STATUS=IMPL; UPDATED=2025-10-17
package storage
import (
"database/sql"
"fmt"
"strings"
"time"
)
// GapEntry represents a gap analysis entry
type GapEntry struct {
	ID int // database row ID
	// GapID is the externally-visible gap identifier.
	GapID string
	// ReqID is the requirement this gap belongs to.
	ReqID string
	Feature string
	Aspect string
	// Category is the gap_categories.name this entry is classified under.
	Category string
	Description string
	// CorrectiveAction describes how to close the gap (may be empty).
	CorrectiveAction string
	CreatedAt time.Time
	CreatedBy string
	// HelpfulCount / UnhelpfulCount are user feedback tallies used for ranking.
	HelpfulCount int
	UnhelpfulCount int
}
// GapCategory represents a gap category
type GapCategory struct {
	ID int
	Name string
	Description string
	CreatedAt time.Time
}
// GapConfig represents gap analysis configuration
type GapConfig struct {
	ID int
	// MaxGapInjection caps how many gaps GetTopGaps returns.
	MaxGapInjection int
	// MinHelpfulThreshold filters out gaps below this helpful count.
	MinHelpfulThreshold int
	// RankingStrategy selects the ORDER BY used by GetTopGaps
	// ("helpful_desc", "recency_desc", or "weighted").
	RankingStrategy string
	CreatedAt time.Time
	UpdatedAt time.Time
}
// GapQueryFilter represents query filters for gap entries.
// Zero-valued fields are ignored by QueryEntries.
type GapQueryFilter struct {
	ReqID string
	Feature string
	Aspect string
	Category string
	Limit int
}
// GapRepository handles gap analysis database operations
type GapRepository struct {
	// db provides the underlying SQLite connection.
	db *DB
}
// NewGapRepository creates a new gap repository backed by the given database.
func NewGapRepository(db *DB) *GapRepository {
	return &GapRepository{db: db}
}
// CreateEntry creates a new gap analysis entry. The entry's Category must
// name an existing gap_categories row; CreatedAt defaults to now and
// CreatedBy to "unknown" when unset.
func (r *GapRepository) CreateEntry(entry *GapEntry) error {
	// Resolve the category name to its foreign-key ID.
	var categoryID int
	if err := r.db.conn.Get(&categoryID, "SELECT id FROM gap_categories WHERE name = ?", entry.Category); err != nil {
		return fmt.Errorf("get category ID: %w", err)
	}
	// Apply defaults for unset audit fields.
	createdAt := entry.CreatedAt
	if createdAt.IsZero() {
		createdAt = time.Now()
	}
	createdBy := entry.CreatedBy
	if createdBy == "" {
		createdBy = "unknown"
	}
	const query = `
	INSERT INTO gap_entries (
	gap_id, req_id, feature, aspect, category_id,
	description, corrective_action, created_at, created_by,
	helpful_count, unhelpful_count
	) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
	`
	if _, err := r.db.conn.Exec(query,
		entry.GapID, entry.ReqID, entry.Feature, entry.Aspect, categoryID,
		entry.Description, entry.CorrectiveAction, createdAt, createdBy,
		entry.HelpfulCount, entry.UnhelpfulCount,
	); err != nil {
		return fmt.Errorf("insert gap entry: %w", err)
	}
	return nil
}
// GetEntryByGapID retrieves a single gap entry by its gap ID, joining the
// category name in. Returns an error when the gap does not exist.
func (r *GapRepository) GetEntryByGapID(gapID string) (*GapEntry, error) {
	const query = `
	SELECT
	e.id, e.gap_id, e.req_id, e.feature, e.aspect,
	c.name as category, e.description, e.corrective_action,
	e.created_at, e.created_by, e.helpful_count, e.unhelpful_count
	FROM gap_entries e
	JOIN gap_categories c ON e.category_id = c.id
	WHERE e.gap_id = ?
	`
	e := &GapEntry{}
	if err := r.db.conn.QueryRow(query, gapID).Scan(
		&e.ID, &e.GapID, &e.ReqID, &e.Feature, &e.Aspect,
		&e.Category, &e.Description, &e.CorrectiveAction,
		&e.CreatedAt, &e.CreatedBy, &e.HelpfulCount, &e.UnhelpfulCount,
	); err != nil {
		return nil, fmt.Errorf("get gap entry: %w", err)
	}
	return e, nil
}
// GetEntriesByReqID retrieves all gap entries for a requirement, most helpful
// first, then most recent.
func (r *GapRepository) GetEntriesByReqID(reqID string) ([]*GapEntry, error) {
	const query = `
	SELECT
	e.id, e.gap_id, e.req_id, e.feature, e.aspect,
	c.name as category, e.description, e.corrective_action,
	e.created_at, e.created_by, e.helpful_count, e.unhelpful_count
	FROM gap_entries e
	JOIN gap_categories c ON e.category_id = c.id
	WHERE e.req_id = ?
	ORDER BY e.helpful_count DESC, e.created_at DESC
	`
	rows, err := r.db.conn.Query(query, reqID)
	if err != nil {
		return nil, fmt.Errorf("query gap entries: %w", err)
	}
	defer rows.Close()
	return r.scanGapEntries(rows)
}
// MarkHelpful increments the helpful count for a gap entry.
// A nonexistent gapID is a silent no-op (zero rows updated).
func (r *GapRepository) MarkHelpful(gapID string) error {
	_, err := r.db.conn.Exec(
		`UPDATE gap_entries SET helpful_count = helpful_count + 1 WHERE gap_id = ?`,
		gapID,
	)
	if err != nil {
		return fmt.Errorf("mark helpful: %w", err)
	}
	return nil
}
// MarkUnhelpful increments the unhelpful count for a gap entry.
// A nonexistent gapID is a silent no-op (zero rows updated).
func (r *GapRepository) MarkUnhelpful(gapID string) error {
	_, err := r.db.conn.Exec(
		`UPDATE gap_entries SET unhelpful_count = unhelpful_count + 1 WHERE gap_id = ?`,
		gapID,
	)
	if err != nil {
		return fmt.Errorf("mark unhelpful: %w", err)
	}
	return nil
}
// QueryEntries queries gap entries with filters. Zero-valued filter fields
// are ignored; results are ordered most helpful first, then most recent, and
// optionally truncated to filter.Limit.
func (r *GapRepository) QueryEntries(filter GapQueryFilter) ([]*GapEntry, error) {
	var sb strings.Builder
	sb.WriteString(`
	SELECT
	e.id, e.gap_id, e.req_id, e.feature, e.aspect,
	c.name as category, e.description, e.corrective_action,
	e.created_at, e.created_by, e.helpful_count, e.unhelpful_count
	FROM gap_entries e
	JOIN gap_categories c ON e.category_id = c.id
	WHERE 1=1
	`)
	var args []interface{}
	// addClause appends an AND clause with its bound parameter.
	addClause := func(clause string, value interface{}) {
		sb.WriteString(clause)
		args = append(args, value)
	}
	if filter.ReqID != "" {
		addClause(" AND e.req_id = ?", filter.ReqID)
	}
	if filter.Feature != "" {
		addClause(" AND e.feature = ?", filter.Feature)
	}
	if filter.Aspect != "" {
		addClause(" AND e.aspect = ?", filter.Aspect)
	}
	if filter.Category != "" {
		addClause(" AND c.name = ?", filter.Category)
	}
	sb.WriteString(" ORDER BY e.helpful_count DESC, e.created_at DESC")
	if filter.Limit > 0 {
		addClause(" LIMIT ?", filter.Limit)
	}
	rows, err := r.db.conn.Query(sb.String(), args...)
	if err != nil {
		return nil, fmt.Errorf("query entries: %w", err)
	}
	defer rows.Close()
	return r.scanGapEntries(rows)
}
// GetTopGaps retrieves the top gaps for a requirement based on the supplied
// configuration: entries below MinHelpfulThreshold are excluded, results are
// ordered per RankingStrategy, and at most MaxGapInjection rows are returned.
func (r *GapRepository) GetTopGaps(reqID string, config *GapConfig) ([]*GapEntry, error) {
	const base = `
	SELECT
	e.id, e.gap_id, e.req_id, e.feature, e.aspect,
	c.name as category, e.description, e.corrective_action,
	e.created_at, e.created_by, e.helpful_count, e.unhelpful_count
	FROM gap_entries e
	JOIN gap_categories c ON e.category_id = c.id
	WHERE e.req_id = ?
	AND e.helpful_count >= ?
	`
	// Translate the ranking strategy into an ORDER BY clause.
	var order string
	switch config.RankingStrategy {
	case "recency_desc":
		order = " ORDER BY e.created_at DESC"
	case "weighted":
		// Weighted: (helpful_count * 2) - unhelpful_count, then recency.
		order = " ORDER BY (e.helpful_count * 2 - e.unhelpful_count) DESC, e.created_at DESC"
	default: // "helpful_desc" and anything unrecognized
		order = " ORDER BY e.helpful_count DESC, e.created_at DESC"
	}
	rows, err := r.db.conn.Query(
		base+order+" LIMIT ?",
		reqID, config.MinHelpfulThreshold, config.MaxGapInjection,
	)
	if err != nil {
		return nil, fmt.Errorf("get top gaps: %w", err)
	}
	defer rows.Close()
	return r.scanGapEntries(rows)
}
// GetCategories retrieves all gap categories, sorted by name.
func (r *GapRepository) GetCategories() ([]*GapCategory, error) {
	rows, err := r.db.conn.Query(
		`SELECT id, name, description, created_at FROM gap_categories ORDER BY name ASC`)
	if err != nil {
		return nil, fmt.Errorf("get categories: %w", err)
	}
	defer rows.Close()
	var out []*GapCategory
	for rows.Next() {
		c := &GapCategory{}
		if err := rows.Scan(&c.ID, &c.Name, &c.Description, &c.CreatedAt); err != nil {
			return nil, fmt.Errorf("scan category: %w", err)
		}
		out = append(out, c)
	}
	return out, rows.Err()
}
// GetConfig retrieves the gap analysis configuration (the singleton row with
// id = 1).
func (r *GapRepository) GetConfig() (*GapConfig, error) {
	const query = `
	SELECT id, max_gap_injection, min_helpful_threshold, ranking_strategy,
	created_at, updated_at
	FROM gap_config
	WHERE id = 1
	`
	cfg := &GapConfig{}
	if err := r.db.conn.QueryRow(query).Scan(
		&cfg.ID, &cfg.MaxGapInjection, &cfg.MinHelpfulThreshold,
		&cfg.RankingStrategy, &cfg.CreatedAt, &cfg.UpdatedAt,
	); err != nil {
		return nil, fmt.Errorf("get config: %w", err)
	}
	return cfg, nil
}
// UpdateConfig updates the gap analysis configuration (the singleton row with
// id = 1). UpdatedAt defaults to now when unset on the given config.
func (r *GapRepository) UpdateConfig(config *GapConfig) error {
	const query = `
	UPDATE gap_config
	SET max_gap_injection = ?,
	min_helpful_threshold = ?,
	ranking_strategy = ?,
	updated_at = ?
	WHERE id = 1
	`
	ts := config.UpdatedAt
	if ts.IsZero() {
		ts = time.Now()
	}
	if _, err := r.db.conn.Exec(query,
		config.MaxGapInjection,
		config.MinHelpfulThreshold,
		config.RankingStrategy,
		ts,
	); err != nil {
		return fmt.Errorf("update config: %w", err)
	}
	return nil
}
// GenerateGapReport generates a formatted Markdown gap analysis report for a
// requirement, grouped by category.
//
// FIX: the previous version ranged directly over the category map, so the
// order of category sections changed randomly between runs (Go map iteration
// order is randomized). Categories are now emitted in first-seen order, which
// follows the deterministic helpful/recency ordering of GetEntriesByReqID.
func (r *GapRepository) GenerateGapReport(reqID string) (string, error) {
	entries, err := r.GetEntriesByReqID(reqID)
	if err != nil {
		return "", fmt.Errorf("get entries: %w", err)
	}
	if len(entries) == 0 {
		return fmt.Sprintf("No gap analysis entries found for %s\n", reqID), nil
	}
	var report strings.Builder
	report.WriteString(fmt.Sprintf("# Gap Analysis Report for %s\n\n", reqID))
	report.WriteString(fmt.Sprintf("Total Gaps: %d\n\n", len(entries)))
	// Group by category, remembering the order categories first appear so the
	// report is deterministic.
	categoryGroups := make(map[string][]*GapEntry)
	var categoryOrder []string
	for _, entry := range entries {
		if _, seen := categoryGroups[entry.Category]; !seen {
			categoryOrder = append(categoryOrder, entry.Category)
		}
		categoryGroups[entry.Category] = append(categoryGroups[entry.Category], entry)
	}
	for _, category := range categoryOrder {
		catEntries := categoryGroups[category]
		report.WriteString(fmt.Sprintf("## Category: %s (%d)\n\n", category, len(catEntries)))
		for _, entry := range catEntries {
			report.WriteString(fmt.Sprintf("### %s - %s\n", entry.GapID, entry.Feature))
			report.WriteString(fmt.Sprintf("**Description:** %s\n\n", entry.Description))
			if entry.CorrectiveAction != "" {
				report.WriteString(fmt.Sprintf("**Corrective Action:** %s\n\n", entry.CorrectiveAction))
			}
			report.WriteString(fmt.Sprintf("**Helpful:** %d | **Unhelpful:** %d | **Created:** %s\n\n",
				entry.HelpfulCount, entry.UnhelpfulCount, entry.CreatedAt.Format("2006-01-02")))
			report.WriteString("---\n\n")
		}
	}
	return report.String(), nil
}
// scanGapEntries scans gap entries from SQL rows. The column order must match
// the SELECT list used by the calling queries. Returns rows.Err() alongside
// the accumulated entries.
func (r *GapRepository) scanGapEntries(rows *sql.Rows) ([]*GapEntry, error) {
	var out []*GapEntry
	for rows.Next() {
		e := &GapEntry{}
		if err := rows.Scan(
			&e.ID, &e.GapID, &e.ReqID, &e.Feature, &e.Aspect,
			&e.Category, &e.Description, &e.CorrectiveAction,
			&e.CreatedAt, &e.CreatedBy, &e.HelpfulCount, &e.UnhelpfulCount,
		); err != nil {
			return nil, fmt.Errorf("scan gap entry: %w", err)
		}
		out = append(out, e)
	}
	return out, rows.Err()
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
// CANARY: REQ=CBIN-146; FEATURE="DatabaseModes"; ASPECT=Storage; STATUS=IMPL; UPDATED=2025-10-18
package storage
import (
"database/sql"
"errors"
"fmt"
"os"
"path/filepath"
"github.com/jmoiron/sqlx"
_ "modernc.org/sqlite"
)
// DatabaseMode represents the initialization mode for the database
type DatabaseMode int

const (
	// GlobalMode stores the database under the user's home directory.
	GlobalMode DatabaseMode = iota
	// LocalMode stores the database under the current working directory.
	LocalMode
)

// String returns the string representation of DatabaseMode: "global",
// "local", or "unknown" for any other value.
func (dm DatabaseMode) String() string {
	if dm == GlobalMode {
		return "global"
	}
	if dm == LocalMode {
		return "local"
	}
	return "unknown"
}
// DatabaseManager manages both global and local database connections
type DatabaseManager struct {
	// conn is the open sqlx connection; nil until Initialize/Discover succeeds.
	conn *sqlx.DB
	// path is the filesystem location of the database file.
	path string
	// mode records whether the connection is global or local.
	mode DatabaseMode
}
// NewDatabaseManager creates a new, unconnected database manager.
// Call Initialize or Discover before using the connection.
func NewDatabaseManager() *DatabaseManager {
	return new(DatabaseManager)
}
// Initialize initializes the database in the specified mode, creating the
// containing directory if necessary.
//
// GlobalMode resolves to ~/.canary/canary.db; LocalMode to ./.canary/canary.db.
// Connection setup (opening, enabling foreign keys, recording path/mode) is
// delegated to open so it is defined in exactly one place instead of being
// duplicated here.
func (dm *DatabaseManager) Initialize(mode DatabaseMode) error {
	var dbPath string
	switch mode {
	case GlobalMode:
		// Global database location: ~/.canary/canary.db
		homeDir, err := os.UserHomeDir()
		if err != nil {
			return fmt.Errorf("get home directory: %w", err)
		}
		dbPath = filepath.Join(homeDir, ".canary", "canary.db")
	case LocalMode:
		// Local database location: ./.canary/canary.db
		cwd, err := os.Getwd()
		if err != nil {
			return fmt.Errorf("get working directory: %w", err)
		}
		dbPath = filepath.Join(cwd, ".canary", "canary.db")
	default:
		return fmt.Errorf("invalid database mode: %v", mode)
	}
	// Create directory if needed
	if err := os.MkdirAll(filepath.Dir(dbPath), 0755); err != nil {
		return fmt.Errorf("create database directory: %w", err)
	}
	// Open the connection, enable foreign keys, and record path/mode.
	return dm.open(dbPath, mode)
}
// Discover attempts to find an existing database, with the local database
// (./.canary/canary.db) taking precedence over the global one
// (~/.canary/canary.db). Returns an error if neither exists.
func (dm *DatabaseManager) Discover() error {
	cwd, err := os.Getwd()
	if err != nil {
		return fmt.Errorf("get working directory: %w", err)
	}
	// Local database wins when present.
	localPath := filepath.Join(cwd, ".canary", "canary.db")
	if _, statErr := os.Stat(localPath); statErr == nil {
		return dm.open(localPath, LocalMode)
	}
	homeDir, err := os.UserHomeDir()
	if err != nil {
		return fmt.Errorf("get home directory: %w", err)
	}
	globalPath := filepath.Join(homeDir, ".canary", "canary.db")
	if _, statErr := os.Stat(globalPath); statErr == nil {
		return dm.open(globalPath, GlobalMode)
	}
	return errors.New("no database found: run 'canary init' or 'canary init --global' to initialize")
}
// open opens an existing database at the specified path, enables SQLite
// foreign-key enforcement, and records the connection, path, and mode on the
// manager. On pragma failure the connection is closed before returning.
func (dm *DatabaseManager) open(dbPath string, mode DatabaseMode) error {
	conn, err := InitDB(dbPath)
	if err != nil {
		return fmt.Errorf("open database: %w", err)
	}
	if _, err = conn.Exec("PRAGMA foreign_keys = ON"); err != nil {
		conn.Close()
		return fmt.Errorf("enable foreign keys: %w", err)
	}
	dm.conn, dm.path, dm.mode = conn, dbPath, mode
	return nil
}
// Mode returns the current database mode (GlobalMode or LocalMode).
func (dm *DatabaseManager) Mode() DatabaseMode {
	return dm.mode
}
// Location returns the database file path.
func (dm *DatabaseManager) Location() string {
	return dm.path
}
// DB returns the underlying database connection, or nil when no connection
// has been established yet.
func (dm *DatabaseManager) DB() *sql.DB {
	if dm.conn == nil {
		return nil
	}
	return dm.conn.DB
}
// Close closes the database connection. Closing an unconnected manager is a
// no-op and returns nil.
func (dm *DatabaseManager) Close() error {
	if dm.conn == nil {
		return nil
	}
	return dm.conn.Close()
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
// CANARY: REQ=CBIN-146; FEATURE="ProjectRegistry"; ASPECT=Storage; STATUS=IMPL; UPDATED=2025-10-18
package storage
import (
"database/sql"
"errors"
"fmt"
"regexp"
"strings"
"time"
)
// Project represents a registered project in the canary system
type Project struct {
	// ID is a slug derived from Name, made unique at registration time.
	ID string
	Name string
	// Path is the project's root directory; unique across the registry.
	Path string
	// Active marks the project currently selected as the context.
	Active bool
	// CreatedAt is an RFC3339 UTC timestamp set at registration.
	CreatedAt string
	Metadata string // JSON metadata
}
// ProjectRegistry manages project registration and queries
type ProjectRegistry struct {
	// manager provides the underlying database connection.
	manager *DatabaseManager
}
// NewProjectRegistry creates a new project registry backed by the given
// database manager.
func NewProjectRegistry(manager *DatabaseManager) *ProjectRegistry {
	return &ProjectRegistry{
		manager: manager,
	}
}
// Register adds a new project to the registry. The project must be non-nil
// with a name and path; its ID is generated as a unique slug of the name and
// its CreatedAt is stamped in RFC3339 UTC. A unique-constraint violation is
// reported as a duplicate-path error.
func (pr *ProjectRegistry) Register(project *Project) error {
	switch {
	case project == nil:
		return errors.New("project cannot be nil")
	case project.Name == "":
		return errors.New("project name is required")
	case project.Path == "":
		return errors.New("project path is required")
	}
	// Derive a collision-free slug ID from the project name.
	slug, err := pr.generateUniqueSlug(generateSlug(project.Name))
	if err != nil {
		return fmt.Errorf("generate unique slug: %w", err)
	}
	project.ID = slug
	project.CreatedAt = time.Now().UTC().Format(time.RFC3339)
	if err := pr.ensureProjectsTable(); err != nil {
		return fmt.Errorf("ensure projects table: %w", err)
	}
	const query = `
	INSERT INTO projects (id, name, path, active, created_at, metadata)
	VALUES (?, ?, ?, ?, ?, ?)
	`
	_, err = pr.manager.conn.Exec(query,
		project.ID,
		project.Name,
		project.Path,
		project.Active,
		project.CreatedAt,
		project.Metadata,
	)
	if err == nil {
		return nil
	}
	// A unique-constraint failure means the path is already registered.
	if strings.Contains(err.Error(), "UNIQUE") || strings.Contains(err.Error(), "unique") {
		return fmt.Errorf("project with path %s already exists", project.Path)
	}
	return fmt.Errorf("insert project: %w", err)
}
// List returns all registered projects, newest first.
func (pr *ProjectRegistry) List() ([]*Project, error) {
	if err := pr.ensureProjectsTable(); err != nil {
		return nil, fmt.Errorf("ensure projects table: %w", err)
	}
	rows, err := pr.manager.conn.Query(`
	SELECT id, name, path, active, created_at, COALESCE(metadata, '') as metadata
	FROM projects
	ORDER BY created_at DESC
	`)
	if err != nil {
		return nil, fmt.Errorf("query projects: %w", err)
	}
	defer rows.Close()
	var out []*Project
	for rows.Next() {
		p := new(Project)
		if err := rows.Scan(&p.ID, &p.Name, &p.Path, &p.Active, &p.CreatedAt, &p.Metadata); err != nil {
			return nil, fmt.Errorf("scan project: %w", err)
		}
		out = append(out, p)
	}
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("iterate projects: %w", err)
	}
	return out, nil
}
// Remove deletes a project from the registry by ID. It returns an error if
// no project with the given ID exists.
func (pr *ProjectRegistry) Remove(id string) error {
	if err := pr.ensureProjectsTable(); err != nil {
		return fmt.Errorf("ensure projects table: %w", err)
	}
	res, err := pr.manager.conn.Exec(`DELETE FROM projects WHERE id = ?`, id)
	if err != nil {
		return fmt.Errorf("delete project: %w", err)
	}
	affected, err := res.RowsAffected()
	switch {
	case err != nil:
		return fmt.Errorf("get rows affected: %w", err)
	case affected == 0:
		return fmt.Errorf("project with id %s not found", id)
	}
	return nil
}
// GetByID retrieves a project by its ID (the generated slug).
// Returns a "not found" error when no row matches.
func (pr *ProjectRegistry) GetByID(id string) (*Project, error) {
	if err := pr.ensureProjectsTable(); err != nil {
		return nil, fmt.Errorf("ensure projects table: %w", err)
	}
	query := `
SELECT id, name, path, active, created_at, COALESCE(metadata, '') as metadata
FROM projects
WHERE id = ?
`
	p := &Project{}
	err := pr.manager.conn.QueryRow(query, id).Scan(
		&p.ID, &p.Name, &p.Path, &p.Active, &p.CreatedAt, &p.Metadata,
	)
	// Use errors.Is rather than == so wrapped ErrNoRows is still recognized.
	if errors.Is(err, sql.ErrNoRows) {
		return nil, fmt.Errorf("project with id %s not found", id)
	}
	if err != nil {
		return nil, fmt.Errorf("query project: %w", err)
	}
	return p, nil
}
// GetByPath retrieves a project by its filesystem path.
// Returns a "not found" error when no row matches.
func (pr *ProjectRegistry) GetByPath(path string) (*Project, error) {
	if err := pr.ensureProjectsTable(); err != nil {
		return nil, fmt.Errorf("ensure projects table: %w", err)
	}
	query := `
SELECT id, name, path, active, created_at, COALESCE(metadata, '') as metadata
FROM projects
WHERE path = ?
`
	p := &Project{}
	err := pr.manager.conn.QueryRow(query, path).Scan(
		&p.ID, &p.Name, &p.Path, &p.Active, &p.CreatedAt, &p.Metadata,
	)
	// Use errors.Is rather than == so wrapped ErrNoRows is still recognized.
	if errors.Is(err, sql.ErrNoRows) {
		return nil, fmt.Errorf("project with path %s not found", path)
	}
	if err != nil {
		return nil, fmt.Errorf("query project: %w", err)
	}
	return p, nil
}
// Slug regexps are compiled once at package init instead of on every call
// (the original compiled them inside generateSlug, a needless per-call cost).
var (
	// slugInvalidChars matches any run of characters not legal in a slug.
	slugInvalidChars = regexp.MustCompile(`[^a-z0-9-]+`)
	// slugHyphenRuns matches runs of consecutive hyphens.
	slugHyphenRuns = regexp.MustCompile(`-+`)
)

// generateSlug creates a URL-friendly slug from a project name: lowercase,
// with spaces/underscores/other characters folded to single hyphens and
// leading/trailing hyphens removed.
func generateSlug(name string) string {
	// Convert to lowercase.
	slug := strings.ToLower(name)
	// Replace spaces and underscores with hyphens.
	slug = strings.ReplaceAll(slug, " ", "-")
	slug = strings.ReplaceAll(slug, "_", "-")
	// Remove special characters, keeping only alphanumerics and hyphens.
	slug = slugInvalidChars.ReplaceAllString(slug, "-")
	// Remove leading/trailing hyphens.
	slug = strings.Trim(slug, "-")
	// Collapse multiple consecutive hyphens into one.
	slug = slugHyphenRuns.ReplaceAllString(slug, "-")
	return slug
}
// generateUniqueSlug ensures the slug is unique among project IDs, appending
// "-2", "-3", ... until a free slug is found. It gives up with an error after
// 1000 attempts to guard against an unbounded loop.
func (pr *ProjectRegistry) generateUniqueSlug(baseSlug string) (string, error) {
	candidate := baseSlug
	for counter := 2; ; counter++ {
		taken, err := pr.slugExists(candidate)
		if err != nil {
			return "", err
		}
		if !taken {
			return candidate, nil
		}
		// Candidate is taken; fall through to the next numbered variant.
		candidate = fmt.Sprintf("%s-%d", baseSlug, counter)
		// Safety limit to prevent an effectively infinite loop.
		if counter >= 1000 {
			return "", errors.New("unable to generate unique slug after 1000 attempts")
		}
	}
}
// slugExists reports whether a slug is already used as a project ID.
func (pr *ProjectRegistry) slugExists(slug string) (bool, error) {
	if err := pr.ensureProjectsTable(); err != nil {
		return false, err
	}
	var n int
	if err := pr.manager.conn.QueryRow(`SELECT COUNT(*) FROM projects WHERE id = ?`, slug).Scan(&n); err != nil {
		return false, fmt.Errorf("check slug existence: %w", err)
	}
	return n > 0, nil
}
// ensureProjectsTable creates the projects table and its path index if they
// do not already exist. It is safe to call repeatedly (all DDL uses
// IF NOT EXISTS) and is invoked defensively by every registry method.
//
// NOTE(review): the explicit index on path is likely redundant — SQLite
// creates an implicit index for the UNIQUE constraint on path; confirm
// before removing.
func (pr *ProjectRegistry) ensureProjectsTable() error {
	query := `
CREATE TABLE IF NOT EXISTS projects (
id TEXT PRIMARY KEY,
name TEXT NOT NULL,
path TEXT NOT NULL UNIQUE,
active BOOLEAN DEFAULT FALSE,
created_at TEXT NOT NULL,
metadata TEXT
)
`
	_, err := pr.manager.conn.Exec(query)
	if err != nil {
		return fmt.Errorf("create projects table: %w", err)
	}
	// Create index on path
	indexQuery := `CREATE INDEX IF NOT EXISTS idx_projects_path ON projects(path)`
	_, err = pr.manager.conn.Exec(indexQuery)
	if err != nil {
		return fmt.Errorf("create path index: %w", err)
	}
	return nil
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
// CANARY: REQ=CBIN-123; FEATURE="TokenStorage"; ASPECT=Storage; STATUS=IMPL; OWNER=canary; UPDATED=2025-10-16
package storage
import (
	"database/sql"
	"fmt"
	"strings"
	"time"

	"github.com/jmoiron/sqlx"
	_ "modernc.org/sqlite"
)
// Token represents a parsed CANARY token with extended metadata.
// Fields mirror the columns of the tokens table (see ensureTokensTable);
// timestamp fields are stored as strings.
type Token struct {
	ID         int    // database row ID (AUTOINCREMENT)
	ReqID      string // requirement identifier, e.g. "CBIN-123" or "BUG-API-001"
	Feature    string
	Aspect     string
	Status     string // lifecycle status, e.g. STUB, IMPL, TESTED, BENCHED
	FilePath   string // file in which the token was found
	LineNumber int
	Test       string
	Bench      string
	Owner      string
	Priority   int // lower values sort first in queries
	Phase      string
	Keywords   string
	SpecStatus string
	CreatedAt  string
	UpdatedAt  string
	StartedAt  string
	CompletedAt string
	CommitHash string
	Branch     string
	DependsOn  string
	Blocks     string
	RelatedTo  string
	RawToken   string // full original token text, kept for reference
	IndexedAt  string
	// CANARY: REQ=CBIN-136; FEATURE="DocDatabaseSchema"; ASPECT=Storage; STATUS=IMPL; UPDATED=2025-10-16
	// Documentation tracking fields
	DocPath      string // Comma-separated doc file paths (e.g., "user:docs/user.md,api:docs/api.md")
	DocHash      string // Comma-separated SHA256 hashes (abbreviated, first 16 chars)
	DocType      string // Documentation type (user, technical, feature, api, architecture)
	DocCheckedAt string // ISO 8601 timestamp of last staleness check
	DocStatus    string // DOC_CURRENT, DOC_STALE, DOC_MISSING, DOC_UNHASHED
	// CANARY: REQ=CBIN-146; FEATURE="TokenNamespacing"; ASPECT=Storage; STATUS=IMPL; UPDATED=2025-10-18
	// Multi-project support
	ProjectID string // Project identifier for token isolation
}
// Checkpoint represents a state snapshot of the token database at a point in
// time, with per-status counts and the full snapshot serialized as JSON.
type Checkpoint struct {
	ID          int
	Name        string
	Description string
	CommitHash  string // git commit the snapshot was taken at, if any
	CreatedAt   string
	TotalTokens int
	StubCount   int
	ImplCount   int
	TestedCount int
	BenchedCount int
	SnapshotJSON string // serialized snapshot payload
}
// DB wraps the SQLite database connection together with the path it was
// opened from.
type DB struct {
	conn *sqlx.DB
	path string // filesystem path passed to Open
}
// Open opens or creates the CANARY database at dbPath and enables SQLite
// foreign-key enforcement on the connection.
// Note: Migrations are handled automatically by the CLI's PersistentPreRunE.
func Open(dbPath string) (*DB, error) {
	handle, err := InitDB(dbPath)
	if err != nil {
		return nil, fmt.Errorf("initialize database: %w", err)
	}
	// Foreign keys are off by default in SQLite; turn them on per-connection.
	if _, err = handle.Exec("PRAGMA foreign_keys = ON"); err != nil {
		handle.Close()
		return nil, fmt.Errorf("enable foreign keys: %w", err)
	}
	return &DB{conn: handle, path: dbPath}, nil
}
// Close closes the underlying database connection, returning any error from
// the driver.
func (db *DB) Close() error {
	return db.conn.Close()
}
// UpsertToken inserts or updates a token.
//
// The row identity is (req_id, feature, file_path, line_number, project_id);
// on conflict all mutable columns are overwritten with the incoming values.
// The VALUES placeholder order must match the Exec argument order below.
func (db *DB) UpsertToken(token *Token) error {
	// Ensure tokens table exists before writing.
	if err := db.ensureTokensTable(); err != nil {
		return fmt.Errorf("ensure tokens table: %w", err)
	}
	query := `
INSERT INTO tokens (
req_id, feature, aspect, status, file_path, line_number,
test, bench, owner, priority, phase, keywords, spec_status,
created_at, updated_at, started_at, completed_at,
commit_hash, branch, depends_on, blocks, related_to,
raw_token, indexed_at,
doc_path, doc_hash, doc_type, doc_checked_at, doc_status,
project_id
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(req_id, feature, file_path, line_number, project_id)
DO UPDATE SET
aspect = excluded.aspect,
status = excluded.status,
test = excluded.test,
bench = excluded.bench,
owner = excluded.owner,
priority = excluded.priority,
phase = excluded.phase,
keywords = excluded.keywords,
spec_status = excluded.spec_status,
updated_at = excluded.updated_at,
started_at = excluded.started_at,
completed_at = excluded.completed_at,
commit_hash = excluded.commit_hash,
branch = excluded.branch,
depends_on = excluded.depends_on,
blocks = excluded.blocks,
related_to = excluded.related_to,
raw_token = excluded.raw_token,
indexed_at = excluded.indexed_at,
doc_path = excluded.doc_path,
doc_hash = excluded.doc_hash,
doc_type = excluded.doc_type,
doc_checked_at = excluded.doc_checked_at,
doc_status = excluded.doc_status,
project_id = excluded.project_id
`
	// Argument order mirrors the column list above exactly.
	_, err := db.conn.Exec(query,
		token.ReqID, token.Feature, token.Aspect, token.Status,
		token.FilePath, token.LineNumber,
		token.Test, token.Bench, token.Owner,
		token.Priority, token.Phase, token.Keywords, token.SpecStatus,
		token.CreatedAt, token.UpdatedAt, token.StartedAt, token.CompletedAt,
		token.CommitHash, token.Branch,
		token.DependsOn, token.Blocks, token.RelatedTo,
		token.RawToken, token.IndexedAt,
		token.DocPath, token.DocHash, token.DocType, token.DocCheckedAt, token.DocStatus,
		token.ProjectID,
	)
	return err
}
// GetTokensByReqID retrieves all tokens for a requirement, ordered by
// priority then feature name. The column list must stay in sync with
// scanTokens.
func (db *DB) GetTokensByReqID(reqID string) ([]*Token, error) {
	const query = `
SELECT id, req_id, feature, aspect, status, file_path, line_number,
test, bench, owner, priority, phase, keywords, spec_status,
created_at, updated_at, started_at, completed_at,
commit_hash, branch, depends_on, blocks, related_to,
raw_token, indexed_at,
doc_path, doc_hash, doc_type, doc_checked_at, doc_status
FROM tokens
WHERE req_id = ?
ORDER BY priority ASC, feature ASC
`
	rs, err := db.conn.Query(query, reqID)
	if err != nil {
		return nil, err
	}
	defer rs.Close()
	return scanTokens(rs)
}
// isHiddenPath determines if a token should be hidden based on its file path.
// Hidden paths include test files, templates, documentation examples, and AI
// agent directories. A path is hidden when it contains any of the patterns
// below as a substring.
func isHiddenPath(filePath string) bool {
	hiddenPatterns := []string{
		// Test files
		"_test.go",
		"Test.",
		"/tests/",
		"/test/",
		// Template directories
		".canary/templates/",
		"/templates/",
		"/base/",
		"/embedded/base/",
		"/embedded/",
		// Documentation examples
		"IMPLEMENTATION_SUMMARY",
		"FINAL_SUMMARY",
		"README_CANARY.md",
		"GAP_ANALYSIS.md",
		// AI agent directories
		".claude/",
		".cursor/",
		".github/prompts/",
		".windsurf/",
		".kilocode/",
		".roo/",
		".opencode/",
		".codex/",
		".augment/",
		".codebuddy/",
		".amazonq/",
	}
	for _, pattern := range hiddenPatterns {
		// strings.Contains replaces the hand-rolled helper; the original's
		// extra length guard was redundant (substring search already handles it).
		if strings.Contains(filePath, pattern) {
			return true
		}
	}
	return false
}
// contains reports whether s contains substr. Delegates to the standard
// library instead of the previous hand-rolled substring scan.
func contains(s, substr string) bool {
	return strings.Contains(s, substr)
}
// indexOfSubstring returns the index of the first occurrence of substr in s,
// or -1 if not found. Delegates to strings.Index, which uses an optimized
// search rather than the previous O(len(s)*len(substr)) manual scan.
func indexOfSubstring(s, substr string) int {
	return strings.Index(s, substr)
}
// CANARY: REQ=CBIN-145; FEATURE="PriorityFiltering"; ASPECT=Storage; STATUS=IMPL; UPDATED=2025-10-17
// ListTokens retrieves tokens with filters and ordering.
// idPattern is a regex pattern for filtering requirement IDs (e.g.,
// "CBIN-[1-9][0-9]{2,}"); any non-empty value enables a fixed GLOB-based
// filter matching CBIN-100+ and BUG-ASPECT-NNN IDs while excluding common
// placeholder IDs. Hidden paths (tests, templates, agent dirs) are excluded
// unless filters["include_hidden"] == "true". limit <= 0 means no limit.
func (db *DB) ListTokens(filters map[string]string, idPattern string, orderBy string, limit int) ([]*Token, error) {
	query := `
SELECT id, req_id, feature, aspect, status, file_path, line_number,
test, bench, owner, priority, phase, keywords, spec_status,
created_at, updated_at, started_at, completed_at,
commit_hash, branch, depends_on, blocks, related_to,
raw_token, indexed_at,
doc_path, doc_hash, doc_type, doc_checked_at, doc_status
FROM tokens
WHERE 1=1
`
	args := []interface{}{}
	// Apply ID pattern filter using GLOB (SQLite pattern matching).
	// Since we can't easily convert an arbitrary regex to GLOB, a non-empty
	// idPattern enables a fixed filter that excludes placeholder IDs and
	// matches the two real ID families.
	if idPattern != "" {
		query += " AND req_id NOT LIKE 'CBIN-XXX%'"
		query += " AND req_id NOT LIKE 'CBIN-###%'"
		query += " AND req_id NOT LIKE '{{%'"
		query += " AND req_id NOT LIKE 'REQ-XXX%'"
		// Match 3+ digit CBIN IDs (CBIN-100 and above) OR BUG-ASPECT-NNN format
		query += " AND ("
		query += " (req_id GLOB 'CBIN-[0-9][0-9][0-9]*' AND req_id NOT GLOB 'CBIN-0[0-9][0-9]*')" // CBIN-100 and above
		query += " OR req_id GLOB 'BUG-*-[0-9][0-9][0-9]*'" // BUG-ASPECT-NNN format
		query += " )"
	}
	// Filter hidden paths by default (unless include_hidden is set).
	// A missing key yields "" which correctly enables the filter, so the
	// two-value comma-ok form is unnecessary here.
	includeHidden := filters["include_hidden"]
	if includeHidden != "true" {
		// Exclude test files
		query += " AND file_path NOT LIKE '%_test.go%'"
		query += " AND file_path NOT LIKE '%Test.%'"
		query += " AND file_path NOT LIKE '%/tests/%'"
		query += " AND file_path NOT LIKE '%/test/%'"
		// Exclude template directories
		query += " AND file_path NOT LIKE '%.canary/templates/%'"
		query += " AND file_path NOT LIKE '%/templates/%'"
		query += " AND file_path NOT LIKE '%/base/%'"
		query += " AND file_path NOT LIKE '%/embedded/%'"
		// Exclude documentation examples
		query += " AND file_path NOT LIKE '%IMPLEMENTATION_SUMMARY%'"
		query += " AND file_path NOT LIKE '%FINAL_SUMMARY%'"
		query += " AND file_path NOT LIKE '%README_CANARY.md%'"
		// Exclude AI agent directories
		query += " AND file_path NOT LIKE '.claude/%'"
		query += " AND file_path NOT LIKE '.cursor/%'"
		query += " AND file_path NOT LIKE '.github/prompts/%'"
		query += " AND file_path NOT LIKE '.windsurf/%'"
		query += " AND file_path NOT LIKE '.kilocode/%'"
		query += " AND file_path NOT LIKE '.roo/%'"
		query += " AND file_path NOT LIKE '.opencode/%'"
		query += " AND file_path NOT LIKE '.codex/%'"
		query += " AND file_path NOT LIKE '.augment/%'"
		query += " AND file_path NOT LIKE '.codebuddy/%'"
		query += " AND file_path NOT LIKE '.amazonq/%'"
	}
	// Apply equality/range filters; all values are bound as parameters.
	if v, ok := filters["status"]; ok {
		query += " AND status = ?"
		args = append(args, v)
	}
	if v, ok := filters["aspect"]; ok {
		query += " AND aspect = ?"
		args = append(args, v)
	}
	if v, ok := filters["spec_status"]; ok {
		query += " AND spec_status = ?"
		args = append(args, v)
	}
	if v, ok := filters["phase"]; ok {
		query += " AND phase = ?"
		args = append(args, v)
	}
	if v, ok := filters["owner"]; ok {
		query += " AND owner = ?"
		args = append(args, v)
	}
	if v, ok := filters["priority_min"]; ok {
		query += " AND priority >= ?"
		args = append(args, v)
	}
	if v, ok := filters["priority_max"]; ok {
		query += " AND priority <= ?"
		args = append(args, v)
	}
	// Ordering.
	// SECURITY(review): orderBy is concatenated into the SQL, not bound, so
	// it must never come from untrusted input — callers pass fixed strings.
	if orderBy == "" {
		orderBy = "priority ASC, updated_at DESC"
	}
	query += " ORDER BY " + orderBy
	// Limit
	if limit > 0 {
		query += " LIMIT ?"
		args = append(args, limit)
	}
	rows, err := db.conn.Query(query, args...)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	return scanTokens(rows)
}
// SearchTokens searches tokens whose keywords, feature, or requirement ID
// contain the given text (case-sensitive LIKE match), ordered by priority.
func (db *DB) SearchTokens(keywords string) ([]*Token, error) {
	const query = `
SELECT id, req_id, feature, aspect, status, file_path, line_number,
test, bench, owner, priority, phase, keywords, spec_status,
created_at, updated_at, started_at, completed_at,
commit_hash, branch, depends_on, blocks, related_to,
raw_token, indexed_at,
doc_path, doc_hash, doc_type, doc_checked_at, doc_status
FROM tokens
WHERE keywords LIKE ? OR feature LIKE ? OR req_id LIKE ?
ORDER BY priority ASC
`
	needle := "%" + keywords + "%"
	rs, err := db.conn.Query(query, needle, needle, needle)
	if err != nil {
		return nil, err
	}
	defer rs.Close()
	return scanTokens(rs)
}
// CANARY: REQ=CBIN-CLI-001; FEATURE="QueryAbstraction"; ASPECT=Storage; STATUS=TESTED; TEST=TestCANARY_CBIN_CLI_001_Storage_GetFilesByReqID; UPDATED=2025-10-16
// GetFilesByReqID groups a requirement's tokens by file path. When
// excludeSpecs is true, tokens living in spec/template/plan files are
// dropped from the result.
func (db *DB) GetFilesByReqID(reqID string, excludeSpecs bool) (map[string][]*Token, error) {
	tokens, err := db.GetTokensByReqID(reqID)
	if err != nil {
		return nil, err
	}
	grouped := make(map[string][]*Token)
	for _, tok := range tokens {
		if excludeSpecs && shouldExcludeFile(tok.FilePath) {
			continue
		}
		grouped[tok.FilePath] = append(grouped[tok.FilePath], tok)
	}
	return grouped, nil
}
// shouldExcludeFile reports whether the path points at a spec, template, or
// plan file that should be excluded from per-file token groupings.
func shouldExcludeFile(path string) bool {
	excludePatterns := []string{
		".canary/specs/",
		".canary/templates/",
		"base/",
		"/plan.md",
		"/spec.md",
	}
	for _, pattern := range excludePatterns {
		// Standard-library substring search instead of the local helper.
		if strings.Contains(path, pattern) {
			return true
		}
	}
	return false
}
// UpdatePriority sets the priority for every token matching the given
// requirement ID and feature.
func (db *DB) UpdatePriority(reqID, feature string, priority int) error {
	_, err := db.conn.Exec(
		`UPDATE tokens SET priority = ? WHERE req_id = ? AND feature = ?`,
		priority, reqID, feature,
	)
	return err
}
// UpdateSpecStatus sets the spec status for every token of a requirement.
func (db *DB) UpdateSpecStatus(reqID, specStatus string) error {
	_, err := db.conn.Exec(
		`UPDATE tokens SET spec_status = ? WHERE req_id = ?`,
		specStatus, reqID,
	)
	return err
}
// CreateCheckpoint creates a state snapshot: it counts tokens per status and
// records the counts plus the provided snapshot JSON as a checkpoint row
// stamped with the current UTC time.
func (db *DB) CreateCheckpoint(name, description, commitHash, snapshotJSON string) error {
	// Gather the current per-status counts in one aggregate query.
	// COALESCE guards the SUMs: over an empty tokens table SUM() yields NULL,
	// which would otherwise fail to Scan into the int destinations.
	var total, stub, impl, tested, benched int
	err := db.conn.QueryRow(`
SELECT
COUNT(*),
COALESCE(SUM(CASE WHEN status = 'STUB' THEN 1 ELSE 0 END), 0),
COALESCE(SUM(CASE WHEN status = 'IMPL' THEN 1 ELSE 0 END), 0),
COALESCE(SUM(CASE WHEN status = 'TESTED' THEN 1 ELSE 0 END), 0),
COALESCE(SUM(CASE WHEN status = 'BENCHED' THEN 1 ELSE 0 END), 0)
FROM tokens
`).Scan(&total, &stub, &impl, &tested, &benched)
	if err != nil {
		return err
	}
	query := `
INSERT INTO checkpoints (name, description, commit_hash, created_at,
total_tokens, stub_count, impl_count, tested_count, benched_count,
snapshot_json)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`
	_, err = db.conn.Exec(query, name, description, commitHash, time.Now().UTC().Format(time.RFC3339),
		total, stub, impl, tested, benched, snapshotJSON)
	return err
}
// GetCheckpoints retrieves all checkpoints, newest first.
func (db *DB) GetCheckpoints() ([]*Checkpoint, error) {
	const query = `
SELECT id, name, description, commit_hash, created_at,
total_tokens, stub_count, impl_count, tested_count, benched_count,
snapshot_json
FROM checkpoints
ORDER BY created_at DESC
`
	rs, err := db.conn.Query(query)
	if err != nil {
		return nil, err
	}
	defer rs.Close()
	var result []*Checkpoint
	for rs.Next() {
		c := &Checkpoint{}
		if err := rs.Scan(&c.ID, &c.Name, &c.Description, &c.CommitHash, &c.CreatedAt,
			&c.TotalTokens, &c.StubCount, &c.ImplCount, &c.TestedCount, &c.BenchedCount,
			&c.SnapshotJSON); err != nil {
			return nil, err
		}
		result = append(result, c)
	}
	return result, rs.Err()
}
// scanTokens converts rows into Token values.
// The Scan destination order must match the 29-column SELECT list used by
// GetTokensByReqID, ListTokens, and SearchTokens (it does NOT include
// project_id; see scanTokensWithProject for that variant).
func scanTokens(rows *sql.Rows) ([]*Token, error) {
	var tokens []*Token
	for rows.Next() {
		t := &Token{}
		err := rows.Scan(
			&t.ID, &t.ReqID, &t.Feature, &t.Aspect, &t.Status,
			&t.FilePath, &t.LineNumber,
			&t.Test, &t.Bench, &t.Owner,
			&t.Priority, &t.Phase, &t.Keywords, &t.SpecStatus,
			&t.CreatedAt, &t.UpdatedAt, &t.StartedAt, &t.CompletedAt,
			&t.CommitHash, &t.Branch,
			&t.DependsOn, &t.Blocks, &t.RelatedTo,
			&t.RawToken, &t.IndexedAt,
			&t.DocPath, &t.DocHash, &t.DocType, &t.DocCheckedAt, &t.DocStatus,
		)
		if err != nil {
			return nil, err
		}
		tokens = append(tokens, t)
	}
	// Surface any iteration error alongside the collected tokens.
	return tokens, rows.Err()
}
// ensureTokensTable creates the tokens table and its secondary indexes if
// they do not already exist. Safe to call repeatedly (all DDL uses
// IF NOT EXISTS). The row identity used by UpsertToken's ON CONFLICT clause
// is the UNIQUE constraint declared at the bottom of the table.
func (db *DB) ensureTokensTable() error {
	query := `
CREATE TABLE IF NOT EXISTS tokens (
id INTEGER PRIMARY KEY AUTOINCREMENT,
req_id TEXT NOT NULL,
feature TEXT NOT NULL,
aspect TEXT NOT NULL,
status TEXT NOT NULL,
-- File location
file_path TEXT NOT NULL,
line_number INTEGER NOT NULL,
-- Optional fields
test TEXT,
bench TEXT,
owner TEXT,
-- Extended metadata
priority INTEGER DEFAULT 5,
phase TEXT,
keywords TEXT,
-- Spec lifecycle
spec_status TEXT DEFAULT 'draft',
-- Dates
created_at TEXT,
updated_at TEXT NOT NULL,
started_at TEXT,
completed_at TEXT,
-- Git integration
commit_hash TEXT,
branch TEXT,
-- Relationships
depends_on TEXT,
blocks TEXT,
related_to TEXT,
-- Full token content for reference
raw_token TEXT NOT NULL,
-- Timestamps
indexed_at TEXT NOT NULL,
-- Documentation tracking
doc_path TEXT,
doc_hash TEXT,
doc_type TEXT,
doc_checked_at TEXT,
doc_status TEXT,
-- Multi-project support
project_id TEXT DEFAULT '',
UNIQUE(req_id, feature, file_path, line_number, project_id)
)
`
	_, err := db.conn.Exec(query)
	if err != nil {
		return fmt.Errorf("create tokens table: %w", err)
	}
	// Create indexes on the columns the list/search queries filter by.
	indexes := []string{
		`CREATE INDEX IF NOT EXISTS idx_tokens_req_id ON tokens(req_id)`,
		`CREATE INDEX IF NOT EXISTS idx_tokens_status ON tokens(status)`,
		`CREATE INDEX IF NOT EXISTS idx_tokens_priority ON tokens(priority)`,
		`CREATE INDEX IF NOT EXISTS idx_tokens_aspect ON tokens(aspect)`,
		`CREATE INDEX IF NOT EXISTS idx_tokens_spec_status ON tokens(spec_status)`,
		`CREATE INDEX IF NOT EXISTS idx_tokens_phase ON tokens(phase)`,
		`CREATE INDEX IF NOT EXISTS idx_tokens_keywords ON tokens(keywords)`,
		`CREATE INDEX IF NOT EXISTS idx_tokens_project_id ON tokens(project_id)`,
	}
	for _, indexQuery := range indexes {
		if _, err := db.conn.Exec(indexQuery); err != nil {
			return fmt.Errorf("create index: %w", err)
		}
	}
	return nil
}
// CANARY: REQ=CBIN-146; FEATURE="TokenNamespacing"; ASPECT=Storage; STATUS=IMPL; UPDATED=2025-10-18
// GetTokensByProject retrieves all tokens for a specific project. Tokens
// with a NULL project_id are treated as belonging to the "" project.
func (db *DB) GetTokensByProject(projectID string) ([]*Token, error) {
	if err := db.ensureTokensTable(); err != nil {
		return nil, fmt.Errorf("ensure tokens table: %w", err)
	}
	const query = `
SELECT id, req_id, feature, aspect, status, file_path, line_number,
test, bench, owner, priority, phase, keywords, spec_status,
created_at, updated_at, started_at, completed_at,
commit_hash, branch, depends_on, blocks, related_to,
raw_token, indexed_at,
doc_path, doc_hash, doc_type, doc_checked_at, doc_status,
COALESCE(project_id, '') as project_id
FROM tokens
WHERE COALESCE(project_id, '') = ?
ORDER BY priority ASC, feature ASC
`
	rs, err := db.conn.Query(query, projectID)
	if err != nil {
		return nil, err
	}
	defer rs.Close()
	return scanTokensWithProject(rs)
}
// GetAllTokens retrieves every token across all projects, ordered by
// priority then most-recently-updated.
func (db *DB) GetAllTokens() ([]*Token, error) {
	if err := db.ensureTokensTable(); err != nil {
		return nil, fmt.Errorf("ensure tokens table: %w", err)
	}
	const query = `
SELECT id, req_id, feature, aspect, status, file_path, line_number,
test, bench, owner, priority, phase, keywords, spec_status,
created_at, updated_at, started_at, completed_at,
commit_hash, branch, depends_on, blocks, related_to,
raw_token, indexed_at,
doc_path, doc_hash, doc_type, doc_checked_at, doc_status,
COALESCE(project_id, '') as project_id
FROM tokens
ORDER BY priority ASC, updated_at DESC
`
	rs, err := db.conn.Query(query)
	if err != nil {
		return nil, err
	}
	defer rs.Close()
	return scanTokensWithProject(rs)
}
// GetTokensByReqIDAndProject retrieves a requirement's tokens scoped to one
// project. Tokens with a NULL project_id are treated as the "" project.
func (db *DB) GetTokensByReqIDAndProject(reqID, projectID string) ([]*Token, error) {
	if err := db.ensureTokensTable(); err != nil {
		return nil, fmt.Errorf("ensure tokens table: %w", err)
	}
	const query = `
SELECT id, req_id, feature, aspect, status, file_path, line_number,
test, bench, owner, priority, phase, keywords, spec_status,
created_at, updated_at, started_at, completed_at,
commit_hash, branch, depends_on, blocks, related_to,
raw_token, indexed_at,
doc_path, doc_hash, doc_type, doc_checked_at, doc_status,
COALESCE(project_id, '') as project_id
FROM tokens
WHERE req_id = ? AND COALESCE(project_id, '') = ?
ORDER BY priority ASC, feature ASC
`
	rs, err := db.conn.Query(query, reqID, projectID)
	if err != nil {
		return nil, err
	}
	defer rs.Close()
	return scanTokensWithProject(rs)
}
// scanTokensWithProject converts rows into Token values, including the
// trailing project_id column. The Scan destination order must match the
// 30-column SELECT list used by GetTokensByProject, GetAllTokens, and
// GetTokensByReqIDAndProject.
func scanTokensWithProject(rows *sql.Rows) ([]*Token, error) {
	var tokens []*Token
	for rows.Next() {
		t := &Token{}
		err := rows.Scan(
			&t.ID, &t.ReqID, &t.Feature, &t.Aspect, &t.Status,
			&t.FilePath, &t.LineNumber,
			&t.Test, &t.Bench, &t.Owner,
			&t.Priority, &t.Phase, &t.Keywords, &t.SpecStatus,
			&t.CreatedAt, &t.UpdatedAt, &t.StartedAt, &t.CompletedAt,
			&t.CommitHash, &t.Branch,
			&t.DependsOn, &t.Blocks, &t.RelatedTo,
			&t.RawToken, &t.IndexedAt,
			&t.DocPath, &t.DocHash, &t.DocType, &t.DocCheckedAt, &t.DocStatus,
			&t.ProjectID,
		)
		if err != nil {
			return nil, err
		}
		tokens = append(tokens, t)
	}
	// Surface any iteration error alongside the collected tokens.
	return tokens, rows.Err()
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
// CANARY: REQ=CBIN-146; FEATURE="TestInfrastructure"; ASPECT=Storage; STATUS=IMPL; UPDATED=2025-10-18
package testutil
import (
"os"
"path/filepath"
"testing"
)
// TempDir creates a temporary directory for testing
// Returns the path and a cleanup function
func TempDir(t *testing.T) (string, func()) {
t.Helper()
tmpDir, err := os.MkdirTemp("", "canary-test-*")
if err != nil {
t.Fatalf("failed to create temp dir: %v", err)
}
cleanup := func() {
os.RemoveAll(tmpDir)
}
return tmpDir, cleanup
}
// TempHomeDir creates a temporary HOME directory for testing
// Returns the original HOME value and a cleanup function
func TempHomeDir(t *testing.T) (string, func()) {
t.Helper()
originalHome := os.Getenv("HOME")
tmpHome, err := os.MkdirTemp("", "canary-home-*")
if err != nil {
t.Fatalf("failed to create temp home dir: %v", err)
}
os.Setenv("HOME", tmpHome)
cleanup := func() {
os.RemoveAll(tmpHome)
if originalHome != "" {
os.Setenv("HOME", originalHome)
} else {
os.Unsetenv("HOME")
}
}
return tmpHome, cleanup
}
// SetupProjectDir creates a project directory with .canary subdirectory
func SetupProjectDir(t *testing.T, dir string) error {
t.Helper()
canaryDir := filepath.Join(dir, ".canary")
if err := os.MkdirAll(canaryDir, 0755); err != nil {
return err
}
return nil
}
// Chdir changes to a directory and returns a cleanup function to restore
func Chdir(t *testing.T, dir string) func() {
t.Helper()
originalDir, err := os.Getwd()
if err != nil {
t.Fatalf("failed to get current directory: %v", err)
}
if err := os.Chdir(dir); err != nil {
t.Fatalf("failed to change directory to %s: %v", dir, err)
}
return func() {
if err := os.Chdir(originalDir); err != nil {
t.Fatalf("failed to restore directory: %v", err)
}
}
}
// FileExists checks if a file exists at the given path
func FileExists(path string) bool {
_, err := os.Stat(path)
return err == nil
}
// TB is a minimal interface covering the subset of *testing.T and *testing.B
// that these helpers use (Helper and Fatalf), allowing code to be shared
// between tests and benchmarks.
type TB interface {
	Helper()
	Fatalf(format string, args ...interface{})
}
// TempHomeDirB is the benchmark counterpart of TempHomeDir: it points HOME
// at a fresh temporary directory and returns the new HOME path plus a
// cleanup function that removes it and restores (or unsets) the old value.
func TempHomeDirB(b *testing.B) (string, func()) {
	b.Helper()
	prevHome := os.Getenv("HOME")
	home, err := os.MkdirTemp("", "canary-home-*")
	if err != nil {
		b.Fatalf("failed to create temp home dir: %v", err)
	}
	os.Setenv("HOME", home)
	restore := func() {
		os.RemoveAll(home)
		if prevHome == "" {
			os.Unsetenv("HOME")
			return
		}
		os.Setenv("HOME", prevHome)
	}
	return home, restore
}
// TempDirB is the benchmark counterpart of TempDir: it creates a temporary
// directory and returns its path plus a cleanup function that removes it.
func TempDirB(b *testing.B) (string, func()) {
	b.Helper()
	dir, err := os.MkdirTemp("", "canary-test-*")
	if err != nil {
		b.Fatalf("failed to create temp dir: %v", err)
	}
	return dir, func() { os.RemoveAll(dir) }
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
package main
// CANARY: REQ=CBIN-101; FEATURE="ScannerCore"; ASPECT=Engine; STATUS=TESTED; TEST=TestCANARY_CBIN_101_Engine_ScanBasic; BENCH=BenchmarkCANARY_CBIN_101_Engine_Scan; OWNER=canary; UPDATED=2025-10-15
import (
"encoding/json"
"flag"
"fmt"
"log"
"os"
"time"
)
// report is the top-level JSON document the scanner emits (status.json).
type report struct {
	GeneratedAt  time.Time        `json:"generated_at"`
	Requirements []requirementRow `json:"requirements"`
	Summary      summary          `json:"summary"`
}

// requirementRow groups all feature entries belonging to one requirement ID.
type requirementRow struct {
	ID       string         `json:"id"`
	Features []featureEntry `json:"features"`
}

// featureEntry describes a single feature of a requirement: its status and
// the files/tests/benchmarks that reference it.
type featureEntry struct {
	Feature string   `json:"feature"`
	Aspect  string   `json:"aspect"`
	Status  string   `json:"status"`
	Files   []string `json:"files"`
	Tests   []string `json:"tests"`
	Benches []string `json:"benches"`
	Owner   string   `json:"owner,omitempty"`
	Updated string   `json:"updated,omitempty"`
}

// summary holds aggregate counts keyed by status and by aspect.
type summary struct {
	ByStatus map[string]int `json:"by_status"`
	ByAspect map[string]int `json:"by_aspect"`
}
// main runs the CANARY scanner CLI.
//
// Exit codes (as used below): 0 = success, 2 = policy failure (stale tokens
// in strict mode, or claim verification failure), 3 = operational error
// (scan, parse, or output-write failure). On policy failures the outputs are
// still written so they can be inspected.
func main() {
	var root, out, csv, verify string
	var strict bool
	flag.StringVar(&root, "root", ".", "repository root to scan")
	flag.StringVar(&out, "out", "status.json", "output JSON file")
	flag.StringVar(&csv, "csv", "", "optional CSV output file")
	flag.StringVar(&verify, "verify", "", "verify claims in GAP_ANALYSIS.md")
	flag.BoolVar(&strict, "strict", false, "strict mode: fail on stale UPDATED (>30 days) for TESTED/BENCHED")
	flag.Parse()
	rep, err := Scan(root)
	if err != nil {
		log.Printf("ERROR scan: %v", err)
		os.Exit(3)
	}
	rep.GeneratedAt = time.Now().UTC()
	// Strict staleness check: TESTED/BENCHED tokens older than 30 days fail.
	if strict {
		if err := CheckStaleness(rep, 30*24*time.Hour); err != nil {
			log.Printf("CANARY_STALE %v", err)
			// still write outputs for inspection
			//nolint:errcheck // Error doesn't matter here, we're exiting anyway
			writeOutputs(rep, out, csv)
			os.Exit(2)
		}
	}
	// Verify GAP/claims when a claims file was supplied.
	if verify != "" {
		claims, err := ParseGAPClaims(verify)
		if err != nil {
			log.Printf("ERROR verify-parse: %v", err)
			//nolint:errcheck // Error doesn't matter here, we're exiting anyway
			writeOutputs(rep, out, csv)
			os.Exit(3)
		}
		if err := VerifyClaims(rep, claims); err != nil {
			log.Printf("CANARY_VERIFY_FAIL %v", err)
			//nolint:errcheck // Error doesn't matter here, we're exiting anyway
			writeOutputs(rep, out, csv)
			os.Exit(2)
		}
	}
	if err := writeOutputs(rep, out, csv); err != nil {
		log.Printf("ERROR write: %v", err)
		os.Exit(3)
	}
}
// writeOutputs writes the report as minified JSON to out and, when csv is
// non-empty, also as CSV to that path. It prints a confirmation line on
// success.
func writeOutputs(rep report, out, csv string) error {
	// JSON
	jf, err := os.Create(out)
	if err != nil {
		return err
	}
	enc := json.NewEncoder(jf)
	enc.SetEscapeHTML(false)
	// Minified JSON output (no indentation)
	if err := enc.Encode(rep); err != nil {
		jf.Close()
		return err
	}
	// Close explicitly instead of defer: a write error surfacing at close
	// time (e.g. full disk) was silently dropped by the original defer.
	if err := jf.Close(); err != nil {
		return err
	}
	// CSV optional
	if csv != "" {
		if err := WriteCSV(rep, csv); err != nil {
			return err
		}
	}
	fmt.Printf("CANARY_OK wrote %s\n", out)
	return nil
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
package main
import (
"context"
"encoding/json"
"errors"
"net/http"
"github.com/invopop/jsonschema"
"github.com/openai/openai-go"
"github.com/openai/openai-go/option"
)
// doer abstracts the HTTP client so it can be replaced in tests
// (satisfied by *http.Client).
type doer interface {
	Do(req *http.Request) (*http.Response, error)
}

// ClientOption configures an LLMClient during construction via LLM().
type ClientOption func(*LLMClient) error

// LLMClient wraps the OpenAI-compatible chat client together with the
// connection settings collected from ClientOptions.
type LLMClient struct {
	// NOTE(review): storing a context in a struct is discouraged in Go;
	// ctx is kept here by the existing design — confirm before relying on it.
	ctx        context.Context
	baseURL    string
	key        string
	httpClient doer // http.Client
	middleware []option.Middleware
	openaiC    openai.Client
}
// LLM constructs an LLMClient, applying each ClientOption in order and then
// building the underlying OpenAI client from whichever settings were set
// (base URL, API key, HTTP client, middleware). The first option error
// aborts construction.
func LLM(ctx context.Context, opts ...ClientOption) (*LLMClient, error) {
	client := &LLMClient{ctx: ctx}
	for _, apply := range opts {
		if err := apply(client); err != nil {
			return nil, err
		}
	}
	var reqOpts []option.RequestOption
	if client.baseURL != "" {
		reqOpts = append(reqOpts, option.WithBaseURL(client.baseURL))
	}
	if client.key != "" {
		reqOpts = append(reqOpts, option.WithAPIKey(client.key))
	}
	if client.httpClient != nil {
		reqOpts = append(reqOpts, option.WithHTTPClient(client.httpClient))
	}
	if len(client.middleware) > 0 {
		reqOpts = append(reqOpts, option.WithMiddleware(client.middleware...))
	}
	client.openaiC = openai.NewClient(reqOpts...)
	return client, nil
}
// GenerateSchema reflects a JSON schema for T suitable for OpenAI
// structured outputs. Structured Outputs supports only a subset of JSON
// Schema, so additional properties are disallowed and $ref indirection is
// disabled.
func GenerateSchema[T any]() any {
	var v T
	r := jsonschema.Reflector{
		AllowAdditionalProperties: false,
		DoNotReference:            true,
	}
	return r.Reflect(v)
}
// Output is the envelope the model is asked to fill: the caller's payload
// type sits under a single "data" field in the generated schema.
type Output[T any] struct {
	Data T `json:"data"`
}
// Single performs one structured-output chat completion against model and
// decodes the JSON reply into a T.
//
// Each entry of system becomes a system message and each entry of context
// becomes a user message, in that order. The response format is
// constrained by a JSON schema generated from Output[T] (Strict=false),
// and the first choice's content is unmarshalled and unwrapped from the
// Output envelope. On any error the zero value of T is returned with it.
//
// NOTE(review): the request uses the ctx argument, not c.ctx — the struct
// field appears unused here; confirm it is needed at all.
func Single[T any](
	ctx context.Context, c *LLMClient,
	model, desc string,
	system []string,
	context ...string,
) (T, error) {
	msgs := []openai.ChatCompletionMessageParamUnion{}
	for _, c := range system {
		msgs = append(msgs, openai.ChatCompletionMessageParamUnion{
			OfSystem: &openai.ChatCompletionSystemMessageParam{
				Content: openai.ChatCompletionSystemMessageParamContentUnion{
					OfString: openai.String(c),
				},
			},
		})
	}
	for _, c := range context {
		msgs = append(msgs, openai.ChatCompletionMessageParamUnion{
			OfUser: &openai.ChatCompletionUserMessageParam{
				Content: openai.ChatCompletionUserMessageParamContentUnion{
					OfString: openai.String(c),
				},
			},
		})
	}
	// Schema for the Output[T] envelope (see GenerateSchema for the
	// structured-outputs subset flags).
	var schema = GenerateSchema[*Output[T]]()
	schemaParam := openai.ResponseFormatJSONSchemaJSONSchemaParam{
		Name:        "request",
		Description: openai.String(desc),
		Schema:      schema,
		Strict:      openai.Bool(false),
	}
	var data T
	chat, err := c.openaiC.Chat.Completions.New(ctx, openai.ChatCompletionNewParams{
		Messages: msgs,
		ResponseFormat: openai.ChatCompletionNewParamsResponseFormatUnion{
			OfJSONSchema: &openai.ResponseFormatJSONSchemaParam{
				JSONSchema: schemaParam,
			},
		},
		Model: model,
	})
	if err != nil {
		return data, err
	}
	if len(chat.Choices) == 0 {
		return data, errors.New("no choices returned")
	}
	// &output is a **Output[T]; json.Unmarshal accepts the extra level of
	// indirection and fills the pointed-to struct.
	output := &Output[T]{}
	err = json.Unmarshal([]byte(chat.Choices[0].Message.Content), &output)
	if err != nil {
		return data, err
	}
	return output.Data, nil
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
package prompts
import _ "embed"
// System prompt texts embedded at build time from sys/*.md.
// Each //go:embed directive must remain immediately above its var.

//go:embed sys/init.md
var Init string

//go:embed sys/policy.md
var Policy string

//go:embed sys/requirements.md
var Requirements string

//go:embed sys/evaluate.md
var Evaluate string
// All returns every embedded system prompt keyed by its short name.
func All() map[string]string {
	prompts := make(map[string]string, 4)
	prompts["init"] = Init
	prompts["policy"] = Policy
	prompts["requirements"] = Requirements
	prompts["evaluate"] = Evaluate
	return prompts
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
package main
import (
"bufio"
"encoding/csv"
"errors"
"fmt"
"io/fs"
"os"
"path/filepath"
"regexp"
"sort"
"strings"
"time"
)
// canaryRe matches a CANARY token line behind any supported comment
// leader: //, #, --, a markdown reference-link comment, or an HTML
// comment opener; the capture is the raw KEY=VAL payload.
var canaryRe = regexp.MustCompile(`^\s*(?://|#|--|\[//\]:\s*#|<!--)\s*CANARY:\s*(.*)$`)

// kvRe captures one KEY=VALUE pair inside a ';'-separated token payload.
var kvRe = regexp.MustCompile(`\s*([^=;\s]+)\s*=\s*([^;]+)\s*`)

// directories to skip during scan
var skipDirs = map[string]bool{
	".git": true, "node_modules": true, "vendor": true, "bin": true, "zig-out": true, ".zig-cache": true,
	".crush": true, "data": true, "certs": true,
}
// Scan walks root collecting CANARY tokens from text files and aggregates
// them into a report. Tokens sharing the same (REQ, FEATURE, ASPECT,
// STATUS, OWNER, UPDATED) tuple are merged, accumulating the files, TEST
// names, and BENCH names that mention them. Statuses are auto-promoted
// from evidence: IMPL with tests becomes TESTED; IMPL/TESTED with
// benchmarks becomes BENCHED. A malformed token aborts the whole scan.
func Scan(root string) (report, error) {
	// key identifies one logical token; val accumulates its evidence.
	type key struct {
		id, feature, aspect, status, owner, updated string
	}
	type val struct {
		files   map[string]struct{}
		tests   map[string]struct{}
		benches map[string]struct{}
	}
	agg := map[key]*val{}
	err := filepath.WalkDir(root, func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		if d.IsDir() {
			if skipDirs[filepath.Base(path)] {
				return filepath.SkipDir
			}
			return nil
		}
		// only scan likely text files (simple heuristic)
		if isBinary(path) {
			return nil
		}
		f, err := os.Open(path)
		if err != nil {
			return nil // skip unreadable files
		}
		defer f.Close()
		sc := bufio.NewScanner(f)
		// Set max scanner buffer to 1MB for large lines
		const maxScanTokenSize = 1024 * 1024
		buf := make([]byte, maxScanTokenSize)
		sc.Buffer(buf, maxScanTokenSize)
		ln := 0
		for sc.Scan() {
			ln++
			line := sc.Text()
			m := canaryRe.FindStringSubmatch(line)
			if len(m) < 2 {
				continue
			}
			// m[1] is the KEY=VAL payload following "CANARY:".
			rec, err := parseCanaryKV(m[1])
			if err != nil {
				return fmt.Errorf("parse %s:%d: %w", path, ln, err)
			}
			k := key{
				id:      rec["REQ"],
				feature: unquote(rec["FEATURE"]),
				aspect:  rec["ASPECT"],
				status:  rec["STATUS"],
				owner:   rec["OWNER"],
				updated: rec["UPDATED"],
			}
			// REQ, ASPECT, and STATUS are mandatory for every token.
			if k.id == "" || k.aspect == "" || k.status == "" {
				return fmt.Errorf("missing required fields in %s:%d", path, ln)
			}
			if _, ok := agg[k]; !ok {
				agg[k] = &val{
					files:   map[string]struct{}{},
					tests:   map[string]struct{}{},
					benches: map[string]struct{}{},
				}
			}
			agg[k].files[path] = struct{}{}
			for _, t := range splitList(rec["TEST"]) {
				if t != "" {
					agg[k].tests[t] = struct{}{}
				}
			}
			for _, b := range splitList(rec["BENCH"]) {
				if b != "" {
					agg[k].benches[b] = struct{}{}
				}
			}
		}
		return nil
	})
	if err != nil {
		return report{}, err
	}
	// shape report
	reqMap := map[string][]featureEntry{}
	sumStatus := map[string]int{}
	sumAspect := map[string]int{}
	for k, v := range agg {
		// Auto-promotion: if status is IMPL and at least one test present -> TESTED
		// If at least one bench present and status IMPL or TESTED -> BENCHED (bench is stronger evidence)
		promoted_status := k.status
		if promoted_status == "IMPL" && len(v.tests) > 0 {
			promoted_status = "TESTED"
		}
		if (promoted_status == "IMPL" || promoted_status == "TESTED") && len(v.benches) > 0 {
			promoted_status = "BENCHED"
		}
		ent := featureEntry{
			Feature: k.feature,
			Aspect:  k.aspect,
			Status:  promoted_status,
			Files:   keys(v.files),
			Tests:   keys(v.tests),
			Benches: keys(v.benches),
			Owner:   k.owner,
			Updated: k.updated,
		}
		reqMap[normalizeReq(k.id)] = append(reqMap[normalizeReq(k.id)], ent)
		sumStatus[ent.Status]++
		sumAspect[k.aspect]++
	}
	// Deterministic ordering: features by (feature, aspect) within each
	// requirement, requirements by ID.
	var rows []requirementRow
	for id, feats := range reqMap {
		sort.Slice(feats, func(i, j int) bool {
			if feats[i].Feature == feats[j].Feature {
				return feats[i].Aspect < feats[j].Aspect
			}
			return feats[i].Feature < feats[j].Feature
		})
		rows = append(rows, requirementRow{ID: id, Features: feats})
	}
	sort.Slice(rows, func(i, j int) bool { return rows[i].ID < rows[j].ID })
	return report{
		Requirements: rows,
		Summary: summary{
			ByStatus: sumStatus,
			ByAspect: sumAspect,
		},
	}, nil
}
// parseCanaryKV parses the payload of a CANARY token ("KEY=VAL; ...")
// into a map with upper-cased keys and whitespace-trimmed values. A
// segment that is not a KEY=VAL pair is an error. STATUS, when present,
// must be one of the known lifecycle values (best-effort enum check;
// empty STATUS is allowed here and caught by later required-field checks).
//
// NOTE(review): fixed a mojibake hyphen in the old "best-effort" comment
// and dropped a redundant double TrimSpace on values; behavior unchanged.
func parseCanaryKV(s string) (map[string]string, error) {
	// Strip an HTML comment closing marker so tokens inside <!-- ... -->
	// parse the same as line-comment tokens.
	s = strings.TrimSpace(strings.TrimSuffix(strings.TrimSpace(s), "-->"))
	out := map[string]string{}
	for _, seg := range strings.Split(s, ";") {
		seg = strings.TrimSpace(seg)
		if seg == "" {
			continue
		}
		m := kvRe.FindStringSubmatch(seg)
		if len(m) != 3 {
			return nil, fmt.Errorf("bad kv segment %q", seg)
		}
		out[strings.ToUpper(strings.TrimSpace(m[1]))] = strings.TrimSpace(m[2])
	}
	switch status := strings.ToUpper(out["STATUS"]); status {
	case "", "MISSING", "STUB", "IMPL", "TESTED", "BENCHED", "REMOVED":
		return out, nil
	default:
		return nil, fmt.Errorf("invalid STATUS %q", status)
	}
}
// unquote strips one matching pair of surrounding quotes (double or
// single) from v after trimming whitespace; otherwise the trimmed value
// is returned unchanged.
func unquote(v string) string {
	v = strings.TrimSpace(v)
	if len(v) < 2 {
		return v
	}
	first, last := v[0], v[len(v)-1]
	if first == last && (first == '"' || first == '\'') {
		return v[1 : len(v)-1]
	}
	return v
}
// splitList splits a comma-separated list into trimmed, non-empty items.
// It returns nil for an empty (or all-whitespace) input.
//
// NOTE(review): the previous comment claimed space was also a separator,
// but the code only ever split on commas; the comment was corrected to
// match actual behavior rather than changing the parsing.
func splitList(v string) []string {
	v = strings.TrimSpace(v)
	if v == "" {
		return nil
	}
	var out []string
	for _, p := range strings.Split(v, ",") {
		if p = strings.TrimSpace(p); p != "" {
			out = append(out, p)
		}
	}
	return out
}
// keys returns the members of the set m in sorted order so downstream
// report output is deterministic.
func keys(m map[string]struct{}) []string {
	var out []string
	for k := range m {
		out = append(out, k)
	}
	sort.Strings(out)
	return out
}
// isBinary reports whether path should be skipped as a likely binary
// file, judged purely by its (lower-cased) extension, plus one heuristic:
// extension-less files under a zig-out path are assumed to be artifacts.
func isBinary(path string) bool {
	switch strings.ToLower(filepath.Ext(path)) {
	case ".png", ".jpg", ".jpeg", ".gif", ".pdf",
		".zip", ".gz", ".tgz", ".jar", ".wasm",
		".exe", ".dll", ".so", ".dylib",
		".db", ".db-wal", ".db-shm":
		return true
	case "":
		return strings.Contains(path, "zig-out")
	default:
		return false
	}
}
// CheckStaleness returns an error listing every TESTED/BENCHED feature
// whose UPDATED date is missing, unparseable (or pre-epoch), or older
// than dur; it returns nil when everything is fresh.
func CheckStaleness(rep report, dur time.Duration) error {
	cutoff := time.Now().UTC().Add(-dur)
	epoch := time.Date(1970, 1, 1, 0, 0, 0, 0, time.UTC)
	var problems []string
	for _, req := range rep.Requirements {
		for _, feat := range req.Features {
			if feat.Status != "TESTED" && feat.Status != "BENCHED" {
				continue
			}
			if feat.Updated == "" {
				problems = append(problems, fmt.Sprintf("%s %s missing UPDATED", req.ID, feat.Feature))
				continue
			}
			ts, err := time.Parse("2006-01-02", feat.Updated)
			if err != nil || ts.Before(epoch) {
				problems = append(problems, fmt.Sprintf("%s %s bad UPDATED=%q", req.ID, feat.Feature, feat.Updated))
				continue
			}
			if ts.Before(cutoff) {
				problems = append(problems, fmt.Sprintf("REQ=%s feature=%s UPDATED=%s > %dd", req.ID, feat.Feature, feat.Updated, int(dur.Hours()/24)))
			}
		}
	}
	if len(problems) == 0 {
		return nil
	}
	return errors.New(strings.Join(problems, "; "))
}
// WriteCSV writes the report to path as CSV. Each feature is exploded
// into one row per file/test/bench slot; a feature with no files, tests,
// or benches still emits a single row with those columns empty.
//
// Fixes: the header write error was previously discarded, and the
// deferred Flush silently swallowed buffered-write errors; both are now
// surfaced to the caller.
func WriteCSV(rep report, path string) error {
	f, err := os.Create(path)
	if err != nil {
		return err
	}
	defer f.Close()
	w := csv.NewWriter(f)
	if err := w.Write([]string{"req", "feature", "aspect", "status", "file", "test", "bench", "owner", "updated"}); err != nil {
		return err
	}
	for _, r := range rep.Requirements {
		for _, feat := range r.Features {
			base := []string{r.ID, feat.Feature, feat.Aspect, feat.Status, "", "", "", feat.Owner, feat.Updated}
			// Explode files/tests/benches: one row per slot up to the longest list.
			rows := max3(len(feat.Files), len(feat.Tests), len(feat.Benches))
			if rows == 0 {
				if err := w.Write(base); err != nil {
					return err
				}
				continue
			}
			for i := 0; i < rows; i++ {
				row := make([]string, len(base))
				copy(row, base)
				if i < len(feat.Files) {
					row[4] = feat.Files[i]
				}
				if i < len(feat.Tests) {
					row[5] = feat.Tests[i]
				}
				if i < len(feat.Benches) {
					row[6] = feat.Benches[i]
				}
				if err := w.Write(row); err != nil {
					return err
				}
			}
		}
	}
	// Flush explicitly so buffered-write errors are reported rather than
	// being lost in a deferred call.
	w.Flush()
	return w.Error()
}
// max3 returns the largest of three ints.
func max3(a, b, c int) int {
	m := a
	if b > m {
		m = b
	}
	if c > m {
		m = c
	}
	return m
}
// normalizeReq canonicalizes a requirement ID: it trims surrounding
// whitespace and maps Unicode hyphen look-alikes (hyphen U+2010,
// non-breaking hyphen U+2011, en dash U+2013) to ASCII '-' so IDs compare
// equal regardless of how they were typed or copy-pasted.
//
// NOTE(review): the previous body contained two ReplaceAll calls on the
// same mojibake-corrupted literal; the intended distinct hyphen variants
// were restored from the surviving "NB hyphen -> ASCII" comment.
func normalizeReq(id string) string {
	id = strings.TrimSpace(id)
	for _, dash := range []string{"\u2010", "\u2011", "\u2013"} {
		id = strings.ReplaceAll(id, dash, "-")
	}
	return id
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
package main
// CANARY: REQ=CBIN-101; FEATURE="ScannerCore"; ASPECT=Engine; STATUS=BENCHED; TEST=TestCANARY_CBIN_101_Engine_ScanBasic; BENCH=BenchmarkCANARY_CBIN_101_Engine_Scan; OWNER=canary; UPDATED=2025-10-15
import (
	"encoding/csv"
	"encoding/json"
	"flag"
	"fmt"
	"os"
	"path/filepath"
	"regexp"
	"sort"
	"strings"
	"time"

	ignore "github.com/sabhiram/go-gitignore"
	"gopkg.in/yaml.v3"
)
// StatusCounts maps STATUS -> token count; JSON output is key-sorted via
// marshalSortedMap so reports are deterministic.
type StatusCounts map[string]int

func (m StatusCounts) MarshalJSON() ([]byte, error) { return marshalSortedMap(m) }

// AspectCounts maps ASPECT -> token count; JSON output is key-sorted.
type AspectCounts map[string]int

func (m AspectCounts) MarshalJSON() ([]byte, error) { return marshalSortedMap(m) }
// ProjectConfig defines project-specific configuration, loaded from
// .canary/project.yaml (see loadProjectConfig).
type ProjectConfig struct {
	Project struct {
		Name        string `yaml:"name"`
		Description string `yaml:"description"`
	} `yaml:"project"`
	Requirements struct {
		// IDPattern is an RE2 pattern selecting this project's requirement IDs.
		IDPattern string `yaml:"id_pattern"`
	} `yaml:"requirements"`
	Scanner struct {
		ExcludePaths []string `yaml:"exclude_paths"`
	} `yaml:"scanner"`
}
// Report is the top-level JSON document produced by a scan.
type Report struct {
	GeneratedAt  string        `json:"generated_at"` // RFC3339 (see getTimestamp)
	Requirements []Requirement `json:"requirements"`
	Summary      Summary       `json:"summary"`
}

// Requirement groups every feature token sharing one requirement ID.
type Requirement struct {
	ID       string    `json:"id"`
	Features []Feature `json:"features"`
}

// Feature is one aggregated CANARY token together with its evidence.
type Feature struct {
	Feature string   `json:"feature"`
	Aspect  string   `json:"aspect"`
	Status  string   `json:"status"` // possibly auto-promoted (see promote)
	Files   []string `json:"files"`
	Tests   []string `json:"tests"`
	Benches []string `json:"benches"`
	Owner   string   `json:"owner,omitempty"`
	Updated string   `json:"updated"` // YYYY-MM-DD
}

// Summary holds aggregate counts for the whole scan.
type Summary struct {
	ByStatus           StatusCounts `json:"by_status"`
	ByAspect           AspectCounts `json:"by_aspect"`
	TotalTokens        int          `json:"total_tokens"`
	UniqueRequirements int          `json:"unique_requirements"`
}
var (
	// tokenLineRe matches a CANARY token line. Optional comment markers
	// (//, #, or /*) may precede "CANARY:" so tokens can live in source
	// files; the capture is the KEY=VAL payload.
	tokenLineRe = regexp.MustCompile(`(?m)^[ \t]*(?:\/\/|#|\/\*)?[ \t]*CANARY:\s*(.*)$`)
	// kvRe extracts one KEY=VALUE segment (segments are ';'-separated).
	kvRe = regexp.MustCompile(`\s*([A-Za-z_]+)\s*=\s*([^;]+)\s*`)
	// claimRe finds GAP_ANALYSIS claim lines of the form "✅ CBIN-123".
	// NOTE(review): the previous pattern contained a mojibake-corrupted
	// character plus a literal newline (it could never match); restored
	// to the ✅ check mark — confirm against the GAP_ANALYSIS format.
	claimRe = regexp.MustCompile(`(?m)^\s*✅\s+(CBIN-\d{3})\b`)
	// skipDefault is the default path-skip pattern for the scanner.
	skipDefault = regexp.MustCompile(`(^|/)(.git|node_modules|vendor|bin|dist|build|zig-out|.zig-cache)(/|$)`)
	// aspects is the set of valid ASPECT values.
	aspects = map[string]struct{}{"API": {}, "CLI": {}, "Engine": {}, "Planner": {}, "Storage": {}, "Wire": {}, "Security": {}, "Docs": {}, "Decode": {}, "Encode": {}, "RoundTrip": {}, "Bench": {}, "FrontEnd": {}, "Dist": {}}
	// statuses lists valid STATUS values in lifecycle order.
	statuses = []string{"MISSING", "STUB", "IMPL", "TESTED", "BENCHED", "REMOVED"}
	// statusSet is the set form of statuses for O(1) validation.
	statusSet = func() map[string]struct{} {
		m := map[string]struct{}{}
		for _, s := range statuses {
			m[s] = struct{}{}
		}
		return m
	}()
)
// main drives the standalone canary scanner CLI: parse flags, build the
// skip/project/ignore filters, scan, optionally rewrite stale UPDATED
// dates, write JSON/CSV outputs, then exit 2 on verify/staleness
// diagnostics or 3 on parse errors (via failParse).
func main() {
	root := flag.String("root", ".", "root directory to scan")
	outJSON := flag.String("out", "status.json", "output status.json path")
	outCSV := flag.String("csv", "", "optional status.csv path")
	verifyPath := flag.String("verify", "", "GAP_ANALYSIS file to verify claims")
	strict := flag.Bool("strict", false, "enforce staleness on TESTED/BENCHED (30d)")
	skipExpr := flag.String("skip", skipDefault.String(), "skip path regex (RE2)")
	updateStale := flag.Bool("update-stale", false, "rewrite UPDATED field for stale TESTED/BENCHED tokens")
	projectOnly := flag.Bool("project-only", false, "filter by project requirement ID pattern from .canary/project.yaml")
	flag.Parse()
	skip, err := regexp.Compile(*skipExpr)
	if err != nil {
		failParse(fmt.Errorf("bad --skip regex: %w", err))
	}
	// Load project config if --project-only is set
	var projectFilter *regexp.Regexp
	if *projectOnly {
		cfg, err := loadProjectConfig(*root)
		if err != nil {
			// A missing/unreadable config downgrades to a warning: scan everything.
			fmt.Fprintf(os.Stderr, "Warning: --project-only specified but failed to load .canary/project.yaml: %v\n", err)
			fmt.Fprintf(os.Stderr, "Scanning all requirements. Run 'canary init' to create project config.\n")
		} else if cfg.Requirements.IDPattern != "" {
			projectFilter, err = regexp.Compile(cfg.Requirements.IDPattern)
			if err != nil {
				failParse(fmt.Errorf("invalid project id_pattern %q: %w", cfg.Requirements.IDPattern, err))
			}
			fmt.Fprintf(os.Stderr, "Filtering by project pattern: %s\n", cfg.Requirements.IDPattern)
		}
	}
	// Load .canaryignore if it exists
	ignorePatterns, err := loadCanaryIgnore(*root)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Warning: failed to load .canaryignore: %v\n", err)
	}
	if ignorePatterns != nil {
		fmt.Fprintf(os.Stderr, "Loaded .canaryignore patterns\n")
	}
	rep, err := scan(*root, skip, projectFilter, ignorePatterns)
	if err != nil {
		failParse(err)
	}
	// Handle --update-stale before writing output files
	if *updateStale {
		staleTokens := stale(rep, 30*24*time.Hour)
		if len(staleTokens) > 0 {
			updatedFiles, err := updateStaleTokens(*root, skip, staleTokens)
			if err != nil {
				fmt.Fprintf(os.Stderr, "CANARY_UPDATE_ERROR: %v\n", err)
				os.Exit(3)
			}
			fmt.Fprintf(os.Stderr, "Updated %d stale tokens in %d files\n", len(staleTokens), len(updatedFiles))
			// Re-scan after updates so outputs reflect the rewritten dates.
			rep, err = scan(*root, skip, projectFilter, ignorePatterns)
			if err != nil {
				failParse(err)
			}
		} else {
			fmt.Fprintln(os.Stderr, "No stale tokens found")
		}
	}
	if err := writeJSON(*outJSON, rep); err != nil {
		failParse(err)
	}
	if *outCSV != "" {
		if err := writeCSV(*outCSV, rep); err != nil {
			failParse(err)
		}
	}
	var diags []string
	if *verifyPath != "" {
		diags = append(diags, verifyClaims(rep, *verifyPath)...)
	}
	// --update-stale already refreshed dates, so the staleness gate is skipped then.
	if *strict && !*updateStale {
		diags = append(diags, stale(rep, 30*24*time.Hour)...)
	}
	if len(diags) > 0 {
		for _, d := range diags {
			fmt.Fprintln(os.Stderr, d)
		}
		os.Exit(2)
	}
}
// aggregateKey identifies one logical token during aggregation. STATUS is
// deliberately not part of the key; it lives in aggregateVal.
type aggregateKey struct{ req, feature, aspect, owner, updated string }

// aggregateVal accumulates the evidence gathered for one aggregateKey.
type aggregateVal struct {
	// status holds the STATUS of the first token seen for this key (see scan).
	status                string
	files, tests, benches map[string]struct{}
}
// loadProjectConfig reads and parses .canary/project.yaml under root. The
// raw read error is returned unwrapped so callers can distinguish a
// missing file from a malformed one.
func loadProjectConfig(root string) (*ProjectConfig, error) {
	raw, err := os.ReadFile(filepath.Join(root, ".canary", "project.yaml"))
	if err != nil {
		return nil, err
	}
	cfg := &ProjectConfig{}
	if err := yaml.Unmarshal(raw, cfg); err != nil {
		return nil, fmt.Errorf("parse project.yaml: %w", err)
	}
	return cfg, nil
}
// loadCanaryIgnore compiles .canaryignore under root into gitignore-style
// matching rules. A missing file is not an error: (nil, nil) is returned.
func loadCanaryIgnore(root string) (*ignore.GitIgnore, error) {
	path := filepath.Join(root, ".canaryignore")
	if _, err := os.Stat(path); os.IsNotExist(err) {
		return nil, nil // no .canaryignore file, not an error
	}
	patterns, err := ignore.CompileIgnoreFile(path)
	if err != nil {
		return nil, fmt.Errorf("parse .canaryignore: %w", err)
	}
	return patterns, nil
}
// scan walks root and aggregates CANARY tokens into a Report. Filtering
// layers apply in order: .canaryignore patterns, the skip regex (for both
// dirs and files), a testdata exemption used only when the scanner scans
// its own tool directory, and an optional requirement-ID project filter.
// A token failing field or enum validation aborts the scan with an error.
func scan(root string, skip *regexp.Regexp, projectFilter *regexp.Regexp, ignorePatterns *ignore.GitIgnore) (Report, error) {
	if root == "" {
		root = "."
	}
	if skip == nil {
		skip = skipDefault
	}
	agg := map[aggregateKey]*aggregateVal{}
	err := filepath.WalkDir(root, func(path string, d os.DirEntry, err error) error {
		if err != nil {
			return err
		}
		// Get relative path for .canaryignore matching
		relPath, err := filepath.Rel(root, path)
		if err != nil {
			relPath = path
		}
		// Check .canaryignore patterns
		if ignorePatterns != nil && ignorePatterns.MatchesPath(relPath) {
			if d.IsDir() {
				return filepath.SkipDir
			}
			return nil
		}
		if d.IsDir() {
			if skip.MatchString(path) {
				return filepath.SkipDir
			}
			return nil
		}
		// Skip acceptance fixture testdata only when scanning the canary tool itself (root path equals the tools/canary dir)
		if strings.Contains(path, string(filepath.Separator)+"testdata"+string(filepath.Separator)) {
			// Only skip when the scan root itself is the canary tool directory (self-scan)
			base := filepath.Clean(root)
			if strings.HasSuffix(base, string(filepath.Join("tools", "canary"))) {
				return nil
			}
		}
		if skip.MatchString(path) {
			return nil
		}
		b, err := os.ReadFile(path)
		if err != nil {
			return err
		}
		matches := tokenLineRe.FindAllStringSubmatch(string(b), -1)
		for _, m := range matches {
			// m[1] is the KEY=VAL payload following "CANARY:".
			fields, perr := parseKV(m[1])
			if perr != nil {
				return fmt.Errorf("%s: %w", path, perr)
			}
			// These fields are mandatory in every token.
			for _, k := range []string{"REQ", "FEATURE", "ASPECT", "STATUS", "UPDATED"} {
				if fields[k] == "" {
					absPath, _ := filepath.Abs(path)
					return fmt.Errorf("%s (abs: %s): missing %s in token: %s", path, absPath, k, m[0])
				}
			}
			req := normalizeREQ(fields["REQ"])
			// Apply project filter if specified
			if projectFilter != nil && !projectFilter.MatchString(req) {
				continue // Skip requirements that don't match project pattern
			}
			aspect := fields["ASPECT"]
			if _, ok := aspects[aspect]; !ok {
				return fmt.Errorf("%s: invalid ASPECT %s", path, aspect)
			}
			if _, ok := statusSet[fields["STATUS"]]; !ok {
				return fmt.Errorf("%s: invalid STATUS %s", path, fields["STATUS"])
			}
			// Merge evidence under the token's identity key; the STATUS of
			// the first occurrence wins for a given key.
			k := aggregateKey{req: req, feature: unquote(fields["FEATURE"]), aspect: aspect, owner: fields["OWNER"], updated: fields["UPDATED"]}
			a := agg[k]
			if a == nil {
				a = &aggregateVal{status: fields["STATUS"], files: map[string]struct{}{}, tests: map[string]struct{}{}, benches: map[string]struct{}{}}
				agg[k] = a
			}
			a.files[path] = struct{}{}
			for _, t := range splitList(fields["TEST"]) {
				if t != "" {
					a.tests[t] = struct{}{}
				}
			}
			for _, b := range splitList(fields["BENCH"]) {
				if b != "" {
					a.benches[b] = struct{}{}
				}
			}
		}
		return nil
	})
	if err != nil {
		return Report{}, err
	}
	// Shape the report: group features by requirement and accumulate
	// summary counts, applying status auto-promotion from evidence.
	byReq := map[string][]Feature{}
	byStatus := StatusCounts{"MISSING": 0, "STUB": 0, "IMPL": 0, "TESTED": 0, "BENCHED": 0, "REMOVED": 0}
	byAspect := AspectCounts{}
	total := 0
	for k, v := range agg {
		status := promote(v.status, len(v.tests) > 0, len(v.benches) > 0)
		f := Feature{Feature: k.feature, Aspect: k.aspect, Status: status, Files: keys(v.files), Tests: keys(v.tests), Benches: keys(v.benches), Owner: k.owner, Updated: k.updated}
		byReq[k.req] = append(byReq[k.req], f)
		byStatus[status]++
		byAspect[k.aspect]++
		total++
	}
	// Deterministic ordering: features by (feature, aspect), requirements by ID.
	var reqs []Requirement
	for id, feats := range byReq {
		sort.Slice(feats, func(i, j int) bool { return feats[i].Feature+feats[i].Aspect < feats[j].Feature+feats[j].Aspect })
		reqs = append(reqs, Requirement{ID: id, Features: feats})
	}
	sort.Slice(reqs, func(i, j int) bool { return reqs[i].ID < reqs[j].ID })
	rep := Report{GeneratedAt: getTimestamp(), Requirements: reqs, Summary: Summary{ByStatus: byStatus, ByAspect: byAspect, TotalTokens: total, UniqueRequirements: len(reqs)}}
	return rep, nil
}
// getTimestamp returns current UTC timestamp in RFC3339 format, or a fixed timestamp if CANARY_TEST_TIMESTAMP is set
func getTimestamp() string {
if testTS := os.Getenv("CANARY_TEST_TIMESTAMP"); testTS != "" {
return testTS
}
return time.Now().UTC().Format(time.RFC3339)
}
// promote upgrades a token's STATUS based on evidence: IMPL with tests
// becomes TESTED, and IMPL or TESTED with benchmarks becomes BENCHED
// (benchmarks are the stronger signal). Other statuses pass through.
func promote(status string, hasTests, hasBenches bool) string {
	switch status {
	case "IMPL":
		if hasBenches {
			return "BENCHED"
		}
		if hasTests {
			return "TESTED"
		}
	case "TESTED":
		if hasBenches {
			return "BENCHED"
		}
	}
	return status
}
// verifyClaims cross-checks GAP_ANALYSIS claims against scan evidence.
// Every requirement claimed in the GAP file (lines matched by claimRe)
// must have at least one TESTED or BENCHED feature; one diagnostic line
// is returned for each claim lacking such evidence. An unreadable GAP
// file yields a single CANARY_PARSE_ERROR diagnostic.
func verifyClaims(rep Report, gapPath string) []string {
	raw, err := os.ReadFile(gapPath)
	if err != nil {
		return []string{fmt.Sprintf("CANARY_PARSE_ERROR file=%s err=%q", gapPath, err)}
	}
	// Requirements backed by at least one TESTED/BENCHED feature.
	proven := map[string]bool{}
	for _, r := range rep.Requirements {
		for _, f := range r.Features {
			if f.Status == "TESTED" || f.Status == "BENCHED" {
				proven[r.ID] = true
				break
			}
		}
	}
	var diags []string
	seen := map[string]bool{}
	for _, m := range claimRe.FindAllStringSubmatch(string(raw), -1) {
		id := m[1]
		if seen[id] {
			continue // each claimed REQ is reported at most once
		}
		seen[id] = true
		if !proven[id] {
			diags = append(diags, fmt.Sprintf("CANARY_VERIFY_FAIL REQ=%s reason=claimed_but_not_TESTED_OR_BENCHED", id))
		}
	}
	return diags
}
// stale returns a CANARY_STALE diagnostic for every TESTED/BENCHED
// feature whose UPDATED date is older than maxAge, and a
// CANARY_PARSE_ERROR diagnostic for dates that fail to parse as
// YYYY-MM-DD.
func stale(rep Report, maxAge time.Duration) []string {
	cutoff := time.Now().UTC().Add(-maxAge)
	thresholdDays := int(maxAge.Hours() / 24)
	var diags []string
	for _, req := range rep.Requirements {
		for _, feat := range req.Features {
			if feat.Status != "TESTED" && feat.Status != "BENCHED" {
				continue
			}
			when, err := time.Parse("2006-01-02", feat.Updated)
			if err != nil {
				diags = append(diags, fmt.Sprintf("CANARY_PARSE_ERROR file=%s err=%q", strings.Join(feat.Files, ","), err))
				continue
			}
			if when.Before(cutoff) {
				ageDays := int(time.Since(when).Hours() / 24)
				diags = append(diags, fmt.Sprintf("CANARY_STALE REQ=%s updated=%s age_days=%d threshold=%d", req.ID, feat.Updated, ageDays, thresholdDays))
			}
		}
	}
	return diags
}
// staleReqIDRe extracts the requirement ID from a stale diagnostic line.
// updatedFieldRe matches the UPDATED=YYYY-MM-DD field inside a token.
// Both are compiled once at package scope; updatedFieldRe was previously
// recompiled for every token line of every scanned file.
var (
	staleReqIDRe   = regexp.MustCompile(`REQ=([A-Z]+-\d{3})`)
	updatedFieldRe = regexp.MustCompile(`(UPDATED=)([0-9]{4}-[0-9]{2}-[0-9]{2})`)
)

// updateStaleTokens rewrites the UPDATED field to today's (UTC) date for
// every TESTED/BENCHED CANARY token whose REQ appears in staleDiags. It
// walks root honoring the skip regex, edits matching lines in place, and
// returns the set of file paths that were modified.
func updateStaleTokens(root string, skip *regexp.Regexp, staleDiags []string) (map[string]bool, error) {
	// Collect the REQ IDs reported stale.
	staleReqs := make(map[string]bool)
	for _, diag := range staleDiags {
		if m := staleReqIDRe.FindStringSubmatch(diag); len(m) > 1 {
			staleReqs[m[1]] = true
		}
	}
	if len(staleReqs) == 0 {
		return nil, nil
	}
	updatedFiles := make(map[string]bool)
	today := time.Now().UTC().Format("2006-01-02")
	err := filepath.Walk(root, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		if info.IsDir() {
			rel, _ := filepath.Rel(root, path)
			if rel != "." && skip.MatchString(rel) {
				return filepath.SkipDir
			}
			return nil
		}
		rel, _ := filepath.Rel(root, path)
		if skip.MatchString(rel) {
			return nil
		}
		content, err := os.ReadFile(path)
		if err != nil {
			return nil // best-effort: skip unreadable files
		}
		// Cheap pre-filter: only line-split files containing CANARY tokens.
		if !tokenLineRe.Match(content) {
			return nil
		}
		lines := strings.Split(string(content), "\n")
		modified := false
		for i, line := range lines {
			match := tokenLineRe.FindStringSubmatch(line)
			if len(match) < 2 {
				continue
			}
			attrs, err := parseKV(match[1])
			if err != nil {
				continue // skip malformed tokens rather than aborting
			}
			// Only touch tokens that are both stale and TESTED/BENCHED.
			if !staleReqs[attrs["REQ"]] {
				continue
			}
			if s := attrs["STATUS"]; s != "TESTED" && s != "BENCHED" {
				continue
			}
			if updatedFieldRe.MatchString(line) {
				lines[i] = updatedFieldRe.ReplaceAllString(line, "${1}"+today)
				modified = true
			}
		}
		if modified {
			// Preserve the file's original permissions on rewrite.
			if err := os.WriteFile(path, []byte(strings.Join(lines, "\n")), info.Mode()); err != nil {
				return fmt.Errorf("write %s: %w", path, err)
			}
			updatedFiles[path] = true
		}
		return nil
	})
	if err != nil {
		return nil, err
	}
	return updatedFiles, nil
}
// writeJSON writes rep to path as a single line of minified JSON, with
// HTML escaping disabled so paths and strings stay readable.
func writeJSON(path string, rep Report) error {
	out, err := os.Create(path)
	if err != nil {
		return err
	}
	defer out.Close()
	encoder := json.NewEncoder(out)
	encoder.SetEscapeHTML(false) // keep &, <, > literal in output
	return encoder.Encode(rep)
}
// writeCSV writes the report as CSV to path. Each (requirement, feature)
// pair is exploded into one row per file/test/bench slot; a feature with
// none still emits a single row with those columns empty.
//
// Fix: fields are now encoded with encoding/csv so values containing
// commas, quotes, or newlines are escaped correctly — the previous raw
// fmt.Fprintf implementation produced malformed CSV for such values.
func writeCSV(path string, rep Report) error {
	f, err := os.Create(path)
	if err != nil {
		return err
	}
	defer f.Close()
	w := csv.NewWriter(f)
	if err := w.Write([]string{"req", "feature", "aspect", "status", "file", "test", "bench", "owner", "updated"}); err != nil {
		return err
	}
	for _, r := range rep.Requirements {
		for _, ft := range r.Features {
			n := max3(len(ft.Files), len(ft.Tests), len(ft.Benches))
			if n == 0 {
				if err := w.Write([]string{r.ID, ft.Feature, ft.Aspect, ft.Status, "", "", "", ft.Owner, ft.Updated}); err != nil {
					return err
				}
				continue
			}
			for i := 0; i < n; i++ {
				var file, test, bench string
				if i < len(ft.Files) {
					file = ft.Files[i]
				}
				if i < len(ft.Tests) {
					test = ft.Tests[i]
				}
				if i < len(ft.Benches) {
					bench = ft.Benches[i]
				}
				if err := w.Write([]string{r.ID, ft.Feature, ft.Aspect, ft.Status, file, test, bench, ft.Owner, ft.Updated}); err != nil {
					return err
				}
			}
		}
	}
	// Flush explicitly so buffered-write errors surface to the caller.
	w.Flush()
	return w.Error()
}
// parseKV parses the "KEY=VAL; KEY=VAL" payload of a CANARY token into a
// map. Keys are upper-cased and values trimmed; empty segments are
// skipped, and any segment that is not a KEY=VAL pair is an error.
func parseKV(s string) (map[string]string, error) {
	fields := map[string]string{}
	for _, segment := range strings.Split(s, ";") {
		segment = strings.TrimSpace(segment)
		if len(segment) == 0 {
			continue
		}
		kv := kvRe.FindStringSubmatch(segment)
		if len(kv) != 3 {
			return nil, fmt.Errorf("bad kv segment %q", segment)
		}
		fields[strings.ToUpper(kv[1])] = strings.TrimSpace(kv[2])
	}
	return fields, nil
}
// splitList splits a comma-separated value into trimmed, non-empty items.
// Empty or all-whitespace input yields nil.
func splitList(v string) []string {
	var items []string
	for _, piece := range strings.Split(v, ",") {
		if piece = strings.TrimSpace(piece); piece != "" {
			items = append(items, piece)
		}
	}
	return items
}
// keys returns the set's members as a sorted slice for stable output.
func keys(m map[string]struct{}) []string {
	sorted := make([]string, 0, len(m))
	for member := range m {
		sorted = append(sorted, member)
	}
	sort.Strings(sorted)
	return sorted
}
// unquote removes a single layer of matching surrounding quotes ("..."
// or '...') from the trimmed value, if present.
func unquote(v string) string {
	v = strings.TrimSpace(v)
	for _, q := range []byte{'"', '\''} {
		if len(v) >= 2 && v[0] == q && v[len(v)-1] == q {
			return v[1 : len(v)-1]
		}
	}
	return v
}
// max3 returns the largest of a, b, and c.
func max3(a, b, c int) int {
	switch {
	case b >= a && b >= c:
		return b
	case c >= a && c >= b:
		return c
	default:
		return a
	}
}
// cbinShortRe matches CBIN IDs with up to three digits so they can be
// zero-padded (e.g. CBIN-7 -> CBIN-007). Compiled once at package scope;
// it was previously recompiled on every call.
var cbinShortRe = regexp.MustCompile(`^(CBIN-)(\d{1,3})$`)

// normalizeREQ canonicalizes a requirement ID: it trims whitespace, maps
// Unicode hyphen look-alikes (U+2010, U+2011, U+2013) to ASCII '-', and
// zero-pads CBIN numbers to three digits.
//
// NOTE(review): the previous body repeated ReplaceAll on one
// mojibake-corrupted literal; the distinct hyphen variants were restored.
func normalizeREQ(v string) string {
	v = strings.TrimSpace(v)
	for _, dash := range []string{"\u2010", "\u2011", "\u2013"} {
		v = strings.ReplaceAll(v, dash, "-")
	}
	if m := cbinShortRe.FindStringSubmatch(v); len(m) == 3 {
		n := m[2]
		for len(n) < 3 {
			n = "0" + n
		}
		return m[1] + n
	}
	return v
}
// marshalSortedMap encodes a string->int map as a JSON object whose keys
// appear in sorted order, keeping report output deterministic across runs
// (Go map iteration order is random).
func marshalSortedMap(m map[string]int) ([]byte, error) {
	names := make([]string, 0, len(m))
	for name := range m {
		names = append(names, name)
	}
	sort.Strings(names)
	var out strings.Builder
	out.WriteByte('{')
	for i, name := range names {
		if i > 0 {
			out.WriteByte(',')
		}
		fmt.Fprintf(&out, "%q:%d", name, m[name])
	}
	out.WriteByte('}')
	return []byte(out.String()), nil
}
// failParse reports a parse error on stderr in machine-readable form and
// exits with status 3 (the scanner's parse-failure exit code).
func failParse(err error) { fmt.Fprintf(os.Stderr, "CANARY_PARSE_ERROR err=%q\n", err); os.Exit(3) }
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
package main
import (
"bufio"
"fmt"
"os"
"path/filepath"
"strings"
)
// main walks the current directory and converts every "# CANARY:" line in
// markdown files into an HTML comment ("<!-- CANARY: ... -->"), printing
// each converted file and a final count.
//
// Fixes: the final Printf literal was mojibake-corrupted (split across
// lines by a stray character; the ✅ marker was restored — confirm the
// intended glyph), and the per-file Close was both deferred and called
// explicitly (double close); per-file work now lives in convertFile so
// the defer is sufficient.
func main() {
	count := 0
	err := filepath.Walk(".", func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		// Skip VCS and dependency directories entirely.
		if info.IsDir() {
			switch info.Name() {
			case ".git", "node_modules", "vendor":
				return filepath.SkipDir
			}
			return nil
		}
		// Only markdown files carry "# CANARY:" lines to convert.
		if !strings.HasSuffix(path, ".md") {
			return nil
		}
		lines, modified, err := convertFile(path)
		if err != nil {
			return err
		}
		if modified {
			output := strings.Join(lines, "\n") + "\n"
			if err := os.WriteFile(path, []byte(output), 0644); err != nil {
				return err
			}
			count++
			fmt.Printf("Converted: %s\n", path)
		}
		return nil
	})
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error: %v\n", err)
		os.Exit(1)
	}
	fmt.Printf("\n✅ Converted %d files\n", count)
}

// convertFile reads one markdown file and rewrites lines starting with
// "# CANARY:" into HTML comments. It returns the (possibly rewritten)
// lines and whether anything changed; the file itself is not written.
func convertFile(path string) ([]string, bool, error) {
	file, err := os.Open(path)
	if err != nil {
		return nil, false, err
	}
	defer file.Close()
	var lines []string
	modified := false
	scanner := bufio.NewScanner(file)
	for scanner.Scan() {
		line := scanner.Text()
		if strings.HasPrefix(line, "# CANARY:") {
			line = strings.Replace(line, "# CANARY:", "<!-- CANARY:", 1) + " -->"
			modified = true
		}
		lines = append(lines, line)
	}
	if err := scanner.Err(); err != nil {
		return nil, false, err
	}
	return lines, modified, nil
}
// Copyright (c) 2025 by Developer Network.
//
// For more details, see the LICENSE file in the root directory of this
// source code repository or contact Developer Network at info@devnw.com.
package main
// CANARY: REQ=CBIN-102; FEATURE="VerifyGate"; ASPECT=CLI; STATUS=TESTED; TEST=TestCANARY_CBIN_102_CLI_Verify; BENCH=BenchmarkCANARY_CBIN_102_CLI_Verify; OWNER=canary; UPDATED=2025-10-15
import (
"bufio"
"fmt"
"os"
"regexp"
"strings"
)
// claim means "this REQ is claimed Implemented/Complete in GAP"
// claim means "this REQ is claimed Implemented/Complete in GAP"
type claim struct {
	REQ         string // normalized REQ-GQL-NNN identifier
	Implemented bool   // true when a GAP line marks it done (see ParseGAPClaims)
	RawLine     string // last GAP line that mentioned this REQ
}
// reqRe extracts REQ-GQL-NNN identifiers, tolerating ASCII '-' as well as
// Unicode hyphen look-alikes between the segments.
// NOTE(review): the separator classes contained mojibake-corrupted
// characters; restored to common hyphen variants — confirm against the
// GAP file's actual characters.
var reqRe = regexp.MustCompile(`REQ[-\x{2010}\x{2011}\x{2013}]GQL[-\x{2010}\x{2011}\x{2013}](\d{3,})`)

// implementedRe marks a claim line as done (Implemented/Complete/✅).
// NOTE(review): the ✅ alternative was restored from a mojibake-corrupted
// literal that also split the raw string across lines; note \b around an
// emoji may not behave as expected (RE2 word boundaries are ASCII-based).
var implementedRe = regexp.MustCompile(`\b(Implemented|Complete|✅)\b`)

// notImplRe marks explicitly-not-done lines (STUB/NOT IMPLEMENTED/❌/⏳).
// NOTE(review): the emoji alternatives were restored from mojibake;
// confirm the intended glyphs.
var notImplRe = regexp.MustCompile(`\b(STUB|NOT IMPLEMENTED|❌|⏳)\b`)
// ParseGAPClaims scans a GAP_ANALYSIS file and returns one claim per
// REQ-GQL-NNN identifier mentioned. A claim is marked Implemented only
// when some line mentions the ID alongside an "implemented" marker and no
// explicit "not implemented" marker — a deliberately conservative read.
// The claim keeps the last line that mentioned its ID.
func ParseGAPClaims(path string) (map[string]claim, error) {
	f, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer f.Close()
	claims := map[string]claim{}
	scanner := bufio.NewScanner(f)
	for scanner.Scan() {
		line := scanner.Text()
		for _, match := range reqRe.FindAllStringSubmatch(line, -1) {
			id := "REQ-GQL-" + match[1]
			c := claims[id]
			c.REQ = id
			c.RawLine = line
			if implementedRe.MatchString(line) && !notImplRe.MatchString(line) {
				c.Implemented = true
			}
			claims[id] = c
		}
	}
	return claims, scanner.Err()
}
// VerifyClaims checks that every GAP claim marked Implemented is backed
// by scan evidence: at least one TESTED or BENCHED feature under the
// (normalized) requirement ID. All failures are joined into one error;
// nil means every implemented claim has evidence.
func VerifyClaims(rep report, claims map[string]claim) error {
	// Requirements with at least one TESTED/BENCHED feature.
	proven := map[string]bool{}
	for _, req := range rep.Requirements {
		for _, feat := range req.Features {
			if feat.Status == "TESTED" || feat.Status == "BENCHED" {
				proven[req.ID] = true
				break
			}
		}
	}
	var failures []string
	for id, c := range claims {
		if c.Implemented && !proven[normalizeReq(id)] {
			failures = append(failures, fmt.Sprintf("REQ=%s claimed Implemented without CANARY TESTED/BENCHED (%s)", id, strings.TrimSpace(c.RawLine)))
		}
	}
	if len(failures) == 0 {
		return nil
	}
	return fmt.Errorf("%s", strings.Join(failures, "; "))
}