DORZF5HSV672ZP5HUDYB3J6TBH5O2LMXJE4HPSE7H5SOGZQBDCXQC YCGYOCFORTFK53WZ2B7GYZLT4HYXT55LCCKOTXM76ATWJNZ5WLJQC ZVYOPUNH7UJL3YALGNNXQW2B4H4ONI5Z6XWAUZUONFG7LR55W4SQC IFVRAERTCCDICNTYTG3TX2WASB6RXQQEJWWXQMQZJSQDQ3HLE5OQC 25GQ5TYGSGL7QED7L2IAPFLZ4WJJ2ZFAM6O6X5AOSTYPVJNCOGPQC OGLLBQQYE5KICDMI6EX7ZI4TZT5RB7UFHH7O2DUOZ44QQXVL5YAAC 65G4H2V6262GLHTPQQ5H4NIPDJB7HRPBRVNAD2EL26N75YUM5PWQC 2GJMZ6YA6OPHNS5KFFFI6POQ2BJ33SSS3NIPXYBFTJSN4BZBVEVAC L4STQEXDGCPZXDHTEUBCOQKBMTFDRVXRLNFQHPDHOVXDCJO33LQQC package toolsimport ("context""fmt""strings""github.com/modelcontextprotocol/go-sdk/mcp""skraak_mcp/db")// UpdatePatternInput defines input parameterstype UpdatePatternInput struct {PatternID string `json:"pattern_id" jsonschema:"required,Pattern ID (12 characters)"`RecordS *int `json:"record_s,omitempty" jsonschema:"Record duration in seconds (must be > 0)"`SleepS *int `json:"sleep_s,omitempty" jsonschema:"Sleep duration in seconds (must be >= 0)"`}// UpdatePatternOutput defines output structuretype UpdatePatternOutput struct {PatternID string `json:"pattern_id" jsonschema:"Updated pattern ID"`Success bool `json:"success" jsonschema:"Whether update succeeded"`Message string `json:"message" jsonschema:"Status message"`}// UpdatePattern implements the update_pattern MCP toolfunc UpdatePattern(ctx context.Context,req *mcp.CallToolRequest,input UpdatePatternInput,) (*mcp.CallToolResult, UpdatePatternOutput, error) {// Open writable databasedatabase, err := db.OpenWriteableDB(dbPath)if err != nil {return nil, UpdatePatternOutput{}, fmt.Errorf("failed to open database: %w", err)}defer database.Close()// Verify pattern existsvar exists boolerr = database.QueryRow("SELECT EXISTS(SELECT 1 FROM cyclic_recording_pattern WHERE id = ?)", input.PatternID).Scan(&exists)if err != nil {return nil, UpdatePatternOutput{}, fmt.Errorf("failed to query pattern: %w", err)}if !exists {return nil, UpdatePatternOutput{}, fmt.Errorf("pattern not found: %s", input.PatternID)}// Validate RecordS if providedif 
input.RecordS != nil && *input.RecordS <= 0 {return nil, UpdatePatternOutput{}, fmt.Errorf("record_s must be greater than 0: %d", *input.RecordS)}// Validate SleepS if providedif input.SleepS != nil && *input.SleepS < 0 {return nil, UpdatePatternOutput{}, fmt.Errorf("sleep_s must be greater than or equal to 0: %d", *input.SleepS)}// Build dynamic UPDATE query based on provided fieldsupdates := []string{}args := []any{}if input.RecordS != nil {updates = append(updates, "record_s = ?")args = append(args, *input.RecordS)}if input.SleepS != nil {updates = append(updates, "sleep_s = ?")args = append(args, *input.SleepS)}if len(updates) == 0 {return nil, UpdatePatternOutput{}, fmt.Errorf("no fields provided to update")}// Always update last_modifiedupdates = append(updates, "last_modified = now()")args = append(args, input.PatternID)// Execute updatequery := fmt.Sprintf("UPDATE cyclic_recording_pattern SET %s WHERE id = ?", strings.Join(updates, ", "))_, err = database.Exec(query, args...)if err != nil {return nil, UpdatePatternOutput{}, fmt.Errorf("failed to update pattern: %w", err)}output := UpdatePatternOutput{PatternID: input.PatternID,Success: true,Message: "Pattern updated successfully",}return &mcp.CallToolResult{}, output, nil}
package toolsimport ("context""fmt""strings""github.com/modelcontextprotocol/go-sdk/mcp""skraak_mcp/db")// UpdateClusterInput defines input parameterstype UpdateClusterInput struct {}// UpdateClusterOutput defines output structuretype UpdateClusterOutput struct {ClusterID string `json:"cluster_id" jsonschema:"Updated cluster ID"`Success bool `json:"success" jsonschema:"Whether update succeeded"`Message string `json:"message" jsonschema:"Status message"`}// UpdateCluster implements the update_cluster MCP toolfunc UpdateCluster(ctx context.Context,req *mcp.CallToolRequest,input UpdateClusterInput,) (*mcp.CallToolResult, UpdateClusterOutput, error) {// Open writable databasedatabase, err := db.OpenWriteableDB(dbPath)if err != nil {return nil, UpdateClusterOutput{}, fmt.Errorf("failed to open database: %w", err)}defer database.Close()// Verify cluster existsvar exists boolerr = database.QueryRow("SELECT EXISTS(SELECT 1 FROM cluster WHERE id = ?)", input.ClusterID).Scan(&exists)if err != nil {return nil, UpdateClusterOutput{}, fmt.Errorf("failed to query cluster: %w", err)}if !exists {return nil, UpdateClusterOutput{}, fmt.Errorf("cluster not found: %s", input.ClusterID)}// Validate cyclic_recording_pattern_id if providedif input.CyclicRecordingPatternID != nil {trimmedPatternID := strings.TrimSpace(*input.CyclicRecordingPatternID)// If provided but empty, allow it (NULL update to clear the field)if trimmedPatternID != "" {// Verify pattern exists and is activevar patternExists boolerr = database.QueryRow("SELECT EXISTS(SELECT 1 FROM cyclic_recording_pattern WHERE id = ? 
AND active = true)",trimmedPatternID,).Scan(&patternExists)if err != nil {return nil, UpdateClusterOutput{}, fmt.Errorf("failed to verify cyclic recording pattern: %w", err)}if !patternExists {return nil, UpdateClusterOutput{}, fmt.Errorf("cyclic recording pattern not found or not active: %s", trimmedPatternID)}}}// Build dynamic UPDATE query based on provided fieldsupdates := []string{}args := []any{}if input.Name != nil {updates = append(updates, "name = ?")args = append(args, *input.Name)}if input.Path != nil {updates = append(updates, "path = ?")args = append(args, *input.Path)}if input.SampleRate != nil {updates = append(updates, "sample_rate = ?")args = append(args, *input.SampleRate)}if input.Description != nil {updates = append(updates, "description = ?")args = append(args, *input.Description)}if input.CyclicRecordingPatternID != nil {trimmedPatternID := strings.TrimSpace(*input.CyclicRecordingPatternID)if trimmedPatternID == "" {// Clear the field by setting to NULLupdates = append(updates, "cyclic_recording_pattern_id = NULL")} else {// Set to the provided valueupdates = append(updates, "cyclic_recording_pattern_id = ?")args = append(args, trimmedPatternID)}}if len(updates) == 0 {return nil, UpdateClusterOutput{}, fmt.Errorf("no fields provided to update")}// Always update last_modifiedupdates = append(updates, "last_modified = now()")args = append(args, input.ClusterID)// Execute updatequery := fmt.Sprintf("UPDATE cluster SET %s WHERE id = ?", strings.Join(updates, ", "))_, err = database.Exec(query, args...)if err != nil {return nil, UpdateClusterOutput{}, fmt.Errorf("failed to update cluster: %w", err)}output := UpdateClusterOutput{ClusterID: input.ClusterID,Success: true,Message: "Cluster updated successfully",}return &mcp.CallToolResult{}, output, nil}ClusterID string `json:"cluster_id" jsonschema:"required,Cluster ID (12 characters)"`Name *string `json:"name,omitempty" jsonschema:"Cluster name (max 140 characters)"`Path *string 
`json:"path,omitempty" jsonschema:"Normalized folder path (max 255 characters)"`SampleRate *int `json:"sample_rate,omitempty" jsonschema:"Sample rate in Hz"`Description *string `json:"description,omitempty" jsonschema:"Cluster description (max 255 characters)"`CyclicRecordingPatternID *string `json:"cyclic_recording_pattern_id,omitempty" jsonschema:"Optional ID of cyclic recording pattern (12-character nanoid). Set to empty string to clear."`
package toolsimport ("context""fmt""strings""github.com/modelcontextprotocol/go-sdk/mcp""skraak_mcp/db")// UpdateLocationInput defines input parameterstype UpdateLocationInput struct {LocationID string `json:"location_id" jsonschema:"required,Location ID (12 characters)"`DatasetID *string `json:"dataset_id,omitempty" jsonschema:"Parent dataset ID (12 characters)"`Name *string `json:"name,omitempty" jsonschema:"Location name (max 140 characters)"`Latitude *float64 `json:"latitude,omitempty" jsonschema:"Latitude in decimal degrees (-90.0 to 90.0)"`Longitude *float64 `json:"longitude,omitempty" jsonschema:"Longitude in decimal degrees (-180.0 to 180.0)"`Description *string `json:"description,omitempty" jsonschema:"Description (max 255 characters)"`TimezoneID *string `json:"timezone_id,omitempty" jsonschema:"IANA timezone ID (max 40 characters)"`}// UpdateLocationOutput defines output structuretype UpdateLocationOutput struct {LocationID string `json:"location_id" jsonschema:"Updated location ID"`Success bool `json:"success" jsonschema:"Whether update succeeded"`Message string `json:"message" jsonschema:"Status message"`}// UpdateLocation implements the update_location MCP toolfunc UpdateLocation(ctx context.Context,req *mcp.CallToolRequest,input UpdateLocationInput,) (*mcp.CallToolResult, UpdateLocationOutput, error) {// Open writable databasedatabase, err := db.OpenWriteableDB(dbPath)if err != nil {return nil, UpdateLocationOutput{}, fmt.Errorf("failed to open database: %w", err)}defer database.Close()// Verify location existsvar exists boolerr = database.QueryRow("SELECT EXISTS(SELECT 1 FROM location WHERE id = ?)", input.LocationID).Scan(&exists)if err != nil {return nil, UpdateLocationOutput{}, fmt.Errorf("failed to query location: %w", err)}if !exists {return nil, UpdateLocationOutput{}, fmt.Errorf("location not found: %s", input.LocationID)}// Verify dataset exists if DatasetID providedif input.DatasetID != nil {err = database.QueryRow("SELECT EXISTS(SELECT 1 
FROM dataset WHERE id = ?)", *input.DatasetID).Scan(&exists)if err != nil {return nil, UpdateLocationOutput{}, fmt.Errorf("failed to query dataset: %w", err)}if !exists {return nil, UpdateLocationOutput{}, fmt.Errorf("dataset not found: %s", *input.DatasetID)}}// Validate Latitude if providedif input.Latitude != nil {if *input.Latitude < -90.0 || *input.Latitude > 90.0 {return nil, UpdateLocationOutput{}, fmt.Errorf("latitude out of range: %f (must be between -90.0 and 90.0)", *input.Latitude)}}// Validate Longitude if providedif input.Longitude != nil {if *input.Longitude < -180.0 || *input.Longitude > 180.0 {return nil, UpdateLocationOutput{}, fmt.Errorf("longitude out of range: %f (must be between -180.0 and 180.0)", *input.Longitude)}}// Validate Name length if providedif input.Name != nil && len(*input.Name) > 140 {return nil, UpdateLocationOutput{}, fmt.Errorf("name too long: %d characters (max 140)", len(*input.Name))}// Validate Description length if providedif input.Description != nil && len(*input.Description) > 255 {return nil, UpdateLocationOutput{}, fmt.Errorf("description too long: %d characters (max 255)", len(*input.Description))}// Validate TimezoneID length if providedif input.TimezoneID != nil && len(*input.TimezoneID) > 40 {return nil, UpdateLocationOutput{}, fmt.Errorf("timezone_id too long: %d characters (max 40)", len(*input.TimezoneID))}// Build dynamic UPDATE query based on provided fieldsupdates := []string{}args := []any{}if input.DatasetID != nil {updates = append(updates, "dataset_id = ?")args = append(args, *input.DatasetID)}if input.Name != nil {updates = append(updates, "name = ?")args = append(args, *input.Name)}if input.Latitude != nil {updates = append(updates, "latitude = ?")args = append(args, *input.Latitude)}if input.Longitude != nil {updates = append(updates, "longitude = ?")args = append(args, *input.Longitude)}if input.Description != nil {updates = append(updates, "description = ?")args = append(args, *input.Description)}if 
input.TimezoneID != nil {updates = append(updates, "timezone_id = ?")args = append(args, *input.TimezoneID)}if len(updates) == 0 {return nil, UpdateLocationOutput{}, fmt.Errorf("no fields provided to update")}// Always update last_modifiedupdates = append(updates, "last_modified = now()")args = append(args, input.LocationID)// Execute updatequery := fmt.Sprintf("UPDATE location SET %s WHERE id = ?", strings.Join(updates, ", "))_, err = database.Exec(query, args...)if err != nil {return nil, UpdateLocationOutput{}, fmt.Errorf("failed to update location: %w", err)}output := UpdateLocationOutput{LocationID: input.LocationID,Success: true,Message: "Location updated successfully",}return &mcp.CallToolResult{}, output, nil}
package toolsimport ("context""fmt""strings""github.com/modelcontextprotocol/go-sdk/mcp""skraak_mcp/db")// UpdateDatasetInput defines input parameterstype UpdateDatasetInput struct {DatasetID string `json:"dataset_id" jsonschema:"required,Dataset ID (12 characters)"`Name *string `json:"name,omitempty" jsonschema:"Dataset name (max 255 characters)"`Description *string `json:"description,omitempty" jsonschema:"Description (max 255 characters)"`Type *string `json:"type,omitempty" jsonschema:"Dataset type: 'organise', 'test', or 'train'"`}// UpdateDatasetOutput defines output structuretype UpdateDatasetOutput struct {DatasetID string `json:"dataset_id" jsonschema:"Updated dataset ID"`Success bool `json:"success" jsonschema:"Whether update succeeded"`Message string `json:"message" jsonschema:"Status message"`}// UpdateDataset implements the update_dataset MCP toolfunc UpdateDataset(ctx context.Context,req *mcp.CallToolRequest,input UpdateDatasetInput,) (*mcp.CallToolResult, UpdateDatasetOutput, error) {// Open writable databasedatabase, err := db.OpenWriteableDB(dbPath)if err != nil {return nil, UpdateDatasetOutput{}, fmt.Errorf("failed to open database: %w", err)}defer database.Close()// Verify dataset existsvar exists boolerr = database.QueryRow("SELECT EXISTS(SELECT 1 FROM dataset WHERE id = ?)", input.DatasetID).Scan(&exists)if err != nil {return nil, UpdateDatasetOutput{}, fmt.Errorf("failed to query dataset: %w", err)}if !exists {return nil, UpdateDatasetOutput{}, fmt.Errorf("dataset not found: %s", input.DatasetID)}// Validate Type if providedif input.Type != nil {typeValue := strings.ToLower(*input.Type)if typeValue != "organise" && typeValue != "test" && typeValue != "train" {return nil, UpdateDatasetOutput{}, fmt.Errorf("invalid dataset type: %s (must be 'organise', 'test', or 'train')", *input.Type)}}// Validate Name length if providedif input.Name != nil && len(*input.Name) > 255 {return nil, UpdateDatasetOutput{}, fmt.Errorf("name too long: %d characters 
(max 255)", len(*input.Name))}// Validate Description length if providedif input.Description != nil && len(*input.Description) > 255 {return nil, UpdateDatasetOutput{}, fmt.Errorf("description too long: %d characters (max 255)", len(*input.Description))}// Build dynamic UPDATE query based on provided fieldsupdates := []string{}args := []any{}if input.Name != nil {updates = append(updates, "name = ?")args = append(args, *input.Name)}if input.Description != nil {updates = append(updates, "description = ?")args = append(args, *input.Description)}if input.Type != nil {updates = append(updates, "type = ?")args = append(args, strings.ToLower(*input.Type))}if len(updates) == 0 {return nil, UpdateDatasetOutput{}, fmt.Errorf("no fields provided to update")}// Always update last_modifiedupdates = append(updates, "last_modified = now()")args = append(args, input.DatasetID)// Execute updatequery := fmt.Sprintf("UPDATE dataset SET %s WHERE id = ?", strings.Join(updates, ", "))_, err = database.Exec(query, args...)if err != nil {return nil, UpdateDatasetOutput{}, fmt.Errorf("failed to update dataset: %w", err)}output := UpdateDatasetOutput{DatasetID: input.DatasetID,Success: true,Message: "Dataset updated successfully",}return &mcp.CallToolResult{}, output, nil}
package tools

import (
	"context"
	"os"
	"path/filepath"
	"testing"

	"github.com/modelcontextprotocol/go-sdk/mcp"
)

// TestCreateCyclicRecordingPattern_Duplicate exercises the duplicate-detection
// behaviour of CreateCyclicRecordingPattern: creating a pattern whose
// record/sleep pair already exists must return the existing row instead of
// inserting a new one (idempotent create).
//
// NOTE(review): the test assumes the fixture DB at ../db/test.duckdb already
// contains pattern IBv_KxDGsNQs with record_s=60 / sleep_s=1740 — confirm the
// fixture is kept in sync. The 999/888 pattern created below is never cleaned
// up; subsequent runs rely on the idempotent path still passing.
func TestCreateCyclicRecordingPattern_Duplicate(t *testing.T) {
	// Setup: Use test database
	testDB := filepath.Join("..", "db", "test.duckdb")
	if _, err := os.Stat(testDB); os.IsNotExist(err) {
		t.Skipf("Test database not found at %s", testDB)
	}
	SetDBPath(testDB)
	ctx := context.Background()

	// Test 1: Try to create duplicate of existing pattern (60s/1740s)
	// Should return existing pattern IBv_KxDGsNQs
	t.Run("CreateDuplicatePattern", func(t *testing.T) {
		input := CreateCyclicRecordingPatternInput{
			RecordSeconds: 60,
			SleepSeconds:  1740,
		}
		result, output, err := CreateCyclicRecordingPattern(ctx, &mcp.CallToolRequest{}, input)
		if err != nil {
			t.Fatalf("Expected no error, got: %v", err)
		}
		if result == nil {
			t.Fatal("Expected non-nil result")
		}
		// Should return existing pattern
		if output.Pattern.ID != "IBv_KxDGsNQs" {
			t.Errorf("Expected existing pattern ID 'IBv_KxDGsNQs', got '%s'", output.Pattern.ID)
		}
		if output.Pattern.RecordS != 60 {
			t.Errorf("Expected record_s 60, got %d", output.Pattern.RecordS)
		}
		if output.Pattern.SleepS != 1740 {
			t.Errorf("Expected sleep_s 1740, got %d", output.Pattern.SleepS)
		}
		// Check message indicates existing pattern
		if output.Message == "" {
			t.Error("Expected non-empty message")
		}
		t.Logf("Message: %s", output.Message)
	})

	// Test 2: Create new unique pattern
	t.Run("CreateUniquePattern", func(t *testing.T) {
		input := CreateCyclicRecordingPatternInput{
			RecordSeconds: 999,
			SleepSeconds:  888,
		}
		result, output, err := CreateCyclicRecordingPattern(ctx, &mcp.CallToolRequest{}, input)
		if err != nil {
			t.Fatalf("Expected no error, got: %v", err)
		}
		if result == nil {
			t.Fatal("Expected non-nil result")
		}
		// Should create new pattern
		firstID := output.Pattern.ID
		if firstID == "" {
			t.Fatal("Expected non-empty ID")
		}
		if output.Pattern.RecordS != 999 {
			t.Errorf("Expected record_s 999, got %d", output.Pattern.RecordS)
		}
		if output.Pattern.SleepS != 888 {
			t.Errorf("Expected sleep_s 888, got %d", output.Pattern.SleepS)
		}
		t.Logf("Created pattern ID: %s", firstID)

		// Test 3: Try to create duplicate of the pattern we just created (idempotent)
		result2, output2, err2 := CreateCyclicRecordingPattern(ctx, &mcp.CallToolRequest{}, input)
		if err2 != nil {
			t.Fatalf("Expected no error on duplicate, got: %v", err2)
		}
		if result2 == nil {
			t.Fatal("Expected non-nil result")
		}
		// Should return same pattern
		if output2.Pattern.ID != firstID {
			t.Errorf("Expected same pattern ID '%s', got '%s'", firstID, output2.Pattern.ID)
		}
		t.Logf("Idempotent test passed - returned same ID: %s", output2.Pattern.ID)
	})
}

// TestCreateCyclicRecordingPattern_Validation table-tests input validation:
// record_seconds and sleep_seconds must both be strictly positive (zero and
// negative values are rejected), while a valid pair succeeds.
func TestCreateCyclicRecordingPattern_Validation(t *testing.T) {
	testDB := filepath.Join("..", "db", "test.duckdb")
	if _, err := os.Stat(testDB); os.IsNotExist(err) {
		t.Skipf("Test database not found at %s", testDB)
	}
	SetDBPath(testDB)
	ctx := context.Background()

	// Test invalid inputs
	tests := []struct {
		name          string // subtest name
		recordSeconds int    // record_seconds input under test
		sleepSeconds  int    // sleep_seconds input under test
		wantError     bool   // whether CreateCyclicRecordingPattern should fail
	}{
		{"ZeroRecordSeconds", 0, 100, true},
		{"NegativeRecordSeconds", -10, 100, true},
		{"ZeroSleepSeconds", 100, 0, true},
		{"NegativeSleepSeconds", 100, -10, true},
		{"ValidInputs", 10, 20, false},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			input := CreateCyclicRecordingPatternInput{
				RecordSeconds: tt.recordSeconds,
				SleepSeconds:  tt.sleepSeconds,
			}
			_, _, err := CreateCyclicRecordingPattern(ctx, &mcp.CallToolRequest{}, input)
			if (err != nil) != tt.wantError {
				t.Errorf("Expected error=%v, got error=%v", tt.wantError, err != nil)
			}
		})
	}
}
package toolsimport ("context""database/sql""fmt""github.com/modelcontextprotocol/go-sdk/mcp")// CreateCyclicRecordingPatternInput defines the input parameters for the create_cyclic_recording_pattern tooltype CreateCyclicRecordingPatternInput struct {RecordSeconds int `json:"record_seconds" jsonschema:"required,Number of seconds to record (must be positive)"`SleepSeconds int `json:"sleep_seconds" jsonschema:"required,Number of seconds to sleep between recordings (must be positive)"`}// CreateCyclicRecordingPatternOutput defines the output structuretype CreateCyclicRecordingPatternOutput struct {Pattern db.CyclicRecordingPattern `json:"pattern" jsonschema:"The created recording pattern with generated ID and timestamps"`Message string `json:"message" jsonschema:"Success message"`}// CreateCyclicRecordingPattern implements the create_cyclic_recording_pattern tool handler// Creates a new cyclic recording pattern with record/sleep cycle in secondsfunc CreateCyclicRecordingPattern(ctx context.Context,req *mcp.CallToolRequest,input CreateCyclicRecordingPatternInput,) (*mcp.CallToolResult, CreateCyclicRecordingPatternOutput, error) {var output CreateCyclicRecordingPatternOutput// Validate inputsif input.RecordSeconds <= 0 {return nil, output, fmt.Errorf("record_seconds must be positive (got %d)", input.RecordSeconds)}if input.SleepSeconds <= 0 {return nil, output, fmt.Errorf("sleep_seconds must be positive (got %d)", input.SleepSeconds)}// Open writable database connectiondatabase, err := db.OpenWriteableDB(dbPath)if err != nil {return nil, output, fmt.Errorf("database connection failed: %w", err)}defer database.Close()// Begin transactiontx, err := database.BeginTx(ctx, nil)if err != nil {return nil, output, fmt.Errorf("failed to begin transaction: %w", err)}defer func() {if err != nil {tx.Rollback()}}()// Generate IDif err != nil {return nil, output, fmt.Errorf("failed to generate ID: %w", err)}// Check if pattern with same record_s/sleep_s already existsvar existingID 
stringerr = tx.QueryRowContext(ctx,"SELECT id FROM cyclic_recording_pattern WHERE record_s = ? AND sleep_s = ? AND active = true",input.RecordSeconds, input.SleepSeconds,).Scan(&existingID)if err == nil {// Pattern already exists, return it instead of creating duplicatevar pattern db.CyclicRecordingPatternerr = tx.QueryRowContext(ctx,"SELECT id, record_s, sleep_s, created_at, last_modified, active FROM cyclic_recording_pattern WHERE id = ?",existingID,).Scan(&pattern.ID, &pattern.RecordS, &pattern.SleepS, &pattern.CreatedAt, &pattern.LastModified, &pattern.Active)if err != nil {return nil, output, fmt.Errorf("failed to fetch existing pattern: %w", err)}// Commit transaction (no changes made, but clean up)if err = tx.Commit(); err != nil {return nil, output, fmt.Errorf("failed to commit transaction: %w", err)}output.Pattern = patternoutput.Message = fmt.Sprintf("Pattern already exists with ID %s (record %ds, sleep %ds) - returning existing pattern",pattern.ID, pattern.RecordS, pattern.SleepS)return &mcp.CallToolResult{}, output, nil} else if err != sql.ErrNoRows {// Real error occurred (not just "no rows")return nil, output, fmt.Errorf("failed to check for existing pattern: %w", err)}// If sql.ErrNoRows, pattern doesn't exist, continue with INSERT below// Insert pattern (explicitly set timestamps and active for schema compatibility)_, err = tx.ExecContext(ctx,"INSERT INTO cyclic_recording_pattern (id, record_s, sleep_s, created_at, last_modified, active) VALUES (?, ?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, TRUE)",id, input.RecordSeconds, input.SleepSeconds,)if err != nil {return nil, output, fmt.Errorf("failed to create pattern: %w", err)}// Fetch the created pattern (gets DB-generated timestamps and defaults)var pattern db.CyclicRecordingPatternerr = tx.QueryRowContext(ctx,"SELECT id, record_s, sleep_s, created_at, last_modified, active FROM cyclic_recording_pattern WHERE id = ?",id,).Scan(&pattern.ID, &pattern.RecordS, &pattern.SleepS, &pattern.CreatedAt, 
&pattern.LastModified, &pattern.Active)if err != nil {return nil, output, fmt.Errorf("failed to fetch created pattern: %w", err)}// Commit transactionif err = tx.Commit(); err != nil {return nil, output, fmt.Errorf("failed to commit transaction: %w", err)}output.Pattern = patternoutput.Message = fmt.Sprintf("Successfully created cyclic recording pattern with ID %s (record %ds, sleep %ds)",pattern.ID, pattern.RecordS, pattern.SleepS)return &mcp.CallToolResult{}, output, nil}id, err := gonanoid.Generate("0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz", 12)gonanoid "github.com/matoous/go-nanoid/v2""skraak_mcp/db"
package toolsimport ("context""fmt""strings""time""github.com/modelcontextprotocol/go-sdk/mcp")// CreateLocationInput defines the input parameters for the create_location tooltype CreateLocationInput struct {DatasetID string `json:"dataset_id" jsonschema:"required,ID of the parent dataset (12-character nanoid)"`Name string `json:"name" jsonschema:"required,Location name (max 140 characters)"`Latitude float64 `json:"latitude" jsonschema:"required,Latitude in decimal degrees (-90 to 90)"`Longitude float64 `json:"longitude" jsonschema:"required,Longitude in decimal degrees (-180 to 180)"`TimezoneID string `json:"timezone_id" jsonschema:"required,IANA timezone ID (e.g. 'Pacific/Auckland')"`Description *string `json:"description,omitempty" jsonschema:"Optional location description (max 255 characters)"`}// CreateLocationOutput defines the output structuretype CreateLocationOutput struct {Location db.Location `json:"location" jsonschema:"The created location with generated ID and timestamps"`Message string `json:"message" jsonschema:"Success message"`}// CreateLocation implements the create_location tool handler// Creates a new location within a dataset with GPS coordinates and timezonefunc CreateLocation(ctx context.Context,req *mcp.CallToolRequest,input CreateLocationInput,) (*mcp.CallToolResult, CreateLocationOutput, error) {var output CreateLocationOutput// Validate nameif strings.TrimSpace(input.Name) == "" {return nil, output, fmt.Errorf("name cannot be empty")}if len(input.Name) > 140 {return nil, output, fmt.Errorf("name must be 140 characters or less (got %d)", len(input.Name))}// Validate description length if providedif input.Description != nil && len(*input.Description) > 255 {return nil, output, fmt.Errorf("description must be 255 characters or less (got %d)", len(*input.Description))}// Validate coordinatesif input.Latitude < -90 || input.Latitude > 90 {return nil, output, fmt.Errorf("latitude must be between -90 and 90 (got %f)", input.Latitude)}if 
input.Longitude < -180 || input.Longitude > 180 {return nil, output, fmt.Errorf("longitude must be between -180 and 180 (got %f)", input.Longitude)}// Validate timezoneif _, err := time.LoadLocation(input.TimezoneID); err != nil {return nil, output, fmt.Errorf("invalid timezone_id '%s': %w", input.TimezoneID, err)}// Validate dataset_id not emptyif strings.TrimSpace(input.DatasetID) == "" {return nil, output, fmt.Errorf("dataset_id cannot be empty")}// Open writable database connectiondatabase, err := db.OpenWriteableDB(dbPath)if err != nil {return nil, output, fmt.Errorf("database connection failed: %w", err)}defer database.Close()// Begin transactiontx, err := database.BeginTx(ctx, nil)if err != nil {return nil, output, fmt.Errorf("failed to begin transaction: %w", err)}defer func() {if err != nil {tx.Rollback()}}()// Verify dataset exists and is activevar datasetExists boolvar datasetActive boolvar datasetName stringerr = tx.QueryRowContext(ctx,"SELECT EXISTS(SELECT 1 FROM dataset WHERE id = ?), active, name FROM dataset WHERE id = ?",input.DatasetID, input.DatasetID,).Scan(&datasetExists, &datasetActive, &datasetName)if err != nil {return nil, output, fmt.Errorf("failed to verify dataset: %w", err)}if !datasetExists {return nil, output, fmt.Errorf("dataset with ID '%s' does not exist", input.DatasetID)}if !datasetActive {return nil, output, fmt.Errorf("dataset '%s' (ID: %s) is not active", datasetName, input.DatasetID)}// Generate IDif err != nil {return nil, output, fmt.Errorf("failed to generate ID: %w", err)}// Insert location (explicitly set timestamps and active for schema compatibility)_, err = tx.ExecContext(ctx,"INSERT INTO location (id, dataset_id, name, latitude, longitude, timezone_id, description, created_at, last_modified, active) VALUES (?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, TRUE)",id, input.DatasetID, input.Name, input.Latitude, input.Longitude, input.TimezoneID, input.Description,)if err != nil {return nil, output, 
fmt.Errorf("failed to create location: %w", err)}// Fetch the created location (gets DB-generated timestamps and defaults)var location db.Locationerr = tx.QueryRowContext(ctx,"SELECT id, dataset_id, name, latitude, longitude, description, created_at, last_modified, active, timezone_id FROM location WHERE id = ?",id,).Scan(&location.ID, &location.DatasetID, &location.Name, &location.Latitude, &location.Longitude,&location.Description, &location.CreatedAt, &location.LastModified, &location.Active, &location.TimezoneID)if err != nil {return nil, output, fmt.Errorf("failed to fetch created location: %w", err)}// Commit transactionif err = tx.Commit(); err != nil {return nil, output, fmt.Errorf("failed to commit transaction: %w", err)}output.Location = locationoutput.Message = fmt.Sprintf("Successfully created location '%s' with ID %s in dataset '%s' (%.6f, %.6f, %s)",location.Name, location.ID, datasetName, location.Latitude, location.Longitude, location.TimezoneID)return &mcp.CallToolResult{}, output, nil}id, err := gonanoid.Generate("0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz", 12)gonanoid "github.com/matoous/go-nanoid/v2""skraak_mcp/db"
package toolsimport ("context""fmt""strings""github.com/modelcontextprotocol/go-sdk/mcp")// CreateDatasetInput defines the input parameters for the create_dataset tooltype CreateDatasetInput struct {Name string `json:"name" jsonschema:"required,Dataset name (max 255 characters)"`Description *string `json:"description,omitempty" jsonschema:"Optional dataset description (max 255 characters)"`Type *string `json:"type,omitempty" jsonschema:"Dataset type: 'organise'/'test'/'train' (defaults to 'organise')"`}// CreateDatasetOutput defines the output structuretype CreateDatasetOutput struct {Dataset db.Dataset `json:"dataset" jsonschema:"The created dataset with generated ID and timestamps"`Message string `json:"message" jsonschema:"Success message"`}// CreateDataset implements the create_dataset tool handler// Creates a new dataset with the specified name, description, and typefunc CreateDataset(ctx context.Context,req *mcp.CallToolRequest,input CreateDatasetInput,) (*mcp.CallToolResult, CreateDatasetOutput, error) {var output CreateDatasetOutput// Validate nameif strings.TrimSpace(input.Name) == "" {return nil, output, fmt.Errorf("name cannot be empty")}if len(input.Name) > 255 {return nil, output, fmt.Errorf("name must be 255 characters or less (got %d)", len(input.Name))}// Validate description length if providedif input.Description != nil && len(*input.Description) > 255 {return nil, output, fmt.Errorf("description must be 255 characters or less (got %d)", len(*input.Description))}// Validate and set typedatasetType := db.DatasetTypeOrganise // Defaultif input.Type != nil {typeStr := strings.ToLower(strings.TrimSpace(*input.Type))switch typeStr {case "organise":datasetType = db.DatasetTypeOrganisecase "test":datasetType = db.DatasetTypeTestcase "train":datasetType = db.DatasetTypeTraindefault:return nil, output, fmt.Errorf("invalid type '%s': must be 'organise', 'test', or 'train'", *input.Type)}}// Open writable database connectiondatabase, err := 
db.OpenWriteableDB(dbPath)if err != nil {return nil, output, fmt.Errorf("database connection failed: %w", err)}defer database.Close()// Begin transactiontx, err := database.BeginTx(ctx, nil)if err != nil {return nil, output, fmt.Errorf("failed to begin transaction: %w", err)}defer func() {if err != nil {tx.Rollback()}}()// Generate IDif err != nil {return nil, output, fmt.Errorf("failed to generate ID: %w", err)}// Insert dataset (explicitly set timestamps and active for schema compatibility)_, err = tx.ExecContext(ctx,"INSERT INTO dataset (id, name, description, type, created_at, last_modified, active) VALUES (?, ?, ?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, TRUE)",id, input.Name, input.Description, string(datasetType),)if err != nil {return nil, output, fmt.Errorf("failed to create dataset: %w", err)}// Fetch the created dataset (gets DB-generated timestamps and defaults)var dataset db.Dataseterr = tx.QueryRowContext(ctx,"SELECT id, name, description, created_at, last_modified, active, type FROM dataset WHERE id = ?",id,).Scan(&dataset.ID, &dataset.Name, &dataset.Description, &dataset.CreatedAt, &dataset.LastModified, &dataset.Active, &dataset.Type)if err != nil {return nil, output, fmt.Errorf("failed to fetch created dataset: %w", err)}// Commit transactionif err = tx.Commit(); err != nil {return nil, output, fmt.Errorf("failed to commit transaction: %w", err)}output.Dataset = datasetoutput.Message = fmt.Sprintf("Successfully created dataset '%s' with ID %s (type: %s)",dataset.Name, dataset.ID, dataset.Type)return &mcp.CallToolResult{}, output, nil}id, err := gonanoid.Generate("0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz", 12)gonanoid "github.com/matoous/go-nanoid/v2""skraak_mcp/db"
package tools

import (
	"context"
	"database/sql"
	"errors"
	"fmt"
	"strings"

	gonanoid "github.com/matoous/go-nanoid/v2"
	"github.com/modelcontextprotocol/go-sdk/mcp"

	"skraak_mcp/db"
)

// CreateClusterInput defines the input parameters for the create_cluster tool.
type CreateClusterInput struct {
	DatasetID                string  `json:"dataset_id" jsonschema:"required,ID of the parent dataset (12-character nanoid)"`
	LocationID               string  `json:"location_id" jsonschema:"required,ID of the parent location (12-character nanoid)"`
	Name                     string  `json:"name" jsonschema:"required,Cluster name (max 140 characters)"`
	SampleRate               int     `json:"sample_rate" jsonschema:"required,Sample rate in Hz (must be positive)"`
	CyclicRecordingPatternID *string `json:"cyclic_recording_pattern_id,omitempty" jsonschema:"Optional ID of cyclic recording pattern (12-character nanoid)"`
	Description              *string `json:"description,omitempty" jsonschema:"Optional cluster description (max 255 characters)"`
}

// CreateClusterOutput defines the output structure.
type CreateClusterOutput struct {
	Cluster db.Cluster `json:"cluster" jsonschema:"The created cluster with generated ID and timestamps"`
	Message string     `json:"message" jsonschema:"Success message"`
}

// CreateCluster implements the create_cluster tool handler.
// Creates a new cluster within a location. Location must belong to the specified dataset.
func CreateCluster(
	ctx context.Context,
	req *mcp.CallToolRequest,
	input CreateClusterInput,
) (*mcp.CallToolResult, CreateClusterOutput, error) {
	var output CreateClusterOutput

	// Validate name (required, max 140 characters).
	if strings.TrimSpace(input.Name) == "" {
		return nil, output, fmt.Errorf("name cannot be empty")
	}
	if len(input.Name) > 140 {
		return nil, output, fmt.Errorf("name must be 140 characters or less (got %d)", len(input.Name))
	}

	// Validate description length if provided.
	if input.Description != nil && len(*input.Description) > 255 {
		return nil, output, fmt.Errorf("description must be 255 characters or less (got %d)", len(*input.Description))
	}

	// Validate sample rate.
	if input.SampleRate <= 0 {
		return nil, output, fmt.Errorf("sample_rate must be positive (got %d)", input.SampleRate)
	}

	// Validate IDs not empty.
	if strings.TrimSpace(input.DatasetID) == "" {
		return nil, output, fmt.Errorf("dataset_id cannot be empty")
	}
	if strings.TrimSpace(input.LocationID) == "" {
		return nil, output, fmt.Errorf("location_id cannot be empty")
	}

	// Open writable database connection.
	database, err := db.OpenWriteableDB(dbPath)
	if err != nil {
		return nil, output, fmt.Errorf("database connection failed: %w", err)
	}
	defer database.Close()

	// Begin transaction; rolled back by the deferred func on any error path.
	tx, err := database.BeginTx(ctx, nil)
	if err != nil {
		return nil, output, fmt.Errorf("failed to begin transaction: %w", err)
	}
	defer func() {
		if err != nil {
			tx.Rollback()
		}
	}()

	// Verify dataset exists and is active.
	// BUG FIX: the original combined EXISTS(...) with column selection in one
	// query; when the dataset was missing that query returned zero rows, Scan
	// failed with sql.ErrNoRows, and the "does not exist" branch was unreachable.
	var datasetActive bool
	var datasetName string
	err = tx.QueryRowContext(ctx,
		"SELECT active, name FROM dataset WHERE id = ?",
		input.DatasetID,
	).Scan(&datasetActive, &datasetName)
	if errors.Is(err, sql.ErrNoRows) {
		err = nil // not a transaction failure; report as a validation error
		return nil, output, fmt.Errorf("dataset with ID '%s' does not exist", input.DatasetID)
	}
	if err != nil {
		return nil, output, fmt.Errorf("failed to verify dataset: %w", err)
	}
	if !datasetActive {
		return nil, output, fmt.Errorf("dataset '%s' (ID: %s) is not active", datasetName, input.DatasetID)
	}

	// Verify location exists, is active, and belongs to the specified dataset (BUSINESS RULE).
	var locationActive bool
	var locationName string
	var locationDatasetID string
	err = tx.QueryRowContext(ctx,
		"SELECT active, name, dataset_id FROM location WHERE id = ?",
		input.LocationID,
	).Scan(&locationActive, &locationName, &locationDatasetID)
	if errors.Is(err, sql.ErrNoRows) {
		err = nil
		return nil, output, fmt.Errorf("location with ID '%s' does not exist", input.LocationID)
	}
	if err != nil {
		return nil, output, fmt.Errorf("failed to verify location: %w", err)
	}
	if !locationActive {
		return nil, output, fmt.Errorf("location '%s' (ID: %s) is not active", locationName, input.LocationID)
	}
	// CRITICAL BUSINESS RULE: Location must belong to the specified dataset.
	if locationDatasetID != input.DatasetID {
		return nil, output, fmt.Errorf("location '%s' (ID: %s) does not belong to dataset '%s' (ID: %s) - it belongs to dataset ID '%s'",
			locationName, input.LocationID, datasetName, input.DatasetID, locationDatasetID)
	}

	// Verify cyclic recording pattern if provided.
	if input.CyclicRecordingPatternID != nil && strings.TrimSpace(*input.CyclicRecordingPatternID) != "" {
		var patternActive bool
		err = tx.QueryRowContext(ctx,
			"SELECT active FROM cyclic_recording_pattern WHERE id = ?",
			*input.CyclicRecordingPatternID,
		).Scan(&patternActive)
		if errors.Is(err, sql.ErrNoRows) {
			err = nil
			return nil, output, fmt.Errorf("cyclic recording pattern with ID '%s' does not exist", *input.CyclicRecordingPatternID)
		}
		if err != nil {
			return nil, output, fmt.Errorf("failed to verify cyclic recording pattern: %w", err)
		}
		if !patternActive {
			return nil, output, fmt.Errorf("cyclic recording pattern with ID '%s' is not active", *input.CyclicRecordingPatternID)
		}
	}

	// Generate ID.
	// BUG FIX: this Generate call was missing here (displaced in the source),
	// so `err` was checked without being set and `id` was never defined.
	id, err := gonanoid.Generate("0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz", 12)
	if err != nil {
		return nil, output, fmt.Errorf("failed to generate ID: %w", err)
	}

	// Insert cluster (explicitly set timestamps and active for schema compatibility).
	_, err = tx.ExecContext(ctx,
		"INSERT INTO cluster (id, dataset_id, location_id, name, sample_rate, cyclic_recording_pattern_id, description, created_at, last_modified, active) VALUES (?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, TRUE)",
		id, input.DatasetID, input.LocationID, input.Name, input.SampleRate, input.CyclicRecordingPatternID, input.Description,
	)
	if err != nil {
		return nil, output, fmt.Errorf("failed to create cluster: %w", err)
	}

	// Fetch the created cluster (gets DB-generated timestamps and defaults).
	var cluster db.Cluster
	err = tx.QueryRowContext(ctx,
		"SELECT id, dataset_id, location_id, name, description, created_at, last_modified, active, cyclic_recording_pattern_id, sample_rate FROM cluster WHERE id = ?",
		id,
	).Scan(&cluster.ID, &cluster.DatasetID, &cluster.LocationID, &cluster.Name, &cluster.Description,
		&cluster.CreatedAt, &cluster.LastModified, &cluster.Active, &cluster.CyclicRecordingPatternID, &cluster.SampleRate)
	if err != nil {
		return nil, output, fmt.Errorf("failed to fetch created cluster: %w", err)
	}

	// Commit transaction.
	if err = tx.Commit(); err != nil {
		return nil, output, fmt.Errorf("failed to commit transaction: %w", err)
	}

	output.Cluster = cluster
	output.Message = fmt.Sprintf("Successfully created cluster '%s' with ID %s in location '%s' at dataset '%s' (sample rate: %d Hz)",
		cluster.Name, cluster.ID, locationName, datasetName, cluster.SampleRate)

	return &mcp.CallToolResult{}, output, nil
}
package tools

import (
	"context"
	"os"
	"path/filepath"
	"testing"

	"github.com/modelcontextprotocol/go-sdk/mcp"
)

// Integration tests for the create_or_update_pattern tool. All tests skip
// unless the shared DuckDB test fixture exists at ../db/test.duckdb.
// NOTE(review): the fixture is assumed to contain a pattern with ID
// 'IBv_KxDGsNQs' (record 60s / sleep 1740s) — confirm when regenerating it.

func TestCreateOrUpdatePattern_CreateDuplicate(t *testing.T) {
	// Setup: Use test database
	testDB := filepath.Join("..", "db", "test.duckdb")
	if _, err := os.Stat(testDB); os.IsNotExist(err) {
		t.Skipf("Test database not found at %s", testDB)
	}
	SetDBPath(testDB)
	ctx := context.Background()

	// Test 1: Try to create duplicate of existing pattern (60s/1740s)
	// Should return existing pattern IBv_KxDGsNQs
	t.Run("CreateDuplicatePattern", func(t *testing.T) {
		record := 60
		sleep := 1740
		input := PatternInput{
			RecordSeconds: &record,
			SleepSeconds:  &sleep,
		}
		result, output, err := CreateOrUpdatePattern(ctx, &mcp.CallToolRequest{}, input)
		if err != nil {
			t.Fatalf("Expected no error, got: %v", err)
		}
		if result == nil {
			t.Fatal("Expected non-nil result")
		}
		// Should return existing pattern
		if output.Pattern.ID != "IBv_KxDGsNQs" {
			t.Errorf("Expected existing pattern ID 'IBv_KxDGsNQs', got '%s'", output.Pattern.ID)
		}
		if output.Pattern.RecordS != 60 {
			t.Errorf("Expected record_s 60, got %d", output.Pattern.RecordS)
		}
		if output.Pattern.SleepS != 1740 {
			t.Errorf("Expected sleep_s 1740, got %d", output.Pattern.SleepS)
		}
		// Check message indicates existing pattern
		if output.Message == "" {
			t.Error("Expected non-empty message")
		}
		t.Logf("Message: %s", output.Message)
	})

	// Test 2: Create new unique pattern
	t.Run("CreateUniquePattern", func(t *testing.T) {
		record := 999
		sleep := 888
		input := PatternInput{
			RecordSeconds: &record,
			SleepSeconds:  &sleep,
		}
		result, output, err := CreateOrUpdatePattern(ctx, &mcp.CallToolRequest{}, input)
		if err != nil {
			t.Fatalf("Expected no error, got: %v", err)
		}
		if result == nil {
			t.Fatal("Expected non-nil result")
		}
		// Should create new pattern
		firstID := output.Pattern.ID
		if firstID == "" {
			t.Fatal("Expected non-empty ID")
		}
		if output.Pattern.RecordS != 999 {
			t.Errorf("Expected record_s 999, got %d", output.Pattern.RecordS)
		}
		if output.Pattern.SleepS != 888 {
			t.Errorf("Expected sleep_s 888, got %d", output.Pattern.SleepS)
		}
		t.Logf("Created pattern ID: %s", firstID)

		// Test 3: Try to create duplicate of the pattern we just created (idempotent)
		result2, output2, err2 := CreateOrUpdatePattern(ctx, &mcp.CallToolRequest{}, input)
		if err2 != nil {
			t.Fatalf("Expected no error on duplicate, got: %v", err2)
		}
		if result2 == nil {
			t.Fatal("Expected non-nil result")
		}
		// Should return same pattern
		if output2.Pattern.ID != firstID {
			t.Errorf("Expected same pattern ID '%s', got '%s'", firstID, output2.Pattern.ID)
		}
		t.Logf("Idempotent test passed - returned same ID: %s", output2.Pattern.ID)
	})
}

// TestCreateOrUpdatePattern_Validation table-tests the create-mode input
// validation (no ID supplied = create mode).
func TestCreateOrUpdatePattern_Validation(t *testing.T) {
	testDB := filepath.Join("..", "db", "test.duckdb")
	if _, err := os.Stat(testDB); os.IsNotExist(err) {
		t.Skipf("Test database not found at %s", testDB)
	}
	SetDBPath(testDB)
	ctx := context.Background()

	// Test invalid inputs for create (no ID = create mode)
	tests := []struct {
		name          string
		recordSeconds int
		sleepSeconds  int
		wantError     bool
	}{
		{"ZeroRecordSeconds", 0, 100, true},
		{"NegativeRecordSeconds", -10, 100, true},
		{"ZeroSleepSeconds", 100, 0, true},
		{"NegativeSleepSeconds", 100, -10, true},
		{"ValidInputs", 10, 20, false},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			input := PatternInput{
				RecordSeconds: &tt.recordSeconds,
				SleepSeconds:  &tt.sleepSeconds,
			}
			_, _, err := CreateOrUpdatePattern(ctx, &mcp.CallToolRequest{}, input)
			if (err != nil) != tt.wantError {
				t.Errorf("Expected error=%v, got error=%v", tt.wantError, err != nil)
			}
		})
	}
}

// TestCreateOrUpdatePattern_Update covers update-mode error paths:
// unknown pattern ID and an update request with no updatable fields.
func TestCreateOrUpdatePattern_Update(t *testing.T) {
	testDB := filepath.Join("..", "db", "test.duckdb")
	if _, err := os.Stat(testDB); os.IsNotExist(err) {
		t.Skipf("Test database not found at %s", testDB)
	}
	SetDBPath(testDB)
	ctx := context.Background()

	t.Run("UpdateNonExistentPattern", func(t *testing.T) {
		id := "NONEXISTENT1"
		record := 100
		input := PatternInput{
			ID:            &id,
			RecordSeconds: &record,
		}
		_, _, err := CreateOrUpdatePattern(ctx, &mcp.CallToolRequest{}, input)
		if err == nil {
			t.Error("Expected error for non-existent pattern")
		}
	})

	t.Run("UpdateNoFields", func(t *testing.T) {
		id := "IBv_KxDGsNQs"
		input := PatternInput{
			ID: &id,
		}
		_, _, err := CreateOrUpdatePattern(ctx, &mcp.CallToolRequest{}, input)
		if err == nil {
			t.Error("Expected error when no fields provided")
		}
	})
}
package tools

import (
	"context"
	"database/sql"
	"errors"
	"fmt"
	"skraak_mcp/db"
	"strings"

	gonanoid "github.com/matoous/go-nanoid/v2"

	"github.com/modelcontextprotocol/go-sdk/mcp"
)

// PatternInput defines the input parameters for the create_or_update_pattern tool.
type PatternInput struct {
	ID            *string `json:"id,omitempty" jsonschema:"Pattern ID (12 characters). Omit to create a new pattern, provide to update an existing one."`
	RecordSeconds *int    `json:"record_seconds,omitempty" jsonschema:"Number of seconds to record (must be positive). Required for create."`
	SleepSeconds  *int    `json:"sleep_seconds,omitempty" jsonschema:"Number of seconds to sleep between recordings (must be positive for create, >= 0 for update)."`
}

// PatternOutput defines the output structure.
type PatternOutput struct {
	Pattern db.CyclicRecordingPattern `json:"pattern" jsonschema:"The created or updated recording pattern"`
	Message string                    `json:"message" jsonschema:"Success message"`
}

// CreateOrUpdatePattern implements the create_or_update_pattern tool handler.
// A non-blank ID selects update mode; otherwise a new pattern is created.
func CreateOrUpdatePattern(
	ctx context.Context,
	req *mcp.CallToolRequest,
	input PatternInput,
) (*mcp.CallToolResult, PatternOutput, error) {
	if input.ID != nil && strings.TrimSpace(*input.ID) != "" {
		return updatePattern(ctx, input)
	}
	return createPattern(ctx, input)
}

// createPattern creates a new cyclic recording pattern, or returns the
// existing active pattern with the same record/sleep values (idempotent).
func createPattern(ctx context.Context, input PatternInput) (*mcp.CallToolResult, PatternOutput, error) {
	var output PatternOutput

	// Validate required fields for create.
	if input.RecordSeconds == nil {
		return nil, output, fmt.Errorf("record_seconds is required when creating a pattern")
	}
	if input.SleepSeconds == nil {
		return nil, output, fmt.Errorf("sleep_seconds is required when creating a pattern")
	}
	if *input.RecordSeconds <= 0 {
		return nil, output, fmt.Errorf("record_seconds must be positive (got %d)", *input.RecordSeconds)
	}
	if *input.SleepSeconds <= 0 {
		return nil, output, fmt.Errorf("sleep_seconds must be positive (got %d)", *input.SleepSeconds)
	}

	// Open writable database connection.
	database, err := db.OpenWriteableDB(dbPath)
	if err != nil {
		return nil, output, fmt.Errorf("database connection failed: %w", err)
	}
	defer database.Close()

	// Begin transaction; rolled back by the deferred func on any error path.
	tx, err := database.BeginTx(ctx, nil)
	if err != nil {
		return nil, output, fmt.Errorf("failed to begin transaction: %w", err)
	}
	defer func() {
		if err != nil {
			tx.Rollback()
		}
	}()

	// Check if pattern with same record_s/sleep_s already exists.
	var existingID string
	err = tx.QueryRowContext(ctx,
		"SELECT id FROM cyclic_recording_pattern WHERE record_s = ? AND sleep_s = ? AND active = true",
		*input.RecordSeconds, *input.SleepSeconds,
	).Scan(&existingID)
	if err == nil {
		// Pattern already exists, return it instead of creating duplicate.
		var pattern db.CyclicRecordingPattern
		err = tx.QueryRowContext(ctx,
			"SELECT id, record_s, sleep_s, created_at, last_modified, active FROM cyclic_recording_pattern WHERE id = ?",
			existingID,
		).Scan(&pattern.ID, &pattern.RecordS, &pattern.SleepS, &pattern.CreatedAt, &pattern.LastModified, &pattern.Active)
		if err != nil {
			return nil, output, fmt.Errorf("failed to fetch existing pattern: %w", err)
		}
		if err = tx.Commit(); err != nil {
			return nil, output, fmt.Errorf("failed to commit transaction: %w", err)
		}
		output.Pattern = pattern
		output.Message = fmt.Sprintf("Pattern already exists with ID %s (record %ds, sleep %ds) - returning existing pattern",
			pattern.ID, pattern.RecordS, pattern.SleepS)
		return &mcp.CallToolResult{}, output, nil
	} else if !errors.Is(err, sql.ErrNoRows) {
		// BUG FIX: was `err != sql.ErrNoRows`, which misclassifies wrapped
		// driver errors; errors.Is unwraps the chain.
		return nil, output, fmt.Errorf("failed to check for existing pattern: %w", err)
	}

	// No duplicate found: generate a fresh 12-character nanoid.
	id, err := gonanoid.Generate("0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz", 12)
	if err != nil {
		return nil, output, fmt.Errorf("failed to generate ID: %w", err)
	}

	// Insert pattern.
	_, err = tx.ExecContext(ctx,
		"INSERT INTO cyclic_recording_pattern (id, record_s, sleep_s, created_at, last_modified, active) VALUES (?, ?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, TRUE)",
		id, *input.RecordSeconds, *input.SleepSeconds,
	)
	if err != nil {
		return nil, output, fmt.Errorf("failed to create pattern: %w", err)
	}

	// Fetch the created pattern (gets DB-generated timestamps and defaults).
	var pattern db.CyclicRecordingPattern
	err = tx.QueryRowContext(ctx,
		"SELECT id, record_s, sleep_s, created_at, last_modified, active FROM cyclic_recording_pattern WHERE id = ?",
		id,
	).Scan(&pattern.ID, &pattern.RecordS, &pattern.SleepS, &pattern.CreatedAt, &pattern.LastModified, &pattern.Active)
	if err != nil {
		return nil, output, fmt.Errorf("failed to fetch created pattern: %w", err)
	}

	if err = tx.Commit(); err != nil {
		return nil, output, fmt.Errorf("failed to commit transaction: %w", err)
	}

	output.Pattern = pattern
	output.Message = fmt.Sprintf("Successfully created cyclic recording pattern with ID %s (record %ds, sleep %ds)",
		pattern.ID, pattern.RecordS, pattern.SleepS)

	return &mcp.CallToolResult{}, output, nil
}

// updatePattern applies a partial update (record_s and/or sleep_s) to an
// existing pattern and returns the row as stored afterwards.
func updatePattern(ctx context.Context, input PatternInput) (*mcp.CallToolResult, PatternOutput, error) {
	var output PatternOutput
	patternID := *input.ID

	// Validate fields if provided.
	if input.RecordSeconds != nil && *input.RecordSeconds <= 0 {
		return nil, output, fmt.Errorf("record_seconds must be greater than 0: %d", *input.RecordSeconds)
	}
	if input.SleepSeconds != nil && *input.SleepSeconds < 0 {
		return nil, output, fmt.Errorf("sleep_seconds must be greater than or equal to 0: %d", *input.SleepSeconds)
	}

	// Open writable database.
	database, err := db.OpenWriteableDB(dbPath)
	if err != nil {
		return nil, output, fmt.Errorf("failed to open database: %w", err)
	}
	defer database.Close()

	// Verify pattern exists.
	// CONSISTENCY FIX: use the context-aware query methods throughout; the
	// original accepted ctx but never used it.
	var exists bool
	err = database.QueryRowContext(ctx, "SELECT EXISTS(SELECT 1 FROM cyclic_recording_pattern WHERE id = ?)", patternID).Scan(&exists)
	if err != nil {
		return nil, output, fmt.Errorf("failed to query pattern: %w", err)
	}
	if !exists {
		return nil, output, fmt.Errorf("pattern not found: %s", patternID)
	}

	// Build dynamic UPDATE query from the provided optional fields.
	updates := []string{}
	args := []any{}
	if input.RecordSeconds != nil {
		updates = append(updates, "record_s = ?")
		args = append(args, *input.RecordSeconds)
	}
	if input.SleepSeconds != nil {
		updates = append(updates, "sleep_s = ?")
		args = append(args, *input.SleepSeconds)
	}
	if len(updates) == 0 {
		return nil, output, fmt.Errorf("no fields provided to update")
	}

	// Always update last_modified.
	updates = append(updates, "last_modified = now()")
	args = append(args, patternID)

	query := fmt.Sprintf("UPDATE cyclic_recording_pattern SET %s WHERE id = ?", strings.Join(updates, ", "))
	_, err = database.ExecContext(ctx, query, args...)
	if err != nil {
		return nil, output, fmt.Errorf("failed to update pattern: %w", err)
	}

	// Fetch the updated pattern.
	var pattern db.CyclicRecordingPattern
	err = database.QueryRowContext(ctx,
		"SELECT id, record_s, sleep_s, created_at, last_modified, active FROM cyclic_recording_pattern WHERE id = ?",
		patternID,
	).Scan(&pattern.ID, &pattern.RecordS, &pattern.SleepS, &pattern.CreatedAt, &pattern.LastModified, &pattern.Active)
	if err != nil {
		return nil, output, fmt.Errorf("failed to fetch updated pattern: %w", err)
	}

	output.Pattern = pattern
	output.Message = fmt.Sprintf("Successfully updated pattern (ID: %s, record %ds, sleep %ds)",
		pattern.ID, pattern.RecordS, pattern.SleepS)

	return &mcp.CallToolResult{}, output, nil
}
package tools

import (
	"context"
	"database/sql"
	"errors"
	"fmt"
	"skraak_mcp/db"
	"strings"
	"time"

	gonanoid "github.com/matoous/go-nanoid/v2"

	"github.com/modelcontextprotocol/go-sdk/mcp"
)

// LocationInput defines the input parameters for the create_or_update_location tool.
type LocationInput struct {
	ID          *string  `json:"id,omitempty" jsonschema:"Location ID (12 characters). Omit to create a new location, provide to update an existing one."`
	DatasetID   *string  `json:"dataset_id,omitempty" jsonschema:"ID of the parent dataset (12-character nanoid). Required for create."`
	Name        *string  `json:"name,omitempty" jsonschema:"Location name (max 140 characters). Required for create."`
	Latitude    *float64 `json:"latitude,omitempty" jsonschema:"Latitude in decimal degrees (-90 to 90). Required for create."`
	Longitude   *float64 `json:"longitude,omitempty" jsonschema:"Longitude in decimal degrees (-180 to 180). Required for create."`
	TimezoneID  *string  `json:"timezone_id,omitempty" jsonschema:"IANA timezone ID (e.g. 'Pacific/Auckland'). Required for create."`
	Description *string  `json:"description,omitempty" jsonschema:"Optional location description (max 255 characters)"`
}

// LocationOutput defines the output structure.
type LocationOutput struct {
	Location db.Location `json:"location" jsonschema:"The created or updated location"`
	Message  string      `json:"message" jsonschema:"Success message"`
}

// CreateOrUpdateLocation implements the create_or_update_location tool handler.
// A non-blank ID selects update mode; otherwise a new location is created.
func CreateOrUpdateLocation(
	ctx context.Context,
	req *mcp.CallToolRequest,
	input LocationInput,
) (*mcp.CallToolResult, LocationOutput, error) {
	if input.ID != nil && strings.TrimSpace(*input.ID) != "" {
		return updateLocation(ctx, input)
	}
	return createLocation(ctx, input)
}

// validateLocationFields validates fields common to both create and update.
// Each check applies only when the corresponding optional field is set.
func validateLocationFields(input LocationInput) error {
	if input.Name != nil && len(*input.Name) > 140 {
		return fmt.Errorf("name must be 140 characters or less (got %d)", len(*input.Name))
	}
	if input.Description != nil && len(*input.Description) > 255 {
		return fmt.Errorf("description must be 255 characters or less (got %d)", len(*input.Description))
	}
	if input.Latitude != nil && (*input.Latitude < -90 || *input.Latitude > 90) {
		return fmt.Errorf("latitude must be between -90 and 90 (got %f)", *input.Latitude)
	}
	if input.Longitude != nil && (*input.Longitude < -180 || *input.Longitude > 180) {
		return fmt.Errorf("longitude must be between -180 and 180 (got %f)", *input.Longitude)
	}
	if input.TimezoneID != nil {
		if len(*input.TimezoneID) > 40 {
			return fmt.Errorf("timezone_id must be 40 characters or less (got %d)", len(*input.TimezoneID))
		}
		// time.LoadLocation validates the IANA name against the zone database.
		if _, err := time.LoadLocation(*input.TimezoneID); err != nil {
			return fmt.Errorf("invalid timezone_id '%s': %w", *input.TimezoneID, err)
		}
	}
	return nil
}

// createLocation inserts a new location under an existing, active dataset.
func createLocation(ctx context.Context, input LocationInput) (*mcp.CallToolResult, LocationOutput, error) {
	var output LocationOutput

	// Validate required fields for create.
	if input.DatasetID == nil || strings.TrimSpace(*input.DatasetID) == "" {
		return nil, output, fmt.Errorf("dataset_id is required when creating a location")
	}
	if input.Name == nil || strings.TrimSpace(*input.Name) == "" {
		return nil, output, fmt.Errorf("name is required when creating a location")
	}
	if input.Latitude == nil {
		return nil, output, fmt.Errorf("latitude is required when creating a location")
	}
	if input.Longitude == nil {
		return nil, output, fmt.Errorf("longitude is required when creating a location")
	}
	if input.TimezoneID == nil || strings.TrimSpace(*input.TimezoneID) == "" {
		return nil, output, fmt.Errorf("timezone_id is required when creating a location")
	}
	if err := validateLocationFields(input); err != nil {
		return nil, output, err
	}

	// Open writable database connection.
	database, err := db.OpenWriteableDB(dbPath)
	if err != nil {
		return nil, output, fmt.Errorf("database connection failed: %w", err)
	}
	defer database.Close()

	// Begin transaction; rolled back by the deferred func on any error path.
	tx, err := database.BeginTx(ctx, nil)
	if err != nil {
		return nil, output, fmt.Errorf("failed to begin transaction: %w", err)
	}
	defer func() {
		if err != nil {
			tx.Rollback()
		}
	}()

	// Verify dataset exists and is active.
	// BUG FIX: the original combined EXISTS(...) with column selection in one
	// query; when the dataset was missing that query returned zero rows, Scan
	// failed with sql.ErrNoRows, and the "does not exist" branch was unreachable.
	var datasetActive bool
	var datasetName string
	err = tx.QueryRowContext(ctx,
		"SELECT active, name FROM dataset WHERE id = ?",
		*input.DatasetID,
	).Scan(&datasetActive, &datasetName)
	if errors.Is(err, sql.ErrNoRows) {
		err = nil // not a transaction failure; report as a validation error
		return nil, output, fmt.Errorf("dataset with ID '%s' does not exist", *input.DatasetID)
	}
	if err != nil {
		return nil, output, fmt.Errorf("failed to verify dataset: %w", err)
	}
	if !datasetActive {
		return nil, output, fmt.Errorf("dataset '%s' (ID: %s) is not active", datasetName, *input.DatasetID)
	}

	// Generate a fresh 12-character nanoid.
	id, err := gonanoid.Generate("0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz", 12)
	if err != nil {
		return nil, output, fmt.Errorf("failed to generate ID: %w", err)
	}

	// Insert location (explicitly set timestamps and active for schema compatibility).
	_, err = tx.ExecContext(ctx,
		"INSERT INTO location (id, dataset_id, name, latitude, longitude, timezone_id, description, created_at, last_modified, active) VALUES (?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, TRUE)",
		id, *input.DatasetID, *input.Name, *input.Latitude, *input.Longitude, *input.TimezoneID, input.Description,
	)
	if err != nil {
		return nil, output, fmt.Errorf("failed to create location: %w", err)
	}

	// Fetch the created location (gets DB-generated timestamps and defaults).
	var location db.Location
	err = tx.QueryRowContext(ctx,
		"SELECT id, dataset_id, name, latitude, longitude, description, created_at, last_modified, active, timezone_id FROM location WHERE id = ?",
		id,
	).Scan(&location.ID, &location.DatasetID, &location.Name, &location.Latitude, &location.Longitude,
		&location.Description, &location.CreatedAt, &location.LastModified, &location.Active, &location.TimezoneID)
	if err != nil {
		return nil, output, fmt.Errorf("failed to fetch created location: %w", err)
	}

	if err = tx.Commit(); err != nil {
		return nil, output, fmt.Errorf("failed to commit transaction: %w", err)
	}

	output.Location = location
	output.Message = fmt.Sprintf("Successfully created location '%s' with ID %s in dataset '%s' (%.6f, %.6f, %s)",
		location.Name, location.ID, datasetName, location.Latitude, location.Longitude, location.TimezoneID)

	return &mcp.CallToolResult{}, output, nil
}

// updateLocation applies a partial update to an existing location and
// returns the row as stored afterwards.
func updateLocation(ctx context.Context, input LocationInput) (*mcp.CallToolResult, LocationOutput, error) {
	var output LocationOutput
	locationID := *input.ID

	if err := validateLocationFields(input); err != nil {
		return nil, output, err
	}

	// Open writable database.
	database, err := db.OpenWriteableDB(dbPath)
	if err != nil {
		return nil, output, fmt.Errorf("failed to open database: %w", err)
	}
	defer database.Close()

	// Verify location exists.
	// CONSISTENCY FIX: use the context-aware query methods throughout; the
	// original accepted ctx but never used it.
	var exists bool
	err = database.QueryRowContext(ctx, "SELECT EXISTS(SELECT 1 FROM location WHERE id = ?)", locationID).Scan(&exists)
	if err != nil {
		return nil, output, fmt.Errorf("failed to query location: %w", err)
	}
	if !exists {
		return nil, output, fmt.Errorf("location not found: %s", locationID)
	}

	// Verify dataset exists if DatasetID provided.
	if input.DatasetID != nil {
		var datasetExists bool
		err = database.QueryRowContext(ctx, "SELECT EXISTS(SELECT 1 FROM dataset WHERE id = ?)", *input.DatasetID).Scan(&datasetExists)
		if err != nil {
			return nil, output, fmt.Errorf("failed to query dataset: %w", err)
		}
		if !datasetExists {
			return nil, output, fmt.Errorf("dataset not found: %s", *input.DatasetID)
		}
	}

	// Build dynamic UPDATE query from the provided optional fields.
	updates := []string{}
	args := []any{}
	if input.DatasetID != nil {
		updates = append(updates, "dataset_id = ?")
		args = append(args, *input.DatasetID)
	}
	if input.Name != nil {
		updates = append(updates, "name = ?")
		args = append(args, *input.Name)
	}
	if input.Latitude != nil {
		updates = append(updates, "latitude = ?")
		args = append(args, *input.Latitude)
	}
	if input.Longitude != nil {
		updates = append(updates, "longitude = ?")
		args = append(args, *input.Longitude)
	}
	if input.Description != nil {
		updates = append(updates, "description = ?")
		args = append(args, *input.Description)
	}
	if input.TimezoneID != nil {
		updates = append(updates, "timezone_id = ?")
		args = append(args, *input.TimezoneID)
	}
	if len(updates) == 0 {
		return nil, output, fmt.Errorf("no fields provided to update")
	}

	// Always update last_modified.
	updates = append(updates, "last_modified = now()")
	args = append(args, locationID)

	query := fmt.Sprintf("UPDATE location SET %s WHERE id = ?", strings.Join(updates, ", "))
	_, err = database.ExecContext(ctx, query, args...)
	if err != nil {
		return nil, output, fmt.Errorf("failed to update location: %w", err)
	}

	// Fetch the updated location.
	var location db.Location
	err = database.QueryRowContext(ctx,
		"SELECT id, dataset_id, name, latitude, longitude, description, created_at, last_modified, active, timezone_id FROM location WHERE id = ?",
		locationID,
	).Scan(&location.ID, &location.DatasetID, &location.Name, &location.Latitude, &location.Longitude,
		&location.Description, &location.CreatedAt, &location.LastModified, &location.Active, &location.TimezoneID)
	if err != nil {
		return nil, output, fmt.Errorf("failed to fetch updated location: %w", err)
	}

	output.Location = location
	output.Message = fmt.Sprintf("Successfully updated location '%s' (ID: %s)", location.Name, location.ID)

	return &mcp.CallToolResult{}, output, nil
}
input := CreateClusterInput{DatasetID: datasetID,LocationID: locationID,Name: "Integration Test Cluster",SampleRate: 16000,
sampleRate := 16000input := ClusterInput{DatasetID: &datasetID,LocationID: &locationID,Name: stringPtr("Integration Test Cluster"),SampleRate: &sampleRate,
Summary ImportSelectionSummary `json:"summary"`SelectionIDs []string `json:"selection_ids"`Errors []SelectionImportError `json:"errors,omitempty"`
Summary ImportSelectionSummary `json:"summary"`SelectionIDs []string `json:"selection_ids"`Errors []SelectionImportError `json:"errors,omitempty"`
FilterName string `json:"filter_name"`TotalSelections int `json:"total_selections"`ImportedSelections int `json:"imported_selections"`FailedSelections int `json:"failed_selections"`SpeciesCount int `json:"species_count"`CallTypeCount int `json:"call_type_count"`ProcessingTimeMs int64 `json:"processing_time_ms"`
FilterName string `json:"filter_name"`TotalSelections int `json:"total_selections"`ImportedSelections int `json:"imported_selections"`FailedSelections int `json:"failed_selections"`SpeciesCount int `json:"species_count"`CallTypeCount int `json:"call_type_count"`ProcessingTimeMs int64 `json:"processing_time_ms"`
package toolsimport ("context""fmt""skraak_mcp/db""strings"gonanoid "github.com/matoous/go-nanoid/v2""github.com/modelcontextprotocol/go-sdk/mcp")// DatasetInput defines the input parameters for the create_or_update_dataset tooltype DatasetInput struct {ID *string `json:"id,omitempty" jsonschema:"Dataset ID (12 characters). Omit to create a new dataset, provide to update an existing one."`Name *string `json:"name,omitempty" jsonschema:"Dataset name (max 255 characters). Required for create."`Description *string `json:"description,omitempty" jsonschema:"Optional dataset description (max 255 characters)"`Type *string `json:"type,omitempty" jsonschema:"Dataset type: 'organise'/'test'/'train' (defaults to 'organise' on create)"`}// DatasetOutput defines the output structuretype DatasetOutput struct {Dataset db.Dataset `json:"dataset" jsonschema:"The created or updated dataset"`Message string `json:"message" jsonschema:"Success message"`}// CreateOrUpdateDataset implements the create_or_update_dataset tool handlerfunc CreateOrUpdateDataset(ctx context.Context,req *mcp.CallToolRequest,input DatasetInput,) (*mcp.CallToolResult, DatasetOutput, error) {if input.ID != nil && strings.TrimSpace(*input.ID) != "" {return updateDataset(ctx, input)}return createDataset(ctx, input)}func createDataset(ctx context.Context, input DatasetInput) (*mcp.CallToolResult, DatasetOutput, error) {var output DatasetOutput// Validate name (required for create)if input.Name == nil || strings.TrimSpace(*input.Name) == "" {return nil, output, fmt.Errorf("name is required when creating a dataset")}if len(*input.Name) > 255 {return nil, output, fmt.Errorf("name must be 255 characters or less (got %d)", len(*input.Name))}// Validate description length if providedif input.Description != nil && len(*input.Description) > 255 {return nil, output, fmt.Errorf("description must be 255 characters or less (got %d)", len(*input.Description))}// Validate and set typedatasetType := db.DatasetTypeOrganise // 
Defaultif input.Type != nil {typeStr := strings.ToLower(strings.TrimSpace(*input.Type))switch typeStr {case "organise":datasetType = db.DatasetTypeOrganisecase "test":datasetType = db.DatasetTypeTestcase "train":datasetType = db.DatasetTypeTraindefault:return nil, output, fmt.Errorf("invalid type '%s': must be 'organise', 'test', or 'train'", *input.Type)}}// Open writable database connectiondatabase, err := db.OpenWriteableDB(dbPath)if err != nil {return nil, output, fmt.Errorf("database connection failed: %w", err)}defer database.Close()// Begin transactiontx, err := database.BeginTx(ctx, nil)if err != nil {return nil, output, fmt.Errorf("failed to begin transaction: %w", err)}defer func() {if err != nil {tx.Rollback()}}()// Generate IDid, err := gonanoid.Generate("0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz", 12)if err != nil {return nil, output, fmt.Errorf("failed to generate ID: %w", err)}// Insert dataset_, err = tx.ExecContext(ctx,"INSERT INTO dataset (id, name, description, type, created_at, last_modified, active) VALUES (?, ?, ?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, TRUE)",id, *input.Name, input.Description, string(datasetType),)if err != nil {return nil, output, fmt.Errorf("failed to create dataset: %w", err)}// Fetch the created datasetvar dataset db.Dataseterr = tx.QueryRowContext(ctx,"SELECT id, name, description, created_at, last_modified, active, type FROM dataset WHERE id = ?",id,).Scan(&dataset.ID, &dataset.Name, &dataset.Description, &dataset.CreatedAt, &dataset.LastModified, &dataset.Active, &dataset.Type)if err != nil {return nil, output, fmt.Errorf("failed to fetch created dataset: %w", err)}if err = tx.Commit(); err != nil {return nil, output, fmt.Errorf("failed to commit transaction: %w", err)}output.Dataset = datasetoutput.Message = fmt.Sprintf("Successfully created dataset '%s' with ID %s (type: %s)",dataset.Name, dataset.ID, dataset.Type)return &mcp.CallToolResult{}, output, nil}func updateDataset(ctx 
context.Context, input DatasetInput) (*mcp.CallToolResult, DatasetOutput, error) {var output DatasetOutputdatasetID := *input.ID// Validate fields if providedif input.Name != nil && len(*input.Name) > 255 {return nil, output, fmt.Errorf("name must be 255 characters or less (got %d)", len(*input.Name))}if input.Description != nil && len(*input.Description) > 255 {return nil, output, fmt.Errorf("description must be 255 characters or less (got %d)", len(*input.Description))}if input.Type != nil {typeValue := strings.ToLower(*input.Type)if typeValue != "organise" && typeValue != "test" && typeValue != "train" {return nil, output, fmt.Errorf("invalid dataset type: %s (must be 'organise', 'test', or 'train')", *input.Type)}}// Open writable databasedatabase, err := db.OpenWriteableDB(dbPath)if err != nil {return nil, output, fmt.Errorf("failed to open database: %w", err)}defer database.Close()// Verify dataset existsvar exists boolerr = database.QueryRow("SELECT EXISTS(SELECT 1 FROM dataset WHERE id = ?)", datasetID).Scan(&exists)if err != nil {return nil, output, fmt.Errorf("failed to query dataset: %w", err)}if !exists {return nil, output, fmt.Errorf("dataset not found: %s", datasetID)}// Build dynamic UPDATE queryupdates := []string{}args := []any{}if input.Name != nil {updates = append(updates, "name = ?")args = append(args, *input.Name)}if input.Description != nil {updates = append(updates, "description = ?")args = append(args, *input.Description)}if input.Type != nil {updates = append(updates, "type = ?")args = append(args, strings.ToLower(*input.Type))}if len(updates) == 0 {return nil, output, fmt.Errorf("no fields provided to update")}// Always update last_modifiedupdates = append(updates, "last_modified = now()")args = append(args, datasetID)query := fmt.Sprintf("UPDATE dataset SET %s WHERE id = ?", strings.Join(updates, ", "))_, err = database.Exec(query, args...)if err != nil {return nil, output, fmt.Errorf("failed to update dataset: %w", err)}// Fetch the 
updated datasetvar dataset db.Dataseterr = database.QueryRow("SELECT id, name, description, created_at, last_modified, active, type FROM dataset WHERE id = ?",datasetID,).Scan(&dataset.ID, &dataset.Name, &dataset.Description, &dataset.CreatedAt, &dataset.LastModified, &dataset.Active, &dataset.Type)if err != nil {return nil, output, fmt.Errorf("failed to fetch updated dataset: %w", err)}output.Dataset = datasetoutput.Message = fmt.Sprintf("Successfully updated dataset '%s' (ID: %s)", dataset.Name, dataset.ID)return &mcp.CallToolResult{}, output, nil}
package toolsimport ("context""fmt""skraak_mcp/db""strings"gonanoid "github.com/matoous/go-nanoid/v2""github.com/modelcontextprotocol/go-sdk/mcp")// ClusterInput defines the input parameters for the create_or_update_cluster tooltype ClusterInput struct {ID *string `json:"id,omitempty" jsonschema:"Cluster ID (12 characters). Omit to create a new cluster, provide to update an existing one."`DatasetID *string `json:"dataset_id,omitempty" jsonschema:"ID of the parent dataset (12-character nanoid). Required for create."`LocationID *string `json:"location_id,omitempty" jsonschema:"ID of the parent location (12-character nanoid). Required for create."`Name *string `json:"name,omitempty" jsonschema:"Cluster name (max 140 characters). Required for create."`SampleRate *int `json:"sample_rate,omitempty" jsonschema:"Sample rate in Hz (must be positive). Required for create."`Path *string `json:"path,omitempty" jsonschema:"Normalized folder path (max 255 characters)"`CyclicRecordingPatternID *string `json:"cyclic_recording_pattern_id,omitempty" jsonschema:"Optional ID of cyclic recording pattern (12-character nanoid). 
Set to empty string to clear."`Description *string `json:"description,omitempty" jsonschema:"Optional cluster description (max 255 characters)"`}// ClusterOutput defines the output structuretype ClusterOutput struct {Cluster db.Cluster `json:"cluster" jsonschema:"The created or updated cluster"`Message string `json:"message" jsonschema:"Success message"`}// CreateOrUpdateCluster implements the create_or_update_cluster tool handlerfunc CreateOrUpdateCluster(ctx context.Context,req *mcp.CallToolRequest,input ClusterInput,) (*mcp.CallToolResult, ClusterOutput, error) {if input.ID != nil && strings.TrimSpace(*input.ID) != "" {return updateCluster(ctx, input)}return createCluster(ctx, input)}// validateClusterFields validates fields common to both create and updatefunc validateClusterFields(input ClusterInput) error {if input.Name != nil && len(*input.Name) > 140 {return fmt.Errorf("name must be 140 characters or less (got %d)", len(*input.Name))}if input.Description != nil && len(*input.Description) > 255 {return fmt.Errorf("description must be 255 characters or less (got %d)", len(*input.Description))}if input.SampleRate != nil && *input.SampleRate <= 0 {return fmt.Errorf("sample_rate must be positive (got %d)", *input.SampleRate)}return nil}func createCluster(ctx context.Context, input ClusterInput) (*mcp.CallToolResult, ClusterOutput, error) {var output ClusterOutput// Validate required fields for createif input.DatasetID == nil || strings.TrimSpace(*input.DatasetID) == "" {return nil, output, fmt.Errorf("dataset_id is required when creating a cluster")}if input.LocationID == nil || strings.TrimSpace(*input.LocationID) == "" {return nil, output, fmt.Errorf("location_id is required when creating a cluster")}if input.Name == nil || strings.TrimSpace(*input.Name) == "" {return nil, output, fmt.Errorf("name is required when creating a cluster")}if input.SampleRate == nil {return nil, output, fmt.Errorf("sample_rate is required when creating a cluster")}if err := 
validateClusterFields(input); err != nil {return nil, output, err}// Open writable database connectiondatabase, err := db.OpenWriteableDB(dbPath)if err != nil {return nil, output, fmt.Errorf("database connection failed: %w", err)}defer database.Close()// Begin transactiontx, err := database.BeginTx(ctx, nil)if err != nil {return nil, output, fmt.Errorf("failed to begin transaction: %w", err)}defer func() {if err != nil {tx.Rollback()}}()// Verify dataset exists and is activevar datasetExists boolvar datasetActive boolvar datasetName stringerr = tx.QueryRowContext(ctx,"SELECT EXISTS(SELECT 1 FROM dataset WHERE id = ?), active, name FROM dataset WHERE id = ?",*input.DatasetID, *input.DatasetID,).Scan(&datasetExists, &datasetActive, &datasetName)if err != nil {return nil, output, fmt.Errorf("failed to verify dataset: %w", err)}if !datasetExists {return nil, output, fmt.Errorf("dataset with ID '%s' does not exist", *input.DatasetID)}if !datasetActive {return nil, output, fmt.Errorf("dataset '%s' (ID: %s) is not active", datasetName, *input.DatasetID)}// Verify location exists, is active, and belongs to the specified datasetvar locationExists boolvar locationActive boolvar locationName stringvar locationDatasetID stringerr = tx.QueryRowContext(ctx,"SELECT EXISTS(SELECT 1 FROM location WHERE id = ?), active, name, dataset_id FROM location WHERE id = ?",*input.LocationID, *input.LocationID,).Scan(&locationExists, &locationActive, &locationName, &locationDatasetID)if err != nil {return nil, output, fmt.Errorf("failed to verify location: %w", err)}if !locationExists {return nil, output, fmt.Errorf("location with ID '%s' does not exist", *input.LocationID)}if !locationActive {return nil, output, fmt.Errorf("location '%s' (ID: %s) is not active", locationName, *input.LocationID)}if locationDatasetID != *input.DatasetID {return nil, output, fmt.Errorf("location '%s' (ID: %s) does not belong to dataset '%s' (ID: %s) - it belongs to dataset ID '%s'",locationName, 
*input.LocationID, datasetName, *input.DatasetID, locationDatasetID)}// Verify cyclic recording pattern if providedif input.CyclicRecordingPatternID != nil && strings.TrimSpace(*input.CyclicRecordingPatternID) != "" {var patternExists boolvar patternActive boolerr = tx.QueryRowContext(ctx,"SELECT EXISTS(SELECT 1 FROM cyclic_recording_pattern WHERE id = ?), active FROM cyclic_recording_pattern WHERE id = ?",*input.CyclicRecordingPatternID, *input.CyclicRecordingPatternID,).Scan(&patternExists, &patternActive)if err != nil {return nil, output, fmt.Errorf("failed to verify cyclic recording pattern: %w", err)}if !patternExists {return nil, output, fmt.Errorf("cyclic recording pattern with ID '%s' does not exist", *input.CyclicRecordingPatternID)}if !patternActive {return nil, output, fmt.Errorf("cyclic recording pattern with ID '%s' is not active", *input.CyclicRecordingPatternID)}}// Generate IDid, err := gonanoid.Generate("0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz", 12)if err != nil {return nil, output, fmt.Errorf("failed to generate ID: %w", err)}// Insert cluster_, err = tx.ExecContext(ctx,"INSERT INTO cluster (id, dataset_id, location_id, name, sample_rate, cyclic_recording_pattern_id, description, created_at, last_modified, active) VALUES (?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, TRUE)",id, *input.DatasetID, *input.LocationID, *input.Name, *input.SampleRate, input.CyclicRecordingPatternID, input.Description,)if err != nil {return nil, output, fmt.Errorf("failed to create cluster: %w", err)}// Fetch the created clustervar cluster db.Clustererr = tx.QueryRowContext(ctx,"SELECT id, dataset_id, location_id, name, description, created_at, last_modified, active, cyclic_recording_pattern_id, sample_rate FROM cluster WHERE id = ?",id,).Scan(&cluster.ID, &cluster.DatasetID, &cluster.LocationID, &cluster.Name, &cluster.Description,&cluster.CreatedAt, &cluster.LastModified, &cluster.Active, &cluster.CyclicRecordingPatternID, 
&cluster.SampleRate)if err != nil {return nil, output, fmt.Errorf("failed to fetch created cluster: %w", err)}if err = tx.Commit(); err != nil {return nil, output, fmt.Errorf("failed to commit transaction: %w", err)}output.Cluster = clusteroutput.Message = fmt.Sprintf("Successfully created cluster '%s' with ID %s in location '%s' at dataset '%s' (sample rate: %d Hz)",cluster.Name, cluster.ID, locationName, datasetName, cluster.SampleRate)return &mcp.CallToolResult{}, output, nil}func updateCluster(ctx context.Context, input ClusterInput) (*mcp.CallToolResult, ClusterOutput, error) {var output ClusterOutputclusterID := *input.IDif err := validateClusterFields(input); err != nil {return nil, output, err}// Open writable databasedatabase, err := db.OpenWriteableDB(dbPath)if err != nil {return nil, output, fmt.Errorf("failed to open database: %w", err)}defer database.Close()// Verify cluster existsvar exists boolerr = database.QueryRow("SELECT EXISTS(SELECT 1 FROM cluster WHERE id = ?)", clusterID).Scan(&exists)if err != nil {return nil, output, fmt.Errorf("failed to query cluster: %w", err)}if !exists {return nil, output, fmt.Errorf("cluster not found: %s", clusterID)}// Validate cyclic_recording_pattern_id if providedif input.CyclicRecordingPatternID != nil {trimmedPatternID := strings.TrimSpace(*input.CyclicRecordingPatternID)if trimmedPatternID != "" {var patternExists boolerr = database.QueryRow("SELECT EXISTS(SELECT 1 FROM cyclic_recording_pattern WHERE id = ? 
AND active = true)",trimmedPatternID,).Scan(&patternExists)if err != nil {return nil, output, fmt.Errorf("failed to verify cyclic recording pattern: %w", err)}if !patternExists {return nil, output, fmt.Errorf("cyclic recording pattern not found or not active: %s", trimmedPatternID)}}}// Build dynamic UPDATE queryupdates := []string{}args := []any{}if input.Name != nil {updates = append(updates, "name = ?")args = append(args, *input.Name)}if input.Path != nil {updates = append(updates, "path = ?")args = append(args, *input.Path)}if input.SampleRate != nil {updates = append(updates, "sample_rate = ?")args = append(args, *input.SampleRate)}if input.Description != nil {updates = append(updates, "description = ?")args = append(args, *input.Description)}if input.CyclicRecordingPatternID != nil {trimmedPatternID := strings.TrimSpace(*input.CyclicRecordingPatternID)if trimmedPatternID == "" {updates = append(updates, "cyclic_recording_pattern_id = NULL")} else {updates = append(updates, "cyclic_recording_pattern_id = ?")args = append(args, trimmedPatternID)}}if len(updates) == 0 {return nil, output, fmt.Errorf("no fields provided to update")}// Always update last_modifiedupdates = append(updates, "last_modified = now()")args = append(args, clusterID)query := fmt.Sprintf("UPDATE cluster SET %s WHERE id = ?", strings.Join(updates, ", "))_, err = database.Exec(query, args...)if err != nil {return nil, output, fmt.Errorf("failed to update cluster: %w", err)}// Fetch the updated clustervar cluster db.Clustererr = database.QueryRow("SELECT id, dataset_id, location_id, name, description, created_at, last_modified, active, cyclic_recording_pattern_id, sample_rate FROM cluster WHERE id = ?",clusterID,).Scan(&cluster.ID, &cluster.DatasetID, &cluster.LocationID, &cluster.Name, &cluster.Description,&cluster.CreatedAt, &cluster.LastModified, &cluster.Active, &cluster.CyclicRecordingPatternID, &cluster.SampleRate)if err != nil {return nil, output, fmt.Errorf("failed to fetch updated 
cluster: %w", err)}output.Cluster = clusteroutput.Message = fmt.Sprintf("Successfully updated cluster '%s' (ID: %s)", cluster.Name, cluster.ID)return &mcp.CallToolResult{}, output, nil}
# Comprehensive test suite for all write tools (create and update)# Tests: create_dataset, create_location, create_cluster, create_cyclic_recording_pattern# update_dataset, update_location, update_cluster, update_pattern
# Comprehensive test suite for create_or_update tools# Tests: create_or_update_dataset, create_or_update_location, create_or_update_cluster, create_or_update_pattern
echo "Test 1: Create cyclic recording pattern (valid)"echo "------------------------------------------------"PATTERN_RESULT=$(send_request "tools/call" '{"name":"create_cyclic_recording_pattern","arguments":{"record_seconds":120,"sleep_seconds":300}}')
echo "Test 1: Create pattern (valid)"echo "-------------------------------"PATTERN_RESULT=$(send_request "tools/call" '{"name":"create_or_update_pattern","arguments":{"record_seconds":120,"sleep_seconds":300}}')
INVALID_PATTERN=$(send_request "tools/call" '{"name":"create_cyclic_recording_pattern","arguments":{"record_seconds":-10,"sleep_seconds":300}}')
INVALID_PATTERN=$(send_request "tools/call" '{"name":"create_or_update_pattern","arguments":{"record_seconds":-10,"sleep_seconds":300}}')
DATASET_RESULT=$(send_request "tools/call" '{"name":"create_dataset","arguments":{"name":"Test Dataset 2026","description":"Automated test dataset","type":"organise"}}')
DATASET_RESULT=$(send_request "tools/call" '{"name":"create_or_update_dataset","arguments":{"name":"Test Dataset 2026","description":"Automated test dataset","type":"organise"}}')
INVALID_DATASET=$(send_request "tools/call" '{"name":"create_dataset","arguments":{"name":"Bad Dataset","type":"invalid_type"}}')
INVALID_DATASET=$(send_request "tools/call" '{"name":"create_or_update_dataset","arguments":{"name":"Bad Dataset","type":"invalid_type"}}')
LOCATION_RESULT=$(send_request "tools/call" '{"name":"create_location","arguments":{"dataset_id":"'"$DATASET_ID"'","name":"Wellington Test Location","latitude":-41.2865,"longitude":174.7762,"timezone_id":"Pacific/Auckland","description":"Test location in Wellington"}}')
LOCATION_RESULT=$(send_request "tools/call" '{"name":"create_or_update_location","arguments":{"dataset_id":"'"$DATASET_ID"'","name":"Wellington Test Location","latitude":-41.2865,"longitude":174.7762,"timezone_id":"Pacific/Auckland","description":"Test location in Wellington"}}')
INVALID_LOCATION=$(send_request "tools/call" '{"name":"create_location","arguments":{"dataset_id":"'"$DATASET_ID"'","name":"Invalid Location","latitude":999,"longitude":174.7762,"timezone_id":"Pacific/Auckland"}}')
INVALID_LOCATION=$(send_request "tools/call" '{"name":"create_or_update_location","arguments":{"dataset_id":"'"$DATASET_ID"'","name":"Invalid Location","latitude":999,"longitude":174.7762,"timezone_id":"Pacific/Auckland"}}')
CLUSTER_RESULT=$(send_request "tools/call" '{"name":"create_cluster","arguments":{"dataset_id":"'"$DATASET_ID"'","location_id":"'"$LOCATION_ID"'","name":"Test Cluster A01","sample_rate":250000,"cyclic_recording_pattern_id":"'"$PATTERN_ID"'"}}')
CLUSTER_RESULT=$(send_request "tools/call" '{"name":"create_or_update_cluster","arguments":{"dataset_id":"'"$DATASET_ID"'","location_id":"'"$LOCATION_ID"'","name":"Test Cluster A01","sample_rate":250000,"cyclic_recording_pattern_id":"'"$PATTERN_ID"'"}}')
INVALID_CLUSTER=$(send_request "tools/call" '{"name":"create_cluster","arguments":{"dataset_id":"'"$DATASET_ID"'","location_id":"'"$LOCATION_ID"'","name":"Bad Cluster","sample_rate":-1000}}')
INVALID_CLUSTER=$(send_request "tools/call" '{"name":"create_or_update_cluster","arguments":{"dataset_id":"'"$DATASET_ID"'","location_id":"'"$LOCATION_ID"'","name":"Bad Cluster","sample_rate":-1000}}')
UPDATE_RESULT=$(send_request "tools/call" '{"name":"update_dataset","arguments":{"dataset_id":"'"$DATASET_ID"'","name":"Updated Test Dataset","description":"Updated description after test"}}')SUCCESS=$(echo "$UPDATE_RESULT" | jq -r '.result.structuredContent.success // empty')if [ "$SUCCESS" = "true" ]; then
UPDATE_RESULT=$(send_request "tools/call" '{"name":"create_or_update_dataset","arguments":{"id":"'"$DATASET_ID"'","name":"Updated Test Dataset","description":"Updated description after test"}}')SUCCESS=$(echo "$UPDATE_RESULT" | jq -r '.result.structuredContent.dataset.id // empty')if [ -n "$SUCCESS" ]; then
UPDATE_RESULT=$(send_request "tools/call" '{"name":"update_dataset","arguments":{"dataset_id":"'"$DATASET_ID"'","type":"train"}}')SUCCESS=$(echo "$UPDATE_RESULT" | jq -r '.result.structuredContent.success // empty')if [ "$SUCCESS" = "true" ]; thenecho "✓ Successfully updated dataset type"
UPDATE_RESULT=$(send_request "tools/call" '{"name":"create_or_update_dataset","arguments":{"id":"'"$DATASET_ID"'","type":"train"}}')UPDATED_TYPE=$(echo "$UPDATE_RESULT" | jq -r '.result.structuredContent.dataset.type // empty')if [ "$UPDATED_TYPE" = "train" ]; thenecho "✓ Successfully updated dataset type to 'train'"
UPDATE_RESULT=$(send_request "tools/call" '{"name":"update_location","arguments":{"location_id":"'"$LOCATION_ID"'","name":"Updated Wellington Location","latitude":-41.2900,"longitude":174.7800}}')SUCCESS=$(echo "$UPDATE_RESULT" | jq -r '.result.structuredContent.success // empty')if [ "$SUCCESS" = "true" ]; then
UPDATE_RESULT=$(send_request "tools/call" '{"name":"create_or_update_location","arguments":{"id":"'"$LOCATION_ID"'","name":"Updated Wellington Location","latitude":-41.2900,"longitude":174.7800}}')SUCCESS=$(echo "$UPDATE_RESULT" | jq -r '.result.structuredContent.location.id // empty')if [ -n "$SUCCESS" ]; then
UPDATE_RESULT=$(send_request "tools/call" '{"name":"update_cluster","arguments":{"cluster_id":"'"$CLUSTER_ID"'","name":"Updated Cluster A01","sample_rate":384000,"description":"Updated cluster description"}}')SUCCESS=$(echo "$UPDATE_RESULT" | jq -r '.result.structuredContent.success // empty')if [ "$SUCCESS" = "true" ]; then
UPDATE_RESULT=$(send_request "tools/call" '{"name":"create_or_update_cluster","arguments":{"id":"'"$CLUSTER_ID"'","name":"Updated Cluster A01","sample_rate":384000,"description":"Updated cluster description"}}')SUCCESS=$(echo "$UPDATE_RESULT" | jq -r '.result.structuredContent.cluster.id // empty')if [ -n "$SUCCESS" ]; then
# Check if this combination already existsCHECK_RESULT=$(send_request "tools/call" '{"name":"execute_sql","arguments":{"query":"SELECT COUNT(*) as count FROM cyclic_recording_pattern WHERE record_s = '"$NEW_RECORD"' AND sleep_s = '"$NEW_SLEEP"' AND active = true"}}')EXISTS=$(echo "$CHECK_RESULT" | jq -r '.result.structuredContent.rows[0].count // "0"' | sed 's/"//g')
if [ "$EXISTS" = "0" ]; thenUPDATE_RESULT=$(send_request "tools/call" '{"name":"update_pattern","arguments":{"pattern_id":"'"$EXISTING_PATTERN_ID"'","record_s":'"$NEW_RECORD"',"sleep_s":'"$NEW_SLEEP"'}}')SUCCESS=$(echo "$UPDATE_RESULT" | jq -r '.result.structuredContent.success // empty')if [ "$SUCCESS" = "true" ]; thenecho "✓ Successfully updated pattern (${NEW_RECORD}s record, ${NEW_SLEEP}s sleep)"elseecho "✗ Failed to update pattern"echo "$UPDATE_RESULT" | jq '.'fi
UPDATE_RESULT=$(send_request "tools/call" '{"name":"create_or_update_pattern","arguments":{"id":"'"$EXISTING_PATTERN_ID"'","record_seconds":'"$NEW_RECORD"',"sleep_seconds":'"$NEW_SLEEP"'}}')SUCCESS=$(echo "$UPDATE_RESULT" | jq -r '.result.structuredContent.pattern.id // empty')if [ -n "$SUCCESS" ]; thenecho "✓ Successfully updated pattern (${NEW_RECORD}s record, ${NEW_SLEEP}s sleep)"
INVALID_UPDATE=$(send_request "tools/call" '{"name":"update_dataset","arguments":{"dataset_id":"INVALID_ID_123","name":"Should Fail"}}')
INVALID_UPDATE=$(send_request "tools/call" '{"name":"create_or_update_dataset","arguments":{"id":"INVALID_ID_123","name":"Should Fail"}}')
DATASET_RESULT=$(send_request "tools/call" '{"name":"create_dataset","arguments":{"name":"Bulk Import Test Dataset","type":"test","description":"Dataset for testing bulk import"}}')
DATASET_RESULT=$(send_request "tools/call" '{"name":"create_or_update_dataset","arguments":{"name":"Bulk Import Test Dataset","type":"test","description":"Dataset for testing bulk import"}}')
LOCATION_A_RESULT=$(send_request "tools/call" '{"name":"create_location","arguments":{"dataset_id":"'"$DATASET_ID"'","name":"Test Location A","latitude":-41.2865,"longitude":174.7762,"timezone_id":"Pacific/Auckland","description":"Test site A"}}')
LOCATION_A_RESULT=$(send_request "tools/call" '{"name":"create_or_update_location","arguments":{"dataset_id":"'"$DATASET_ID"'","name":"Test Location A","latitude":-41.2865,"longitude":174.7762,"timezone_id":"Pacific/Auckland","description":"Test site A"}}')
LOCATION_B_RESULT=$(send_request "tools/call" '{"name":"create_location","arguments":{"dataset_id":"'"$DATASET_ID"'","name":"Test Location B","latitude":-36.8485,"longitude":174.7633,"timezone_id":"Pacific/Auckland","description":"Test site B"}}')
LOCATION_B_RESULT=$(send_request "tools/call" '{"name":"create_or_update_location","arguments":{"dataset_id":"'"$DATASET_ID"'","name":"Test Location B","latitude":-36.8485,"longitude":174.7633,"timezone_id":"Pacific/Auckland","description":"Test site B"}}')
- **Write tools (8)**: `create_*` and `update_*` for datasets, locations, clusters, patterns
- **Write tools (4)**: `create_or_update_dataset`, `create_or_update_location`, `create_or_update_cluster`, `create_or_update_pattern`
Tests all 8 write tools (create and update):- `create_dataset`, `create_location`, `create_cluster`, `create_cyclic_recording_pattern`- `update_dataset`, `update_location`, `update_cluster`, `update_pattern`
Tests all 4 create_or_update tools in both create and update modes:- `create_or_update_dataset` - create (no id) and update (with id)- `create_or_update_location` - create and update- `create_or_update_cluster` - create and update- `create_or_update_pattern` - create and update
{"jsonrpc":"2.0","id":9,"method":"tools/call","params":{"name":"create_dataset","arguments":{"name":"Test Dataset","description":"Testing create tool","type":"test"}}}
{"jsonrpc":"2.0","id":9,"method":"tools/call","params":{"name":"create_or_update_dataset","arguments":{"name":"Test Dataset","description":"Testing create tool","type":"test"}}}
{"jsonrpc":"2.0","id":10,"method":"tools/call","params":{"name":"create_location","arguments":{"dataset_id":"YOUR_DATASET_ID","name":"Test Location","latitude":-41.2865,"longitude":174.7762,"timezone_id":"Pacific/Auckland"}}}
{"jsonrpc":"2.0","id":10,"method":"tools/call","params":{"name":"create_or_update_dataset","arguments":{"id":"YOUR_DATASET_ID","name":"Updated Name"}}}```### 11. Create location```json{"jsonrpc":"2.0","id":11,"method":"tools/call","params":{"name":"create_or_update_location","arguments":{"dataset_id":"YOUR_DATASET_ID","name":"Test Location","latitude":-41.2865,"longitude":174.7762,"timezone_id":"Pacific/Auckland"}}}
- `create_dataset`, `create_location`, `create_cluster`, `create_cyclic_recording_pattern`- `update_dataset`, `update_location`, `update_cluster`, `update_pattern`
- `create_or_update_dataset`, `create_or_update_location`, `create_or_update_cluster`, `create_or_update_pattern`
}]}}```### Prompt Response```json{"jsonrpc":"2.0","id":8,"result":{"description":"Query active datasets with filtering and analysis using SQL SELECT and GROUP BY","messages":[{"role":"user","content":{"type":"text","text":"I want to query active datasets..."}
}}```## Testing with Claude DesktopConfigure the server in Claude Desktop:1. Edit your MCP config file:- **Linux**: `~/.config/Claude/claude_desktop_config.json`- **macOS**: `~/Library/Application Support/Claude/claude_desktop_config.json`- **Windows**: `%APPDATA%\Claude\claude_desktop_config.json`2. Add this configuration:```json{"mcpServers": {"skraak_mcp": {"command": "/home/david/go/src/skraak_mcp/skraak_mcp","args": ["/home/david/go/src/skraak_mcp/db/skraak.duckdb"]}
3. Restart Claude Desktop4. Test by asking:- "What time is it?"- "Show me all active datasets"- "What tables are in the database?"- "Query locations for dataset vgIr9JSH_lFj"- "Create a new test dataset called 'My Test Data'"- "Show me the database schema"
Name: "create_dataset",Description: "Create a new dataset. Returns the created dataset with generated ID and timestamps.",}, tools.CreateDataset)
Name: "create_or_update_dataset",Description: "Create or update a dataset. Omit 'id' to create (name required), provide 'id' to update. Returns the dataset with timestamps.",}, tools.CreateOrUpdateDataset)
Name: "create_location",Description: "Create a new location within a dataset. Requires valid dataset_id, GPS coordinates, and IANA timezone.",}, tools.CreateLocation)
Name: "create_or_update_location",Description: "Create or update a location. Omit 'id' to create (dataset_id, name, latitude, longitude, timezone_id required), provide 'id' to update. Location must belong to the specified dataset when creating.",}, tools.CreateOrUpdateLocation)
Name: "create_cluster",Description: "Create a new cluster within a location. Location must belong to the specified dataset. If using a cyclic_recording_pattern_id, query existing patterns first with execute_sql and reuse matching pattern rather than creating a new one.",}, tools.CreateCluster)
Name: "create_or_update_cluster",Description: "Create or update a cluster. Omit 'id' to create (dataset_id, location_id, name, sample_rate required), provide 'id' to update. Query existing patterns first with execute_sql before setting cyclic_recording_pattern_id.",}, tools.CreateOrUpdateCluster)
Name: "update_cluster",Description: "Update an existing cluster's metadata (name, path, sample_rate, description, cyclic_recording_pattern_id). When setting a pattern, query existing patterns first with execute_sql and reuse matching pattern rather than creating a new one.",}, tools.UpdateCluster)mcp.AddTool(server, &mcp.Tool{Name: "update_dataset",Description: "Update an existing dataset's metadata (name, description, type).",}, tools.UpdateDataset)
Name: "create_or_update_pattern",Description: "Create or update a cyclic recording pattern. Omit 'id' to create (record_seconds, sleep_seconds required), provide 'id' to update. Returns existing pattern if duplicate record/sleep values found.",}, tools.CreateOrUpdatePattern)
mcp.AddTool(server, &mcp.Tool{Name: "update_location",Description: "Update an existing location's metadata (name, coordinates, dataset, timezone).",}, tools.UpdateLocation)mcp.AddTool(server, &mcp.Tool{Name: "update_pattern",Description: "Update an existing cyclic recording pattern (record/sleep durations).",}, tools.UpdatePattern)mcp.AddTool(server, &mcp.Tool{Name: "create_cyclic_recording_pattern",Description: "Create a reusable recording pattern with record/sleep cycle in seconds. Check if pattern already exists first by querying: SELECT id FROM cyclic_recording_pattern WHERE record_s = ? AND sleep_s = ? AND active = true. Only create if pattern doesn't exist.",}, tools.CreateCyclicRecordingPattern)
3. **test_tools.sh [db_path]** - Comprehensive test of all 8 write tools
   - Tests: create_dataset, create_location, create_cluster, create_cyclic_recording_pattern
   - Tests: update_dataset, update_location, update_cluster, update_pattern
3. **test_tools.sh [db_path]** - Comprehensive test of all 4 create_or_update tools
   - Tests: create_or_update_dataset, create_or_update_location, create_or_update_cluster, create_or_update_pattern
   - Tests both create mode (no id) and update mode (with id)
│   ├── write_dataset.go      # create_dataset
│   ├── write_location.go     # create_location
│   ├── write_cluster.go      # create_cluster
│   ├── write_pattern.go      # create_cyclic_recording_pattern
│   ├── update_dataset.go     # update_dataset
│   ├── update_location.go    # update_location
│   ├── update_cluster.go     # update_cluster
│   └── update_pattern.go     # update_pattern
│   ├── dataset.go    # create_or_update_dataset
│   ├── location.go   # create_or_update_location
│   ├── cluster.go    # create_or_update_cluster
│   └── pattern.go    # create_or_update_pattern
### Write Tools

- `create_dataset` - Create new dataset (organise/test/train)
- `create_location` - Create location with GPS coordinates and timezone
- `create_cluster` - Create cluster within location
- `create_cyclic_recording_pattern` - Create recording pattern (record/sleep cycle)
- `update_dataset` - Update dataset metadata
- `update_location` - Update location metadata
- `update_cluster` - Update cluster metadata
- `update_pattern` - Update recording pattern
### Create/Update Tools

- `create_or_update_dataset` - Create (omit id) or update (provide id) a dataset
- `create_or_update_location` - Create or update a location with GPS coordinates and timezone
- `create_or_update_cluster` - Create or update a cluster within a location
- `create_or_update_pattern` - Create or update a cyclic recording pattern (record/sleep cycle)
**Tool Count Update**: Now **12 total tools** (read: 2, write: 8, import: 2)

*Note: Tool count increased to 14 in later updates (import: 4)*
**Tool Count Update**: Now **8 total tools** (read: 2, write: 4, import: 2)

*Note: Tool count increased to 10 in later updates (import: 4)*
**Tool Count Update**: Now **13 total tools** (read: 2, write: 8, import: 3)

*Note: Tool count increased to 14 in later update (import: 4)*
**Tool Count Update**: Now **9 total tools** (read: 2, write: 4, import: 3)

*Note: Tool count increased to 10 in later update (import: 4)*
### Latest Update: Tool Consolidation - 8 write/update tools → 4 create_or_update tools (2026-02-06)

**Consolidated 4 create_* + 4 update_* tools into 4 create_or_update_* tools**

**Deleted (8 files + 1 test):**
- `tools/write_dataset.go`, `tools/write_location.go`, `tools/write_cluster.go`, `tools/write_pattern.go`
- `tools/update_dataset.go`, `tools/update_location.go`, `tools/update_cluster.go`, `tools/update_pattern.go`
- `tools/write_pattern_test.go`

**Added (4 files + 1 test):**
- `tools/dataset.go` - `create_or_update_dataset` (create when no id, update when id provided)
- `tools/location.go` - `create_or_update_location`
- `tools/cluster.go` - `create_or_update_cluster`
- `tools/pattern.go` - `create_or_update_pattern`
- `tools/pattern_test.go` - Updated tests for consolidated pattern tool

**Modified:**
- `main.go` - 8 tool registrations → 4
- `tools/integration_test.go` - Updated to use new ClusterInput/CreateOrUpdateCluster types
- `shell_scripts/test_tools.sh` - Updated to test 4 tools (both create and update modes)
- `shell_scripts/test_bulk_import.sh` - Updated tool names
- `shell_scripts/TESTING.md` - Updated documentation
- `CLAUDE.md` - Updated tool counts, directory structure, documentation

**Design:**
- Omit `id` field → CREATE mode (generates nanoid, inserts, returns entity)
- Provide `id` field → UPDATE mode (verifies exists, builds dynamic UPDATE, returns entity)
- Shared validation logic per entity (e.g., coordinate bounds, name length)
- Both modes now return the full entity (update previously only returned success boolean)
**Benefits:**
- Tool count reduced from 14 → 10 (fewer tools for LLM to reason about)
- File count reduced from 8 → 4 (fewer files to maintain)
- ~31% less code (~320 lines removed)
- Shared validation logic eliminates duplication
- Consistent return types (both modes return the entity)
**Status**: Test scripts consolidated and documented

**Current Tools**: 14 (read: 2, write: 8, import: 4)
**Status**: Write tools consolidated into create_or_update pattern

**Current Tools**: 10 (read: 2, write: 4, import: 4)