diff --git a/README.MD b/README.MD index bcb69fa..b222e4e 100644 --- a/README.MD +++ b/README.MD @@ -1,18 +1,4 @@ # Poll-system -- TODO: Update the Database On the linode server -- TODO: Map View -- TODO: CSV smart address upload -- TODO: Reports - - TODO: Print Reports properly with print button - - TODO: Natural Language Filtering e.i. (From _DB1_,_DB2_,_DB3_, ... Where Date = _date_ And Time = _Time_) - - which volunteer whent to what address - - what are the poll respoce of a address - - what are the poll respoces of a vouneered address - - what are the admutn donat filtered by volunteer, address, - - which team did most appointments - - what is are the poeple in that team - - how much money the tam made - - what are the csv imported data - - what are the poll taken data - - what is the simple the +- TODO: Reports Generation, Export csv, Print Pdf, Show Charts +- TODO: Volunteer Schedule or Availability diff --git a/app/database/schema.sql b/app/database/schema.sql new file mode 100644 index 0000000..533da22 --- /dev/null +++ b/app/database/schema.sql @@ -0,0 +1,141 @@ + + +CREATE TABLE role ( + role_id SERIAL PRIMARY KEY, + name VARCHAR(50) UNIQUE NOT NULL, -- admin, volunteer, team_lead, manager, terry + created_at TIMESTAMP DEFAULT NOW(), + updated_at TIMESTAMP DEFAULT NOW() +); + +-- COMBINED: users + admin_settings in one table +CREATE TABLE users ( + user_id SERIAL PRIMARY KEY, + first_name VARCHAR(100), + last_name VARCHAR(100), + email VARCHAR(150) UNIQUE NOT NULL, + phone VARCHAR(20), + password TEXT NOT NULL, + role_id INT REFERENCES role(role_id) ON DELETE SET NULL, + admin_code CHAR(6) UNIQUE, + -- Admin settings combined here + ward_settings TEXT, + created_at TIMESTAMP DEFAULT NOW(), + updated_at TIMESTAMP DEFAULT NOW() +); + +CREATE TABLE address_database ( + address_id SERIAL PRIMARY KEY, + address VARCHAR(255), + street_name VARCHAR(100), + street_type VARCHAR(50), + street_quadrant VARCHAR(50), + house_number VARCHAR(20), + house_alpha 
VARCHAR(10), + postal_code VARCHAR(10), + longitude DECIMAL(9,6), + latitude DECIMAL(9,6), + created_at TIMESTAMP DEFAULT NOW(), + updated_at TIMESTAMP DEFAULT NOW() +); + +CREATE TABLE team ( + team_id SERIAL PRIMARY KEY, + team_lead_id INT REFERENCES users(user_id) ON DELETE SET NULL, + volunteer_id INT REFERENCES users(user_id) ON DELETE SET NULL, + created_at TIMESTAMP DEFAULT NOW(), + updated_at TIMESTAMP DEFAULT NOW() +); + +CREATE TABLE admin_volunteers ( + admin_id INT REFERENCES users(user_id) ON DELETE CASCADE, + volunteer_id INT REFERENCES users(user_id) ON DELETE CASCADE, + is_active BOOLEAN DEFAULT TRUE, + created_at TIMESTAMP DEFAULT NOW(), + updated_at TIMESTAMP DEFAULT NOW(), + PRIMARY KEY (admin_id, volunteer_id) +); + +CREATE TABLE appointment ( + sched_id SERIAL PRIMARY KEY, + user_id INT REFERENCES users(user_id) ON DELETE CASCADE, + address_id INT REFERENCES address_database(address_id) ON DELETE CASCADE, + appointment_date DATE, + appointment_time TIME, + created_at TIMESTAMP DEFAULT NOW(), + updated_at TIMESTAMP DEFAULT NOW() +); + +CREATE TABLE poll( + poll_id SERIAL PRIMARY KEY, + user_id INTEGER REFERENCES users ON DELETE CASCADE, + address_id INTEGER REFERENCES address_database ON DELETE CASCADE, + donation_amount integer default 0, + is_active BOOLEAN DEFAULT TRUE, + created_at TIMESTAMP DEFAULT NOW(), + updated_at TIMESTAMP DEFAULT NOW() +); + +CREATE TABLE poll_response +( + poll_response_id SERIAL PRIMARY KEY, + poll_id INTEGER REFERENCES poll(poll_id) ON DELETE CASCADE, + respondent_postal_code VARCHAR(10), -- Postal code of respondent + question1_voted_before BOOLEAN, -- Have you voted before? + question2_vote_again BOOLEAN, -- Will you vote again for this candidate? 
+ question3_lawn_signs INTEGER DEFAULT 0, -- How many lawn signs needed + question4_banner_signs INTEGER DEFAULT 0, -- How many banner signs needed + question5_thoughts TEXT, -- Write your thoughts + signage_status VARCHAR(50) DEFAULT 'requested' CHECK (signage_status IN ('requested', 'delivered', 'cancelled')), + is_active BOOLEAN DEFAULT TRUE, + created_at TIMESTAMP DEFAULT NOW(), + updated_at TIMESTAMP DEFAULT NOW() +); + +CREATE TABLE post ( + post_id SERIAL PRIMARY KEY, + author_id INT REFERENCES users(user_id) ON DELETE CASCADE, + content TEXT, + image_url TEXT, + created_at TIMESTAMP DEFAULT NOW() +); + +CREATE TABLE availability ( + availability_id SERIAL PRIMARY KEY, + user_id INT REFERENCES users(user_id) ON DELETE CASCADE, + day_of_week VARCHAR(20), + start_time TIME, + end_time TIME, + created_at TIMESTAMP DEFAULT NOW() +); + +-- Indexes for better performance +CREATE INDEX idx_poll_response_poll_id ON poll_response(poll_id); +CREATE INDEX idx_poll_response_postal_code ON poll_response(respondent_postal_code); +CREATE INDEX idx_poll_response_signage_status ON poll_response(signage_status); +CREATE INDEX idx_poll_user_id ON poll(user_id); + +-- Function to generate a 6-character random admin code +CREATE OR REPLACE FUNCTION generate_admin_code() +RETURNS trigger AS $$ +BEGIN + IF (SELECT name FROM role WHERE role_id = NEW.role_id) = 'admin' THEN + NEW.admin_code := substring(md5(random()::text) FROM 1 FOR 6); + ELSE + NEW.admin_code := NULL; + END IF; + RETURN NEW; +END; +$$ LANGUAGE plpgsql; + +-- Trigger to automatically generate admin_code on INSERT +CREATE TRIGGER set_admin_code +BEFORE INSERT ON users +FOR EACH ROW +EXECUTE PROCEDURE generate_admin_code(); + + +INSERT INTO role (role_id, name) VALUES +(1, 'admin'), +(2, 'team_lead'), +(3, 'volunteer') +ON CONFLICT DO NOTHING; diff --git a/app/internal/handlers/admin_addresses.go b/app/internal/handlers/admin_addresses.go index 842e080..b79e782 100644 --- a/app/internal/handlers/admin_addresses.go 
+++ b/app/internal/handlers/admin_addresses.go @@ -107,7 +107,7 @@ func AddressHandler(w http.ResponseWriter, r *http.Request) { FROM address_database a LEFT JOIN appointment ap ON a.address_id = ap.address_id LEFT JOIN users u ON ap.user_id = u.user_id - WHERE a.street_quadrant = 'ne' + WHERE a.street_quadrant = 'NE' ORDER BY a.address_id LIMIT $1 OFFSET $2 `, pageSize, offset) @@ -215,7 +215,7 @@ func AddressHandler(w http.ResponseWriter, r *http.Request) { "ActiveSection": "address", "Addresses": addresses, "Users": users, - "UserName": username, + "UserName": username, "Role": "admin", "Pagination": pagination, }) diff --git a/app/internal/handlers/admin_csv_upload.go b/app/internal/handlers/admin_csv_upload.go index 04c1eaa..e09e96d 100644 --- a/app/internal/handlers/admin_csv_upload.go +++ b/app/internal/handlers/admin_csv_upload.go @@ -6,23 +6,32 @@ import ( "io" "log" "net/http" + "sort" "strconv" "strings" - "time" "github.com/patel-mann/poll-system/app/internal/models" "github.com/patel-mann/poll-system/app/internal/utils" ) +// AddressMatch represents a potential address match with similarity score +type AddressMatch struct { + AddressID int + Address string + CurrentStatus bool + SimilarityScore float64 +} + // CSVUploadResult holds the results of CSV processing type CSVUploadResult struct { - TotalRecords int - ValidatedCount int - NotFoundCount int - ErrorCount int + TotalRecords int + ValidatedCount int + NotFoundCount int + ErrorCount int ValidatedAddresses []string NotFoundAddresses []string - ErrorMessages []string + ErrorMessages []string + FuzzyMatches []string // New field for fuzzy matches } // Combined CSV Upload Handler - handles both GET (display form) and POST (process CSV) @@ -69,6 +78,17 @@ func CSVUploadHandler(w http.ResponseWriter, r *http.Request) { return } + // Get similarity threshold (optional, default to 0.8) + similarityThresholdStr := r.FormValue("similarity_threshold") + similarityThreshold := 0.8 // Default threshold + if 
similarityThresholdStr != "" { + if threshold, err := strconv.ParseFloat(similarityThresholdStr, 64); err == nil { + if threshold >= 0.0 && threshold <= 1.0 { + similarityThreshold = threshold + } + } + } + // Get uploaded file file, header, err := r.FormFile("csv_file") if err != nil { @@ -116,12 +136,13 @@ func CSVUploadHandler(w http.ResponseWriter, r *http.Request) { return } - // Process addresses - result := processAddressValidation(allRows[1:], addressColumn) // Skip header + // Process addresses with fuzzy matching + result := processAddressValidationWithFuzzyMatching(allRows[1:], addressColumn, similarityThreshold) // Add result to template data templateData["Result"] = result templateData["FileName"] = header.Filename + templateData["SimilarityThreshold"] = similarityThreshold // Render the same template with results utils.Render(w, "csv-upload.html", templateData) @@ -132,12 +153,20 @@ func CSVUploadHandler(w http.ResponseWriter, r *http.Request) { http.Error(w, "Method not allowed", http.StatusMethodNotAllowed) } -// processAddressValidation processes CSV data and validates addresses -func processAddressValidation(rows [][]string, addressColumn int) CSVUploadResult { +// processAddressValidationWithFuzzyMatching processes CSV data with fuzzy string matching +func processAddressValidationWithFuzzyMatching(rows [][]string, addressColumn int, threshold float64) CSVUploadResult { result := CSVUploadResult{ TotalRecords: len(rows), } + // Pre-load all addresses from database for fuzzy matching + dbAddresses, err := loadAllAddressesFromDB() + if err != nil { + result.ErrorCount = len(rows) + result.ErrorMessages = append(result.ErrorMessages, "Failed to load addresses from database: "+err.Error()) + return result + } + for i, row := range rows { // Check if the row has enough columns if addressColumn >= len(row) { @@ -147,116 +176,217 @@ func processAddressValidation(rows [][]string, addressColumn int) CSVUploadResul continue } - // Get and normalize address 
- address := strings.ToLower(strings.TrimSpace(row[addressColumn])) - if address == "" { + // Get and normalize address from CSV + csvAddress := normalizeAddress(row[addressColumn]) + if csvAddress == "" { result.ErrorCount++ result.ErrorMessages = append(result.ErrorMessages, fmt.Sprintf("Row %d: Empty address", i+2)) continue } - // Check if address exists in database - var addressID int - var currentStatus bool - err := models.DB.QueryRow(` - SELECT address_id, visited_validated - FROM address_database - WHERE LOWER(TRIM(address)) = $1 - `, address).Scan(&addressID, ¤tStatus) + // Find best matches using fuzzy string matching + matches := findBestMatches(csvAddress, dbAddresses, 5) // Get top 5 matches - if err != nil { - // Address not found + if len(matches) == 0 { result.NotFoundCount++ - result.NotFoundAddresses = append(result.NotFoundAddresses, address) + result.NotFoundAddresses = append(result.NotFoundAddresses, csvAddress) continue } - // Address found - update validation status if not already validated - if !currentStatus { - _, err = models.DB.Exec(` - UPDATE address_database - SET visited_validated = true, updated_at = NOW() - WHERE address_id = $1 - `, addressID) - + // Get the best match + bestMatch := matches[0] + + // Check if the best match meets our similarity threshold + if bestMatch.SimilarityScore < threshold { + result.ErrorCount++ + result.ErrorMessages = append(result.ErrorMessages, + fmt.Sprintf("Row %d: No good match found for '%s' (best match: '%s' with score %.2f, threshold: %.2f)", + i+2, csvAddress, bestMatch.Address, bestMatch.SimilarityScore, threshold)) + continue + } + + // Update validation status if not already validated + if !bestMatch.CurrentStatus { + err = updateAddressValidation(bestMatch.AddressID) if err != nil { result.ErrorCount++ result.ErrorMessages = append(result.ErrorMessages, - fmt.Sprintf("Row %d: Database update error for address '%s'", i+2, address)) - log.Printf("Error updating address %d: %v", addressID, 
err) + fmt.Sprintf("Row %d: Database update error for address '%s'", i+2, csvAddress)) + log.Printf("Error updating address %d: %v", bestMatch.AddressID, err) continue } result.ValidatedCount++ - result.ValidatedAddresses = append(result.ValidatedAddresses, address) + matchInfo := fmt.Sprintf("%s → %s (score: %.2f)", csvAddress, bestMatch.Address, bestMatch.SimilarityScore) + result.ValidatedAddresses = append(result.ValidatedAddresses, matchInfo) } else { - // Address was already validated - still count as validated + // Address was already validated result.ValidatedCount++ - result.ValidatedAddresses = append(result.ValidatedAddresses, address+" (already validated)") + matchInfo := fmt.Sprintf("%s → %s (score: %.2f, already validated)", csvAddress, bestMatch.Address, bestMatch.SimilarityScore) + result.ValidatedAddresses = append(result.ValidatedAddresses, matchInfo) + } + + // Add fuzzy match info if it's not an exact match + if bestMatch.SimilarityScore < 1.0 { + fuzzyInfo := fmt.Sprintf("CSV: '%s' matched to DB: '%s' (similarity: %.2f)", + csvAddress, bestMatch.Address, bestMatch.SimilarityScore) + result.FuzzyMatches = append(result.FuzzyMatches, fuzzyInfo) } } return result } -// Optional: Keep the export function if you need it -// ExportValidatedAddressesHandler exports validated addresses to CSV -func ExportValidatedAddressesHandler(w http.ResponseWriter, r *http.Request) { - // Query validated addresses +// normalizeAddress trims spaces and converts to lowercase +func normalizeAddress(address string) string { + return strings.ToLower(strings.TrimSpace(address)) +} + +// loadAllAddressesFromDB loads all addresses from the database for fuzzy matching +func loadAllAddressesFromDB() ([]AddressMatch, error) { rows, err := models.DB.Query(` - SELECT - a.address_id, - a.address, - a.street_name, - a.street_type, - a.street_quadrant, - a.house_number, - COALESCE(a.house_alpha, '') as house_alpha, - a.longitude, - a.latitude, - a.visited_validated, - 
a.created_at, - a.updated_at, - CASE - WHEN ap.sched_id IS NOT NULL THEN true - ELSE false - END as assigned, - COALESCE(u.first_name || ' ' || u.last_name, '') as user_name, - COALESCE(u.email, '') as user_email, - COALESCE(ap.appointment_date::text, '') as appointment_date, - COALESCE(ap.appointment_time::text, '') as appointment_time - FROM address_database a - LEFT JOIN appointment ap ON a.address_id = ap.address_id - LEFT JOIN users u ON ap.user_id = u.user_id - WHERE a.visited_validated = true - ORDER BY a.updated_at DESC + SELECT address_id, address, visited_validated + FROM address_database `) if err != nil { - log.Println("Export query error:", err) - http.Error(w, "Database error", http.StatusInternalServerError) - return + return nil, err } defer rows.Close() - // Set response headers for CSV download - filename := fmt.Sprintf("validated_addresses_%s.csv", time.Now().Format("2006-01-02_15-04-05")) - w.Header().Set("Content-Type", "text/csv") - w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", filename)) - - // Create CSV writer - writer := csv.NewWriter(w) - defer writer.Flush() - - // Write header - header := []string{ - "Address ID", "Address", "Street Name", "Street Type", "Street Quadrant", - "House Number", "House Alpha", "Longitude", "Latitude", "Validated", - "Created At", "Updated At", "Assigned", "Assigned User", "User Email", - "Appointment Date", "Appointment Time", + var addresses []AddressMatch + for rows.Next() { + var match AddressMatch + var rawAddress string + + err := rows.Scan(&match.AddressID, &rawAddress, &match.CurrentStatus) + if err != nil { + log.Printf("Error scanning address row: %v", err) + continue + } + + // Normalize the address from database + match.Address = normalizeAddress(rawAddress) + addresses = append(addresses, match) } - writer.Write(header) - // Write data rows (you'll need to define AddressWithDetails struct) - // Implementation depends on your existing struct definitions + return 
addresses, rows.Err() +} + +// findBestMatches finds the top N best matches for a given address +func findBestMatches(csvAddress string, dbAddresses []AddressMatch, topN int) []AddressMatch { + // Calculate similarity scores for all addresses + var matches []AddressMatch + for _, dbAddr := range dbAddresses { + score := calculateSimilarity(csvAddress, dbAddr.Address) + match := AddressMatch{ + AddressID: dbAddr.AddressID, + Address: dbAddr.Address, + CurrentStatus: dbAddr.CurrentStatus, + SimilarityScore: score, + } + matches = append(matches, match) + } + + // Sort by similarity score (descending) + sort.Slice(matches, func(i, j int) bool { + return matches[i].SimilarityScore > matches[j].SimilarityScore + }) + + // Return top N matches + if len(matches) > topN { + return matches[:topN] + } + return matches +} + +// calculateSimilarity calculates Levenshtein distance-based similarity score +func calculateSimilarity(s1, s2 string) float64 { + if s1 == s2 { + return 1.0 + } + + distance := levenshteinDistance(s1, s2) + maxLen := max(len(s1), len(s2)) + + if maxLen == 0 { + return 1.0 + } + + similarity := 1.0 - float64(distance)/float64(maxLen) + return max(0.0, similarity) +} + +// levenshteinDistance calculates the Levenshtein distance between two strings +func levenshteinDistance(s1, s2 string) int { + if len(s1) == 0 { + return len(s2) + } + if len(s2) == 0 { + return len(s1) + } + + // Create a matrix + matrix := make([][]int, len(s1)+1) + for i := range matrix { + matrix[i] = make([]int, len(s2)+1) + } + + // Initialize first row and column + for i := 0; i <= len(s1); i++ { + matrix[i][0] = i + } + for j := 0; j <= len(s2); j++ { + matrix[0][j] = j + } + + // Fill the matrix + for i := 1; i <= len(s1); i++ { + for j := 1; j <= len(s2); j++ { + cost := 0 + if s1[i-1] != s2[j-1] { + cost = 1 + } + + matrix[i][j] = min( + matrix[i-1][j]+1, // deletion + matrix[i][j-1]+1, // insertion + matrix[i-1][j-1]+cost, // substitution + ) + } + } + + return 
matrix[len(s1)][len(s2)] +} + +// updateAddressValidation updates an address validation status +func updateAddressValidation(addressID int) error { + _, err := models.DB.Exec(` + UPDATE address_database + SET visited_validated = true, updated_at = NOW() + WHERE address_id = $1 + `, addressID) + return err +} + +// Helper functions for different types +func minInt(a, b int) int { + if a < b { + return a + } + return b +} + +func maxInt(a, b int) int { + if a > b { + return a + } + return b +} + +func maxFloat64(a, b float64) float64 { + if a > b { + return a + } + return b } \ No newline at end of file diff --git a/app/internal/handlers/admin_dashboard.go b/app/internal/handlers/admin_dashboard.go index 1670258..e1dbc0d 100644 --- a/app/internal/handlers/admin_dashboard.go +++ b/app/internal/handlers/admin_dashboard.go @@ -33,7 +33,7 @@ func AdminDashboardHandler(w http.ResponseWriter, r *http.Request) { // 2. Total donations from polls err = models.DB.QueryRow(` SELECT COALESCE(SUM(amount_donated), 0) - FROM poll; + FROM poll_response; `).Scan(&totalDonations) if err != nil { log.Println("Donation query error:", err) diff --git a/app/internal/handlers/admin_map.go b/app/internal/handlers/admin_map.go new file mode 100644 index 0000000..417b326 --- /dev/null +++ b/app/internal/handlers/admin_map.go @@ -0,0 +1,186 @@ +package handlers + +import ( + "encoding/json" + "log" + "net/http" + + "github.com/patel-mann/poll-system/app/internal/models" +) + +// ValidatedAddress represents a validated address with coordinates +type ValidatedAddress struct { + AddressID int `json:"address_id"` + Address string `json:"address"` + Longitude float64 `json:"longitude"` + Latitude float64 `json:"latitude"` + HouseNumber string `json:"house_number"` + StreetName string `json:"street_name"` + StreetType string `json:"street_type"` + Quadrant string `json:"street_quadrant"` + UpdatedAt string `json:"updated_at"` +} + +// GetValidatedAddressesHandler returns all validated addresses with 
their coordinates as JSON +func GetValidatedAddressesHandler(w http.ResponseWriter, r *http.Request) { + // Only allow GET requests + if r.Method != http.MethodGet { + http.Error(w, "Method not allowed", http.StatusMethodNotAllowed) + return + } + + // Optional: Check if user is authenticated (depending on your auth system) + // _, authenticated := models.GetCurrentUserName(r) + // if !authenticated { + // http.Error(w, "Unauthorized", http.StatusUnauthorized) + // return + // } + + // Query validated addresses from database + addresses, err := fetchValidatedAddresses() + if err != nil { + log.Printf("Error fetching validated addresses: %v", err) + http.Error(w, "Internal server error", http.StatusInternalServerError) + return + } + + // Set response headers for JSON + w.Header().Set("Content-Type", "application/json") + w.Header().Set("Access-Control-Allow-Origin", "*") // Adjust based on your CORS policy + + // Encode and send JSON response + if err := json.NewEncoder(w).Encode(addresses); err != nil { + log.Printf("Error encoding JSON response: %v", err) + http.Error(w, "Error encoding response", http.StatusInternalServerError) + return + } + + + log.Printf("Successfully returned %d validated addresses", len(addresses)) +} + +// fetchValidatedAddresses retrieves all validated addresses from the database +func fetchValidatedAddresses() ([]ValidatedAddress, error) { + query := ` + SELECT + address_id, + address, + longitude, + latitude, + COALESCE(house_number, '') as house_number, + COALESCE(street_name, '') as street_name, + COALESCE(street_type, '') as street_type, + COALESCE(street_quadrant, '') as street_quadrant, + updated_at::text + FROM address_database + WHERE visited_validated = true + AND longitude IS NOT NULL + AND latitude IS NOT NULL + AND longitude != 0 + AND latitude != 0 + ORDER BY updated_at DESC + ` + + rows, err := models.DB.Query(query) + if err != nil { + return nil, err + } + defer rows.Close() + + var addresses []ValidatedAddress + for 
rows.Next() { + var addr ValidatedAddress + err := rows.Scan( + &addr.AddressID, + &addr.Address, + &addr.Longitude, + &addr.Latitude, + &addr.HouseNumber, + &addr.StreetName, + &addr.StreetType, + &addr.Quadrant, + &addr.UpdatedAt, + ) + if err != nil { + log.Printf("Error scanning address row: %v", err) + continue + } + addresses = append(addresses, addr) + } + + if err = rows.Err(); err != nil { + return nil, err + } + + return addresses, nil +} + +// GetValidatedAddressesStatsHandler returns statistics about validated addresses +func GetValidatedAddressesStatsHandler(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodGet { + http.Error(w, "Method not allowed", http.StatusMethodNotAllowed) + return + } + + stats, err := getValidatedAddressesStats() + if err != nil { + log.Printf("Error fetching address stats: %v", err) + http.Error(w, "Internal server error", http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(stats) +} + +// AddressStats represents statistics about addresses +type AddressStats struct { + TotalValidated int `json:"total_validated"` + TotalAddresses int `json:"total_addresses"` + ValidatedWithCoords int `json:"validated_with_coords"` + RecentlyValidated int `json:"recently_validated_24h"` +} + +// getValidatedAddressesStats gets statistics about validated addresses +func getValidatedAddressesStats() (AddressStats, error) { + var stats AddressStats + + // Get total validated count + err := models.DB.QueryRow("SELECT COUNT(*) FROM address_database WHERE visited_validated = true"). + Scan(&stats.TotalValidated) + if err != nil { + return stats, err + } + + // Get total addresses count + err = models.DB.QueryRow("SELECT COUNT(*) FROM address_database"). 
+ Scan(&stats.TotalAddresses) + if err != nil { + return stats, err + } + + // Get validated with coordinates count + err = models.DB.QueryRow(` + SELECT COUNT(*) FROM address_database + WHERE visited_validated = true + AND longitude IS NOT NULL + AND latitude IS NOT NULL + AND longitude != 0 + AND latitude != 0 + `).Scan(&stats.ValidatedWithCoords) + if err != nil { + return stats, err + } + + // Get recently validated (last 24 hours) + err = models.DB.QueryRow(` + SELECT COUNT(*) FROM address_database + WHERE visited_validated = true + AND updated_at > NOW() - INTERVAL '24 hours' + `).Scan(&stats.RecentlyValidated) + if err != nil { + return stats, err + } + + return stats, nil +} \ No newline at end of file diff --git a/app/internal/handlers/admin_reports.go b/app/internal/handlers/admin_reports.go index 7bb5b9f..df88f81 100644 --- a/app/internal/handlers/admin_reports.go +++ b/app/internal/handlers/admin_reports.go @@ -4,7 +4,6 @@ import ( "fmt" "log" "net/http" - "regexp" "strconv" "strings" "time" @@ -13,35 +12,27 @@ import ( "github.com/patel-mann/poll-system/app/internal/utils" ) -// SmartFilterQuery represents a parsed smart filter query -type SmartFilterQuery struct { - Tables []string - Conditions []FilterCondition - Groupings []string - Aggregations []string - Sorting []string - Limit int +type ReportResult struct { + Columns []string `json:"columns"` + Rows [][]interface{} `json:"rows"` + Count int `json:"count"` + Error string `json:"error,omitempty"` } -type FilterCondition struct { - Table string - Column string - Operator string - Value interface{} - LogicalOp string // AND, OR +type ReportDefinition struct { + ID string `json:"id"` + Name string `json:"name"` + Description string `json:"description"` + SQL string `json:"-"` // Don't expose SQL in JSON } -// SmartFilterResult represents the result of a smart filter query -type SmartFilterResult struct { - Columns []string - Rows [][]interface{} - Query string - Count int - Error string +type 
SummaryStats struct { + Label string `json:"label"` + Value string `json:"value"` } -// SmartFilterHandler handles intelligent filtering with natural language parsing -func SmartFilterHandler(w http.ResponseWriter, r *http.Request) { +// Simple Reports Handler +func ReportsHandler(w http.ResponseWriter, r *http.Request) { username, _ := models.GetCurrentUserName(r) role := r.Context().Value("user_role").(int) @@ -50,396 +41,82 @@ func SmartFilterHandler(w http.ResponseWriter, r *http.Request) { return } - smartQuery := r.URL.Query().Get("smart_query") - var result SmartFilterResult + category := r.URL.Query().Get("category") + reportID := r.URL.Query().Get("report") + dateFrom := r.URL.Query().Get("date_from") + dateTo := r.URL.Query().Get("date_to") + export := r.URL.Query().Get("export") - if smartQuery != "" { - result = executeSmartFilter(smartQuery) + // Set default date range if not provided + if dateFrom == "" { + dateFrom = time.Now().AddDate(0, 0, -30).Format("2006-01-02") + } + if dateTo == "" { + dateTo = time.Now().Format("2006-01-02") } - adminnav := role == 1 - volunteernav := role != 1 + var result ReportResult + var reportTitle, reportDescription string - utils.Render(w, "smart_reports.html", map[string]interface{}{ - "Title": "Smart Reports & Analytics", - "IsAuthenticated": true, - "ShowAdminNav": adminnav, - "ShowVolunteerNav": volunteernav, - "UserName": username, - "ActiveSection": "reports", - "SmartQuery": smartQuery, - "Result": result, - "QueryExamples": getQueryExamples(), + // Generate report if both category and report are selected + if category != "" && reportID != "" { + result, reportTitle, reportDescription = executeReport(category, reportID, dateFrom, dateTo) + + // Handle CSV export + if export == "csv" { + exportCSV(w, result, reportTitle) + return + } + } + + utils.Render(w, "reports.html", map[string]interface{}{ + "Title": "Campaign Reports", + "IsAuthenticated": true, + "ShowAdminNav": role == 1, + "ShowVolunteerNav": role != 
1, + "UserName": username, + "ActiveSection": "reports", + "Category": category, + "ReportID": reportID, + "DateFrom": dateFrom, + "DateTo": dateTo, + "AvailableReports": getReportsForCategory(category), + "Result": result, + "ReportTitle": reportTitle, + "ReportDescription": reportDescription, + "GeneratedAt": time.Now().Format("January 2, 2006 at 3:04 PM"), + "SummaryStats": generateSummaryStats(result), }) } -// executeSmartFilter parses and executes a smart filter query -func executeSmartFilter(query string) SmartFilterResult { - parsed := parseSmartQuery(query) - if parsed == nil { - return SmartFilterResult{Error: "Could not parse query"} +// Execute a specific report +func executeReport(category, reportID, dateFrom, dateTo string) (ReportResult, string, string) { + report := getReportDefinition(category, reportID) + if report == nil { + return ReportResult{Error: "Report not found"}, "", "" } - sqlQuery, err := buildSQLFromParsed(parsed) + // Replace date placeholders in SQL + sql := report.SQL + sql = replaceDatePlaceholders(sql, dateFrom, dateTo) + + // Execute the SQL query + rows, err := models.DB.Query(sql) if err != nil { - return SmartFilterResult{Error: err.Error()} - } - - return executeSQLQuery(sqlQuery) -} - -// parseSmartQuery parses natural language into structured query -func parseSmartQuery(query string) *SmartFilterQuery { - query = strings.ToLower(strings.TrimSpace(query)) - - // Initialize the parsed query - parsed := &SmartFilterQuery{ - Tables: []string{}, - Conditions: []FilterCondition{}, - Limit: 100, // Default limit - } - - // Define entity mappings - entityMappings := map[string]string{ - "volunteer": "users", - "volunteers": "users", - "user": "users", - "users": "users", - "admin": "users", - "admins": "users", - "poll": "poll", - "polls": "poll", - "address": "address_database", - "addresses": "address_database", - "appointment": "appointment", - "appointments": "appointment", - "team": "team", - "teams": "team", - } - - // 
Define column mappings for each table - columnMappings := map[string]map[string]string{ - "users": { - "name": "first_name || ' ' || last_name", - "email": "email", - "phone": "phone", - "role": "role_id", - "created": "created_at", - "updated": "updated_at", - }, - "poll": { - "title": "poll_title", - "description": "poll_description", - "active": "is_active", - "donated": "amount_donated", - "donation": "amount_donated", - "money": "amount_donated", - "amount": "amount_donated", - "created": "created_at", - "updated": "updated_at", - }, - "address_database": { - "address": "address", - "street": "street_name", - "house": "house_number", - "visited": "visited_validated", - "latitude": "latitude", - "longitude": "longitude", - "created": "created_at", - }, - "appointment": { - "date": "appointment_date", - "time": "appointment_time", - "created": "created_at", - }, - "team": { - "lead": "team_lead_id", - "volunteer": "volunteer_id", - "created": "created_at", - }, - } - - _ = columnMappings - - // Extract entities from query - for keyword, table := range entityMappings { - if strings.Contains(query, keyword) { - if !contains(parsed.Tables, table) { - parsed.Tables = append(parsed.Tables, table) - } - } - } - - // Parse conditions using regex patterns - conditionPatterns := []struct { - pattern string - handler func(matches []string, parsed *SmartFilterQuery) - }{ - // "volunteers who went to address X" - {`(volunteer|user)s?\s+(?:who\s+)?(?:went\s+to|visited)\s+(?:address\s+)?(.+)`, func(matches []string, parsed *SmartFilterQuery) { - addTablesIfNeeded(parsed, []string{"users", "appointment", "address_database"}) - parsed.Conditions = append(parsed.Conditions, FilterCondition{ - Table: "address_database", Column: "address", Operator: "ILIKE", Value: "%" + matches[2] + "%", LogicalOp: "AND", - }) - }}, - - // "poll responses for address X" - {`poll\s+(?:response|responses|data)\s+(?:for|of|at)\s+(?:address\s+)?(.+)`, func(matches []string, parsed *SmartFilterQuery) 
{ - addTablesIfNeeded(parsed, []string{"poll", "address_database"}) - parsed.Conditions = append(parsed.Conditions, FilterCondition{ - Table: "address_database", Column: "address", Operator: "ILIKE", Value: "%" + matches[1] + "%", LogicalOp: "AND", - }) - }}, - - // "donations by volunteer X" - {`(?:donation|donations|money|amount)\s+(?:by|from)\s+(?:volunteer\s+)?(.+)`, func(matches []string, parsed *SmartFilterQuery) { - addTablesIfNeeded(parsed, []string{"poll", "users"}) - parsed.Conditions = append(parsed.Conditions, FilterCondition{ - Table: "users", Column: "first_name || ' ' || last_name", Operator: "ILIKE", Value: "%" + matches[1] + "%", LogicalOp: "AND", - }) - parsed.Aggregations = append(parsed.Aggregations, "SUM(poll.amount_donated) as total_donated") - }}, - - // "team with most appointments" - {`team\s+(?:with\s+)?(?:most|highest)\s+appointment`, func(matches []string, parsed *SmartFilterQuery) { - addTablesIfNeeded(parsed, []string{"team", "appointment"}) - parsed.Aggregations = append(parsed.Aggregations, "COUNT(appointment.sched_id) as appointment_count") - parsed.Groupings = append(parsed.Groupings, "team.team_id") - parsed.Sorting = append(parsed.Sorting, "appointment_count DESC") - }}, - - // "people in team X" - {`(?:people|members|users)\s+in\s+team\s+(\d+)`, func(matches []string, parsed *SmartFilterQuery) { - addTablesIfNeeded(parsed, []string{"team", "users"}) - teamID, _ := strconv.Atoi(matches[1]) - parsed.Conditions = append(parsed.Conditions, FilterCondition{ - Table: "team", Column: "team_id", Operator: "=", Value: teamID, LogicalOp: "AND", - }) - }}, - - // "money made by team X" - {`(?:money|donation|amount)\s+(?:made|earned)\s+by\s+team\s+(\d+)`, func(matches []string, parsed *SmartFilterQuery) { - addTablesIfNeeded(parsed, []string{"team", "users", "poll"}) - teamID, _ := strconv.Atoi(matches[1]) - parsed.Conditions = append(parsed.Conditions, FilterCondition{ - Table: "team", Column: "team_id", Operator: "=", Value: teamID, 
LogicalOp: "AND", - }) - parsed.Aggregations = append(parsed.Aggregations, "SUM(poll.amount_donated) as team_total") - }}, - - // "visited addresses" - {`visited\s+address`, func(matches []string, parsed *SmartFilterQuery) { - addTablesIfNeeded(parsed, []string{"address_database"}) - parsed.Conditions = append(parsed.Conditions, FilterCondition{ - Table: "address_database", Column: "visited_validated", Operator: "=", Value: true, LogicalOp: "AND", - }) - }}, - - // "active polls" - {`active\s+poll`, func(matches []string, parsed *SmartFilterQuery) { - addTablesIfNeeded(parsed, []string{"poll"}) - parsed.Conditions = append(parsed.Conditions, FilterCondition{ - Table: "poll", Column: "is_active", Operator: "=", Value: true, LogicalOp: "AND", - }) - }}, - - // Date filters - {`(?:from|after)\s+(\d{4}-\d{2}-\d{2})`, func(matches []string, parsed *SmartFilterQuery) { - for _, table := range parsed.Tables { - parsed.Conditions = append(parsed.Conditions, FilterCondition{ - Table: table, Column: "created_at", Operator: ">=", Value: matches[1], LogicalOp: "AND", - }) - } - }}, - - {`(?:to|before|until)\s+(\d{4}-\d{2}-\d{2})`, func(matches []string, parsed *SmartFilterQuery) { - for _, table := range parsed.Tables { - parsed.Conditions = append(parsed.Conditions, FilterCondition{ - Table: table, Column: "created_at", Operator: "<=", Value: matches[1] + " 23:59:59", LogicalOp: "AND", - }) - } - }}, - } - - // Apply pattern matching - for _, pattern := range conditionPatterns { - re := regexp.MustCompile(pattern.pattern) - if matches := re.FindStringSubmatch(query); matches != nil { - pattern.handler(matches, parsed) - } - } - - // If no tables were identified, return nil - if len(parsed.Tables) == 0 { - return nil - } - - return parsed -} - -// buildSQLFromParsed converts parsed query into SQL -func buildSQLFromParsed(parsed *SmartFilterQuery) (string, error) { - var selectCols []string - var fromClause []string - var joinClauses []string - var whereConditions []string - 
var groupByClause string - var orderByClause string - - // Define table aliases - aliases := map[string]string{ - "users": "u", - "poll": "p", - "address_database": "a", - "appointment": "ap", - "team": "t", - } - - // Define default columns for each table - defaultColumns := map[string][]string{ - "users": {"u.user_id", "u.first_name", "u.last_name", "u.email", "u.role_id", "u.created_at"}, - "poll": {"p.poll_id", "p.poll_title", "p.poll_description", "p.is_active", "p.amount_donated", "p.created_at"}, - "address_database": {"a.address_id", "a.address", "a.street_name", "a.visited_validated", "a.created_at"}, - "appointment": {"ap.sched_id", "ap.appointment_date", "ap.appointment_time", "ap.created_at"}, - "team": {"t.team_id", "t.team_lead_id", "t.volunteer_id", "t.created_at"}, - } - - // Build SELECT clause - if len(parsed.Aggregations) > 0 { - selectCols = parsed.Aggregations - // Add grouping columns - for _, table := range parsed.Tables { - alias := aliases[table] - if table == "users" { - selectCols = append(selectCols, alias+".first_name || ' ' || "+alias+".last_name as user_name") - } else if table == "team" { - selectCols = append(selectCols, alias+".team_id") - } - } - } else { - // Include default columns for all tables - for _, table := range parsed.Tables { - selectCols = append(selectCols, defaultColumns[table]...) 
- } - } - - // Build FROM clause with main table - mainTable := parsed.Tables[0] - mainAlias := aliases[mainTable] - fromClause = append(fromClause, mainTable+" "+mainAlias) - - // Build JOIN clauses for additional tables - for i := 1; i < len(parsed.Tables); i++ { - table := parsed.Tables[i] - alias := aliases[table] - - // Define join conditions based on relationships - joinCondition := getJoinCondition(mainTable, mainAlias, table, alias) - if joinCondition != "" { - joinClauses = append(joinClauses, "LEFT JOIN "+table+" "+alias+" ON "+joinCondition) - } - } - - // Build WHERE conditions - for _, condition := range parsed.Conditions { - alias := aliases[condition.Table] - whereClause := fmt.Sprintf("%s.%s %s", alias, condition.Column, condition.Operator) - - switch v := condition.Value.(type) { - case string: - whereClause += fmt.Sprintf(" '%s'", strings.Replace(v, "'", "''", -1)) - case int: - whereClause += fmt.Sprintf(" %d", v) - case bool: - whereClause += fmt.Sprintf(" %t", v) - case float64: - whereClause += fmt.Sprintf(" %.2f", v) - } - - whereConditions = append(whereConditions, whereClause) - } - - // Build GROUP BY - if len(parsed.Groupings) > 0 { - groupByClause = "GROUP BY " + strings.Join(parsed.Groupings, ", ") - } - - // Build ORDER BY - if len(parsed.Sorting) > 0 { - orderByClause = "ORDER BY " + strings.Join(parsed.Sorting, ", ") - } else { - orderByClause = "ORDER BY " + mainAlias + ".created_at DESC" - } - - // Construct final SQL - sql := "SELECT " + strings.Join(selectCols, ", ") - sql += " FROM " + strings.Join(fromClause, ", ") - if len(joinClauses) > 0 { - sql += " " + strings.Join(joinClauses, " ") - } - if len(whereConditions) > 0 { - sql += " WHERE " + strings.Join(whereConditions, " AND ") - } - if groupByClause != "" { - sql += " " + groupByClause - } - sql += " " + orderByClause - sql += fmt.Sprintf(" LIMIT %d", parsed.Limit) - - return sql, nil -} - -// getJoinCondition returns the appropriate JOIN condition between tables -func 
getJoinCondition(table1, alias1, table2, alias2 string) string { - joinMap := map[string]map[string]string{ - "users": { - "poll": alias1 + ".user_id = " + alias2 + ".user_id", - "appointment": alias1 + ".user_id = " + alias2 + ".user_id", - "team": alias1 + ".user_id = " + alias2 + ".team_lead_id OR " + alias1 + ".user_id = " + alias2 + ".volunteer_id", - }, - "poll": { - "users": alias2 + ".user_id = " + alias1 + ".user_id", - "address_database": alias1 + ".address_id = " + alias2 + ".address_id", - }, - "appointment": { - "users": alias2 + ".user_id = " + alias1 + ".user_id", - "address_database": alias1 + ".address_id = " + alias2 + ".address_id", - }, - "team": { - "users": alias2 + ".user_id = " + alias1 + ".team_lead_id OR " + alias2 + ".user_id = " + alias1 + ".volunteer_id", - }, - "address_database": { - "poll": alias2 + ".address_id = " + alias1 + ".address_id", - "appointment": alias2 + ".address_id = " + alias1 + ".address_id", - }, - } - - if table1Joins, exists := joinMap[table1]; exists { - if condition, exists := table1Joins[table2]; exists { - return condition - } - } - return "" -} - -// executeSQLQuery executes the built SQL and returns results -func executeSQLQuery(sqlQuery string) SmartFilterResult { - rows, err := models.DB.Query(sqlQuery) - if err != nil { - log.Println("Smart filter SQL error:", err) - return SmartFilterResult{Error: "Query execution failed: " + err.Error(), Query: sqlQuery} + log.Printf("Report SQL error: %v", err) + return ReportResult{Error: "Failed to execute report"}, report.Name, report.Description } defer rows.Close() - // Get column information + // Get column names columns, err := rows.Columns() if err != nil { - return SmartFilterResult{Error: "Failed to get columns", Query: sqlQuery} + return ReportResult{Error: "Failed to get columns"}, report.Name, report.Description } + // Process rows var results [][]interface{} for rows.Next() { - // Create a slice of interface{} to hold the values values := 
make([]interface{}, len(columns)) valuePtrs := make([]interface{}, len(columns)) for i := range values { @@ -447,11 +124,9 @@ func executeSQLQuery(sqlQuery string) SmartFilterResult { } if err := rows.Scan(valuePtrs...); err != nil { - log.Println("Error scanning row:", err) continue } - // Convert values to strings for display row := make([]interface{}, len(columns)) for i, val := range values { if val == nil { @@ -470,145 +145,690 @@ func executeSQLQuery(sqlQuery string) SmartFilterResult { results = append(results, row) } - return SmartFilterResult{ + return ReportResult{ Columns: columns, Rows: results, - Query: sqlQuery, Count: len(results), - } + }, report.Name, report.Description } -// Helper functions -func addTablesIfNeeded(parsed *SmartFilterQuery, tables []string) { - for _, table := range tables { - if !contains(parsed.Tables, table) { - parsed.Tables = append(parsed.Tables, table) +// Get available reports for a category +func getReportsForCategory(category string) []ReportDefinition { + allReports := getAllReportDefinitions() + if reports, exists := allReports[category]; exists { + return reports + } + return []ReportDefinition{} +} + +// Get a specific report definition +func getReportDefinition(category, reportID string) *ReportDefinition { + reports := getReportsForCategory(category) + for _, report := range reports { + if report.ID == reportID { + return &report } } + return nil } -func contains(slice []string, item string) bool { - for _, s := range slice { - if s == item { - return true +// Define all available reports +func getAllReportDefinitions() map[string][]ReportDefinition { + return map[string][]ReportDefinition{ + "users": { + { + ID: "users_by_role", + Name: "Users by Role", + Description: "Count of users grouped by their role", + SQL: `SELECT + CASE + WHEN role_id = 1 THEN 'Admin' + WHEN role_id = 2 THEN 'Volunteer' + ELSE 'Unknown' + END as role, + COUNT(*) as user_count, + COUNT(CASE WHEN created_at >= ?1 THEN 1 END) as 
new_this_period + FROM users + GROUP BY role_id + ORDER BY role_id`, + }, + { + ID: "volunteer_activity", + Name: "Volunteer Activity Summary", + Description: "Summary of volunteer activities including appointments and polls", + SQL: `SELECT + u.first_name || ' ' || u.last_name as volunteer_name, + u.email, + COUNT(DISTINCT a.sched_id) as appointments_count, + COUNT(DISTINCT p.poll_id) as polls_created, + u.created_at as joined_date + FROM users u + LEFT JOIN appointment a ON u.user_id = a.user_id AND a.created_at BETWEEN ?1 AND ?2 + LEFT JOIN poll p ON u.user_id = p.user_id AND p.created_at BETWEEN ?1 AND ?2 + WHERE u.role_id = 2 + GROUP BY u.user_id, u.first_name, u.last_name, u.email, u.created_at + ORDER BY appointments_count DESC, polls_created DESC`, + }, + { + ID: "team_performance", + Name: "Team Performance Report", + Description: "Performance metrics for each team", + SQL: `SELECT + t.team_id, + ul.first_name || ' ' || ul.last_name as team_lead, + uv.first_name || ' ' || uv.last_name as volunteer, + COUNT(DISTINCT a.sched_id) as appointments, + COUNT(DISTINCT p.poll_id) as polls_created, + t.created_at as team_formed + FROM team t + LEFT JOIN users ul ON t.team_lead_id = ul.user_id + LEFT JOIN users uv ON t.volunteer_id = uv.user_id + LEFT JOIN appointment a ON uv.user_id = a.user_id AND a.created_at BETWEEN ?1 AND ?2 + LEFT JOIN poll p ON uv.user_id = p.user_id AND p.created_at BETWEEN ?1 AND ?2 + GROUP BY t.team_id, ul.first_name, ul.last_name, uv.first_name, uv.last_name, t.created_at + ORDER BY appointments DESC`, + }, + { + ID: "admin_workload", + Name: "Admin Workload Analysis", + Description: "Workload distribution across admins", + SQL: `SELECT + u.first_name || ' ' || u.last_name as admin_name, + u.email, + COUNT(DISTINCT t.team_id) as teams_managed, + COUNT(DISTINCT p.poll_id) as polls_created, + COUNT(DISTINCT a.sched_id) as appointments_scheduled + FROM users u + LEFT JOIN team t ON u.user_id = t.team_lead_id + LEFT JOIN poll p ON u.user_id = 
p.user_id AND p.created_at BETWEEN ?1 AND ?2 + LEFT JOIN appointment a ON u.user_id = a.user_id AND a.created_at BETWEEN ?1 AND ?2 + WHERE u.role_id = 1 + GROUP BY u.user_id, u.first_name, u.last_name, u.email + ORDER BY teams_managed DESC, polls_created DESC`, + }, + { + ID: "inactive_users", + Name: "Inactive Users Report", + Description: "Users with no recent activity", + SQL: `SELECT + u.first_name || ' ' || u.last_name as user_name, + u.email, + CASE + WHEN u.role_id = 1 THEN 'Admin' + WHEN u.role_id = 2 THEN 'Volunteer' + ELSE 'Unknown' + END as role, + u.created_at as joined_date, + COALESCE(MAX(a.created_at), MAX(p.created_at)) as last_activity + FROM users u + LEFT JOIN appointment a ON u.user_id = a.user_id + LEFT JOIN poll p ON u.user_id = p.user_id + GROUP BY u.user_id, u.first_name, u.last_name, u.email, u.role_id, u.created_at + HAVING COALESCE(MAX(a.created_at), MAX(p.created_at)) < ?1 OR COALESCE(MAX(a.created_at), MAX(p.created_at)) IS NULL + ORDER BY last_activity DESC`, + }, + }, + "addresses": { + { + ID: "coverage_by_area", + Name: "Coverage by Area", + Description: "Address coverage statistics by geographical area", + SQL: `SELECT + COALESCE(NULLIF(TRIM(SPLIT_PART(address, ',', -1)), ''), 'Unknown') as area, + COUNT(*) as total_addresses, + COUNT(CASE WHEN visited_validated = true THEN 1 END) as visited_count, + ROUND(COUNT(CASE WHEN visited_validated = true THEN 1 END) * 100.0 / COUNT(*), 2) as coverage_percentage + FROM address_database + WHERE created_at BETWEEN ?1 AND ?2 + GROUP BY area + ORDER BY total_addresses DESC`, + }, + { + ID: "visits_by_postal", + Name: "Visits by Postal Code", + Description: "Visit statistics grouped by postal code", + SQL: `SELECT + COALESCE(NULLIF(TRIM(SUBSTRING(address FROM '[A-Za-z][0-9][A-Za-z] ?[0-9][A-Za-z][0-9]')), ''), 'No Postal Code') as postal_code, + COUNT(*) as addresses, + COUNT(CASE WHEN visited_validated = true THEN 1 END) as visited, + COUNT(CASE WHEN visited_validated = false THEN 1 END) as 
unvisited + FROM address_database + WHERE created_at BETWEEN ?1 AND ?2 + GROUP BY postal_code + ORDER BY addresses DESC + LIMIT 50`, + }, + { + ID: "unvisited_addresses", + Name: "Unvisited Addresses", + Description: "List of addresses that haven't been visited", + SQL: `SELECT + address_id, + address, + latitude, + longitude, + created_at as added_date + FROM address_database + WHERE visited_validated = false + AND created_at BETWEEN ?1 AND ?2 + ORDER BY created_at DESC + LIMIT 100`, + }, + { + ID: "donations_by_location", + Name: "Donations by Location", + Description: "Donation amounts grouped by address location", + SQL: `SELECT + a.address, + COUNT(p.poll_id) as total_polls, + COALESCE(SUM(p.amount_donated), 0) as total_donations, + COALESCE(AVG(p.amount_donated), 0) as avg_donation + FROM address_database a + LEFT JOIN poll p ON a.address_id = p.address_id AND p.created_at BETWEEN ?1 AND ?2 + GROUP BY a.address_id, a.address + HAVING COUNT(p.poll_id) > 0 + ORDER BY total_donations DESC + LIMIT 50`, + }, + { + ID: "address_validation_status", + Name: "Address Validation Status", + Description: "Status of address validation across the database", + SQL: `SELECT + CASE + WHEN visited_validated = true THEN 'Validated' + WHEN visited_validated = false THEN 'Not Validated' + ELSE 'Unknown' + END as validation_status, + COUNT(*) as address_count, + ROUND(COUNT(*) * 100.0 / (SELECT COUNT(*) FROM address_database), 2) as percentage + FROM address_database + WHERE created_at BETWEEN ?1 AND ?2 + GROUP BY visited_validated + ORDER BY address_count DESC`, + }, + }, + "appointments": { + { + ID: "appointments_by_day", + Name: "Appointments by Day", + Description: "Daily breakdown of appointment scheduling", + SQL: `SELECT + appointment_date, + COUNT(*) as appointments_scheduled, + COUNT(DISTINCT user_id) as unique_volunteers, + COUNT(DISTINCT address_id) as unique_addresses + FROM appointment + WHERE appointment_date BETWEEN ?1 AND ?2 + GROUP BY appointment_date + ORDER BY 
appointment_date DESC`, + }, + { + ID: "completion_rates", + Name: "Completion Rates", + Description: "Appointment completion statistics by volunteer", + SQL: `SELECT + u.first_name || ' ' || u.last_name as volunteer_name, + COUNT(a.sched_id) as total_appointments, + COUNT(CASE WHEN ad.visited_validated = true THEN 1 END) as completed_visits, + ROUND(COUNT(CASE WHEN ad.visited_validated = true THEN 1 END) * 100.0 / COUNT(a.sched_id), 2) as completion_rate + FROM appointment a + JOIN users u ON a.user_id = u.user_id + LEFT JOIN address_database ad ON a.address_id = ad.address_id + WHERE a.created_at BETWEEN ?1 AND ?2 + GROUP BY u.user_id, u.first_name, u.last_name + HAVING COUNT(a.sched_id) > 0 + ORDER BY completion_rate DESC, total_appointments DESC`, + }, + { + ID: "volunteer_schedules", + Name: "Volunteer Schedules", + Description: "Current volunteer scheduling overview", + SQL: `SELECT + u.first_name || ' ' || u.last_name as volunteer_name, + a.appointment_date, + a.appointment_time, + ad.address, + a.created_at as scheduled_date + FROM appointment a + JOIN users u ON a.user_id = u.user_id + JOIN address_database ad ON a.address_id = ad.address_id + WHERE a.appointment_date BETWEEN ?1 AND ?2 + ORDER BY a.appointment_date, a.appointment_time`, + }, + { + ID: "missed_appointments", + Name: "Missed Appointments", + Description: "Appointments that were scheduled but addresses remain unvisited", + SQL: `SELECT + u.first_name || ' ' || u.last_name as volunteer_name, + a.appointment_date, + a.appointment_time, + ad.address, + CASE + WHEN a.appointment_date < CURRENT_DATE THEN 'Overdue' + ELSE 'Upcoming' + END as status + FROM appointment a + JOIN users u ON a.user_id = u.user_id + JOIN address_database ad ON a.address_id = ad.address_id + WHERE ad.visited_validated = false + AND a.appointment_date BETWEEN ?1 AND ?2 + ORDER BY a.appointment_date DESC`, + }, + { + ID: "peak_hours", + Name: "Peak Activity Hours", + Description: "Most popular appointment times", + SQL: 
`SELECT + appointment_time, + COUNT(*) as appointment_count, + COUNT(DISTINCT user_id) as unique_volunteers + FROM appointment + WHERE appointment_date BETWEEN ?1 AND ?2 + GROUP BY appointment_time + ORDER BY appointment_count DESC`, + }, + }, + "polls": { + { + ID: "poll_creation_stats", + Name: "Poll Creation Statistics", + Description: "Overview of poll creation activity", + SQL: `SELECT + u.first_name || ' ' || u.last_name as creator_name, + COUNT(p.poll_id) as polls_created, + COUNT(CASE WHEN p.is_active = true THEN 1 END) as active_polls, + COALESCE(SUM(p.amount_donated), 0) as total_donations, + COALESCE(AVG(p.amount_donated), 0) as avg_donation_per_poll + FROM poll p + JOIN users u ON p.user_id = u.user_id + WHERE p.created_at BETWEEN ?1 AND ?2 + GROUP BY u.user_id, u.first_name, u.last_name + ORDER BY polls_created DESC`, + }, + { + ID: "donation_analysis", + Name: "Donation Analysis", + Description: "Detailed analysis of donation patterns", + SQL: `SELECT + CASE + WHEN amount_donated = 0 THEN 'No Donation' + WHEN amount_donated BETWEEN 0.01 AND 25 THEN '$1 - $25' + WHEN amount_donated BETWEEN 25.01 AND 50 THEN '$26 - $50' + WHEN amount_donated BETWEEN 50.01 AND 100 THEN '$51 - $100' + ELSE 'Over $100' + END as donation_range, + COUNT(*) as poll_count, + COALESCE(SUM(amount_donated), 0) as total_amount, + ROUND(COUNT(*) * 100.0 / (SELECT COUNT(*) FROM poll WHERE created_at BETWEEN ?1 AND ?2), 2) as percentage + FROM poll + WHERE created_at BETWEEN ?1 AND ?2 + GROUP BY donation_range + ORDER BY + CASE donation_range + WHEN 'No Donation' THEN 1 + WHEN '$1 - $25' THEN 2 + WHEN '$26 - $50' THEN 3 + WHEN '$51 - $100' THEN 4 + WHEN 'Over $100' THEN 5 + END`, + }, + { + ID: "active_vs_inactive", + Name: "Active vs Inactive Polls", + Description: "Comparison of active and inactive polls", + SQL: `SELECT + CASE + WHEN is_active = true THEN 'Active' + ELSE 'Inactive' + END as poll_status, + COUNT(*) as poll_count, + COALESCE(SUM(amount_donated), 0) as 
total_donations, + COALESCE(AVG(amount_donated), 0) as avg_donation + FROM poll + WHERE created_at BETWEEN ?1 AND ?2 + GROUP BY is_active + ORDER BY poll_count DESC`, + }, + { + ID: "poll_trends", + Name: "Poll Activity Trends", + Description: "Poll creation trends over time", + SQL: `SELECT + DATE(created_at) as creation_date, + COUNT(*) as polls_created, + COUNT(CASE WHEN is_active = true THEN 1 END) as active_polls, + COALESCE(SUM(amount_donated), 0) as daily_donations + FROM poll + WHERE created_at BETWEEN ?1 AND ?2 + GROUP BY DATE(created_at) + ORDER BY creation_date DESC`, + }, + { + ID: "creator_performance", + Name: "Creator Performance", + Description: "Performance metrics for poll creators", + SQL: `SELECT + u.first_name || ' ' || u.last_name as creator_name, + u.email, + COUNT(p.poll_id) as total_polls, + COALESCE(SUM(p.amount_donated), 0) as total_raised, + COALESCE(MAX(p.amount_donated), 0) as highest_donation, + COUNT(CASE WHEN p.is_active = true THEN 1 END) as active_polls + FROM users u + JOIN poll p ON u.user_id = p.user_id + WHERE p.created_at BETWEEN ?1 AND ?2 + GROUP BY u.user_id, u.first_name, u.last_name, u.email + ORDER BY total_raised DESC, total_polls DESC`, + }, + }, + "responses": { + { + ID: "voter_status", + Name: "Voter Status Report", + Description: "Analysis of voter status from poll responses", + SQL: `SELECT + voter_before as voted_before, + COUNT(*) as response_count, + COUNT(CASE WHEN will_vote_again = true THEN 1 END) as will_vote_again_count, + ROUND(COUNT(CASE WHEN will_vote_again = true THEN 1 END) * 100.0 / COUNT(*), 2) as vote_again_percentage + FROM poll_response pr + JOIN poll p ON pr.poll_id = p.poll_id + WHERE p.created_at BETWEEN ?1 AND ?2 + GROUP BY voter_before + ORDER BY response_count DESC`, + }, + { + ID: "sign_requests", + Name: "Sign Requests Summary", + Description: "Summary of lawn sign and banner requests", + SQL: `SELECT + 'Lawn Signs' as sign_type, + SUM(lawn_sign) as total_requested, + SUM(CASE WHEN 
lawn_sign_status = 'delivered' THEN lawn_sign ELSE 0 END) as delivered, + SUM(CASE WHEN lawn_sign_status = 'cancelled' THEN lawn_sign ELSE 0 END) as cancelled + FROM poll_response pr + JOIN poll p ON pr.poll_id = p.poll_id + WHERE p.created_at BETWEEN ?1 AND ?2 + UNION ALL + SELECT + 'Banner Signs' as sign_type, + SUM(banner_sign) as total_requested, + SUM(CASE WHEN banner_sign_status = 'delivered' THEN banner_sign ELSE 0 END) as delivered, + SUM(CASE WHEN banner_sign_status = 'cancelled' THEN banner_sign ELSE 0 END) as cancelled + FROM poll_response pr + JOIN poll p ON pr.poll_id = p.poll_id + WHERE p.created_at BETWEEN ?1 AND ?2`, + }, + { + ID: "feedback_analysis", + Name: "Feedback Analysis", + Description: "Analysis of open-text feedback from responses", + SQL: `SELECT + LENGTH(thoughts) as feedback_length_category, + COUNT(*) as response_count + FROM poll_response pr + JOIN poll p ON pr.poll_id = p.poll_id + WHERE p.created_at BETWEEN ?1 AND ?2 + AND thoughts IS NOT NULL + AND TRIM(thoughts) != '' + GROUP BY + CASE + WHEN LENGTH(thoughts) <= 50 THEN 'Short (1-50 chars)' + WHEN LENGTH(thoughts) <= 150 THEN 'Medium (51-150 chars)' + ELSE 'Long (150+ chars)' + END + ORDER BY response_count DESC`, + }, + { + ID: "response_trends", + Name: "Response Trends", + Description: "Poll response trends over time", + SQL: `SELECT + DATE(pr.created_at) as response_date, + COUNT(*) as responses, + COUNT(CASE WHEN voter_before = true THEN 1 END) as returning_voters, + COUNT(CASE WHEN will_vote_again = true THEN 1 END) as committed_future_voters + FROM poll_response pr + JOIN poll p ON pr.poll_id = p.poll_id + WHERE pr.created_at BETWEEN ?1 AND ?2 + GROUP BY DATE(pr.created_at) + ORDER BY response_date DESC`, + }, + { + ID: "repeat_voters", + Name: "Repeat Voters Analysis", + Description: "Analysis of voters who have responded to multiple polls", + SQL: `SELECT + pr.name, + pr.email, + COUNT(DISTINCT pr.poll_id) as polls_responded, + SUM(CASE WHEN voter_before = true THEN 1 
ELSE 0 END) as times_voted_before, + SUM(CASE WHEN will_vote_again = true THEN 1 ELSE 0 END) as times_will_vote_again + FROM poll_response pr + JOIN poll p ON pr.poll_id = p.poll_id + WHERE p.created_at BETWEEN ?1 AND ?2 + GROUP BY pr.name, pr.email + HAVING COUNT(DISTINCT pr.poll_id) > 1 + ORDER BY polls_responded DESC`, + }, + }, + "posts": { + { + ID: "posts_by_user", + Name: "Posts by User", + Description: "Post creation statistics by user", + SQL: `SELECT + u.first_name || ' ' || u.last_name as author_name, + u.email, + COUNT(po.post_id) as total_posts, + MIN(po.created_at) as first_post, + MAX(po.created_at) as latest_post + FROM users u + JOIN posts po ON u.user_id = po.user_id + WHERE po.created_at BETWEEN ?1 AND ?2 + GROUP BY u.user_id, u.first_name, u.last_name, u.email + ORDER BY total_posts DESC`, + }, + { + ID: "engagement_timeline", + Name: "Engagement Timeline", + Description: "Post creation timeline", + SQL: `SELECT + DATE(created_at) as post_date, + COUNT(*) as posts_created, + COUNT(DISTINCT user_id) as active_users + FROM posts + WHERE created_at BETWEEN ?1 AND ?2 + GROUP BY DATE(created_at) + ORDER BY post_date DESC`, + }, + { + ID: "content_analysis", + Name: "Content Analysis", + Description: "Analysis of post content length and characteristics", + SQL: `SELECT + CASE + WHEN LENGTH(content) <= 100 THEN 'Short (1-100 chars)' + WHEN LENGTH(content) <= 300 THEN 'Medium (101-300 chars)' + ELSE 'Long (300+ chars)' + END as content_length, + COUNT(*) as post_count, + ROUND(AVG(LENGTH(content)), 2) as avg_length + FROM posts + WHERE created_at BETWEEN ?1 AND ?2 + AND content IS NOT NULL + GROUP BY content_length + ORDER BY post_count DESC`, + }, + { + ID: "post_frequency", + Name: "Post Frequency Report", + Description: "Posting frequency patterns", + SQL: `SELECT + u.first_name || ' ' || u.last_name as author_name, + COUNT(*) as total_posts, + ROUND(COUNT(*) * 1.0 / GREATEST(1, EXTRACT(days FROM (?2::date - ?1::date))), 2) as posts_per_day, + 
DATE(MIN(po.created_at)) as first_post, + DATE(MAX(po.created_at)) as last_post + FROM posts po + JOIN users u ON po.user_id = u.user_id + WHERE po.created_at BETWEEN ?1 AND ?2 + GROUP BY u.user_id, u.first_name, u.last_name + HAVING COUNT(*) > 1 + ORDER BY posts_per_day DESC`, + }, + }, + "availability": { + { + ID: "volunteer_availability", + Name: "Volunteer Availability", + Description: "Current volunteer availability schedules", + SQL: `SELECT + u.first_name || ' ' || u.last_name as volunteer_name, + av.day_of_week, + av.start_time, + av.end_time, + av.created_at as schedule_updated + FROM volunteer_availability av + JOIN users u ON av.user_id = u.user_id + WHERE av.created_at BETWEEN ?1 AND ?2 + ORDER BY u.first_name, u.last_name, av.day_of_week, av.start_time`, + }, + { + ID: "peak_availability", + Name: "Peak Availability Times", + Description: "Times when most volunteers are available", + SQL: `SELECT + day_of_week, + start_time, + end_time, + COUNT(*) as volunteers_available + FROM volunteer_availability av + JOIN users u ON av.user_id = u.user_id + WHERE av.created_at BETWEEN ?1 AND ?2 + GROUP BY day_of_week, start_time, end_time + ORDER BY volunteers_available DESC, day_of_week, start_time`, + }, + { + ID: "coverage_gaps", + Name: "Coverage Gaps", + Description: "Time periods with limited volunteer availability", + SQL: `SELECT + day_of_week, + start_time, + end_time, + COUNT(*) as volunteers_available + FROM volunteer_availability av + WHERE av.created_at BETWEEN ?1 AND ?2 + GROUP BY day_of_week, start_time, end_time + HAVING COUNT(*) <= 2 + ORDER BY volunteers_available ASC, day_of_week, start_time`, + }, + { + ID: "schedule_conflicts", + Name: "Schedule Conflicts", + Description: "Appointments scheduled outside volunteer availability", + SQL: `SELECT + u.first_name || ' ' || u.last_name as volunteer_name, + a.appointment_date, + a.appointment_time, + ad.address, + 'No availability recorded' as conflict_reason + FROM appointment a + JOIN users u ON 
a.user_id = u.user_id + JOIN address_database ad ON a.address_id = ad.address_id + LEFT JOIN volunteer_availability av ON u.user_id = av.user_id + AND EXTRACT(dow FROM a.appointment_date) = av.day_of_week + AND a.appointment_time BETWEEN av.start_time AND av.end_time + WHERE a.appointment_date BETWEEN ?1 AND ?2 + AND av.user_id IS NULL + ORDER BY a.appointment_date, a.appointment_time`, + }, + }, + } +} + +// Replace date placeholders in SQL +func replaceDatePlaceholders(sql, dateFrom, dateTo string) string { + sql = strings.ReplaceAll(sql, "?1", "'"+dateFrom+"'") + sql = strings.ReplaceAll(sql, "?2", "'"+dateTo+" 23:59:59'") + return sql +} + +// Generate summary statistics +func generateSummaryStats(result ReportResult) []SummaryStats { + if len(result.Rows) == 0 { + return nil + } + + stats := []SummaryStats{ + {Label: "Total Records", Value: strconv.Itoa(result.Count)}, + } + + // Try to generate additional stats based on column types + if len(result.Columns) > 0 { + // Look for numeric columns to calculate sums/averages + for colIdx, colName := range result.Columns { + if strings.Contains(strings.ToLower(colName), "count") || + strings.Contains(strings.ToLower(colName), "total") || + strings.Contains(strings.ToLower(colName), "amount") { + + var sum float64 + var validCount int + + for _, row := range result.Rows { + if colIdx < len(row) { + if val, err := strconv.ParseFloat(fmt.Sprintf("%v", row[colIdx]), 64); err == nil { + sum += val + validCount++ + } + } + } + + if validCount > 0 { + stats = append(stats, SummaryStats{ + Label: fmt.Sprintf("Total %s", strings.Title(strings.ToLower(colName))), + Value: fmt.Sprintf("%.2f", sum), + }) + + if validCount > 1 { + stats = append(stats, SummaryStats{ + Label: fmt.Sprintf("Average %s", strings.Title(strings.ToLower(colName))), + Value: fmt.Sprintf("%.2f", sum/float64(validCount)), + }) + } + } + break // Only calculate for first numeric column + } } } - return false + + return stats } -// getQueryExamples returns 
example queries for the user -func getQueryExamples() []string { - return []string{ - "volunteers who went to Main Street", - "poll responses for 123 Oak Avenue", - "donations by volunteer John", - "team with most appointments", - "people in team 5", - "money made by team 3", - "visited addresses from 2024-01-01", - "active polls created after 2024-06-01", - "appointments for unvisited addresses", - "users with role admin", - "polls with donations over 100", - "addresses visited by volunteer Sarah", - "team leads with more than 5 appointments", - "donations per address", - "volunteer activity by month", - } -} - -// SmartFilterAPIHandler provides JSON API for smart filtering -func SmartFilterAPIHandler(w http.ResponseWriter, r *http.Request) { - role := r.Context().Value("user_role").(int) - if role != 1 { - http.Error(w, "Unauthorized", http.StatusForbidden) - return - } - - smartQuery := r.URL.Query().Get("q") - if smartQuery == "" { - http.Error(w, "Query parameter 'q' is required", http.StatusBadRequest) - return - } - - result := executeSmartFilter(smartQuery) - - w.Header().Set("Content-Type", "application/json") - // Convert result to JSON manually since we're not using json package - response := fmt.Sprintf(`{ - "columns": [%s], - "rows": [%s], - "count": %d, - "query": "%s", - "error": "%s" - }`, - formatColumnsForJSON(result.Columns), - formatRowsForJSON(result.Rows), - result.Count, - strings.Replace(result.Query, "\"", "\\\"", -1), - result.Error, - ) - - w.Write([]byte(response)) -} - -func formatColumnsForJSON(columns []string) string { - var quoted []string - for _, col := range columns { - quoted = append(quoted, fmt.Sprintf(`"%s"`, col)) - } - return strings.Join(quoted, ",") -} - -func formatRowsForJSON(rows [][]interface{}) string { - var rowStrings []string - for _, row := range rows { - var values []string - for _, val := range row { - values = append(values, fmt.Sprintf(`"%v"`, val)) - } - rowStrings = append(rowStrings, 
"["+strings.Join(values, ",")+"]") - } - return strings.Join(rowStrings, ",") -} - -// SmartFilterExportHandler exports smart filter results as CSV -func SmartFilterExportHandler(w http.ResponseWriter, r *http.Request) { - role := r.Context().Value("user_role").(int) - if role != 1 { - http.Error(w, "Unauthorized", http.StatusForbidden) - return - } - - smartQuery := r.URL.Query().Get("smart_query") - if smartQuery == "" { - http.Error(w, "No query provided", http.StatusBadRequest) - return - } - - result := executeSmartFilter(smartQuery) - if result.Error != "" { - http.Error(w, result.Error, http.StatusBadRequest) - return - } - +// Export CSV +func exportCSV(w http.ResponseWriter, result ReportResult, reportTitle string) { w.Header().Set("Content-Type", "text/csv") - w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"smart_filter_results_%s.csv\"", time.Now().Format("2006-01-02"))) + w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s_%s.csv\"", + strings.ReplaceAll(strings.ToLower(reportTitle), " ", "_"), + time.Now().Format("2006-01-02"))) - // Write CSV headers - w.Write([]byte(strings.Join(result.Columns, ",") + "\n")) + // Write header + fmt.Fprintf(w, "%s\n", strings.Join(result.Columns, ",")) - // Write CSV rows + // Write data for _, row := range result.Rows { var csvRow []string - for _, val := range row { - // Escape commas and quotes in CSV values - strVal := fmt.Sprintf("%v", val) - if strings.Contains(strVal, ",") || strings.Contains(strVal, "\"") { - strVal = "\"" + strings.Replace(strVal, "\"", "\"\"", -1) + "\"" + for _, cell := range row { + // Escape CSV values + cellStr := fmt.Sprintf("%v", cell) + if strings.Contains(cellStr, ",") || strings.Contains(cellStr, "\"") || strings.Contains(cellStr, "\n") { + cellStr = `"` + strings.ReplaceAll(cellStr, `"`, `""`) + `"` } - csvRow = append(csvRow, strVal) + csvRow = append(csvRow, cellStr) } - w.Write([]byte(strings.Join(csvRow, ",") + "\n")) + 
fmt.Fprintf(w, "%s\n", strings.Join(csvRow, ",")) } } \ No newline at end of file diff --git a/app/internal/handlers/login.go b/app/internal/handlers/login.go index 55f5489..f44c8a3 100644 --- a/app/internal/handlers/login.go +++ b/app/internal/handlers/login.go @@ -162,8 +162,8 @@ func LoginHandler(w http.ResponseWriter, r *http.Request) { func RegisterHandler(w http.ResponseWriter, r *http.Request) { if r.Method != http.MethodPost { - utils.Render(w, "register.html", map[string]interface{}{ - "Title": "Register", + utils.Render(w, "layout.html", map[string]interface{}{ + "Title": "layout", "IsAuthenticated": false, }) return diff --git a/app/internal/handlers/volunteer_posts.go b/app/internal/handlers/volunteer_dashboard.go similarity index 82% rename from app/internal/handlers/volunteer_posts.go rename to app/internal/handlers/volunteer_dashboard.go index c1fead1..61f3885 100644 --- a/app/internal/handlers/volunteer_posts.go +++ b/app/internal/handlers/volunteer_dashboard.go @@ -22,6 +22,13 @@ type VolunteerStatistics struct { BannerSignsRequested int PollCompletionPercent int } +type TeamMate struct { + UserID int + FullName string + Phone string + Role string + IsLead bool +} // VolunteerPostsHandler - Dashboard view for volunteers with posts and statistics func VolunteerPostsHandler(w http.ResponseWriter, r *http.Request) { @@ -71,6 +78,38 @@ func VolunteerPostsHandler(w http.ResponseWriter, r *http.Request) { } } + // Fetch teammates + teammatesRows, err := models.DB.Query(` + SELECT u.user_id, + u.first_name || ' ' || u.last_name AS full_name, + COALESCE(u.phone, '') AS phone, + r.name AS role + FROM users u + JOIN role r ON u.role_id = r.role_id + JOIN team tm ON u.user_id = tm.volunteer_id OR u.user_id = tm.team_lead_id + WHERE tm.team_id = ( + SELECT team_id + FROM team + WHERE volunteer_id = $1 or team_lead_id = $2 + ) + ORDER BY CASE WHEN r.name = 'team_lead' THEN 0 ELSE 1 END, u.first_name; + `, CurrentUserID, CurrentUserID) + if err != nil { + 
fmt.Printf("Database query error (teammates): %v\n", err) + } + defer teammatesRows.Close() + + var teammates []TeamMate + for teammatesRows.Next() { + var t TeamMate + if err := teammatesRows.Scan(&t.UserID, &t.FullName, &t.Phone, &t.Role); err != nil { + fmt.Printf("Row scan error (teammates): %v\n", err) + continue + } + teammates = append(teammates, t) + } + + // Get volunteer statistics stats, err := getVolunteerStatistics(CurrentUserID) @@ -93,6 +132,7 @@ func VolunteerPostsHandler(w http.ResponseWriter, r *http.Request) { "UserName": username, "Posts": posts, "Statistics": stats, + "Teammates": teammates, "ActiveSection": "dashboard", "IsVolunteer": true, }) diff --git a/app/internal/handlers/volunteer_poll.go b/app/internal/handlers/volunteer_poll.go index d8d53c8..595b7d6 100644 --- a/app/internal/handlers/volunteer_poll.go +++ b/app/internal/handlers/volunteer_poll.go @@ -88,7 +88,12 @@ func PollHandler(w http.ResponseWriter, r *http.Request) { pollID := r.FormValue("poll_id") postalCode := r.FormValue("postal_code") + + // Parse integer values + question3LawnSigns, _ := strconv.Atoi(r.FormValue("question3_lawn_signs")) + question4BannerSigns, _ := strconv.Atoi(r.FormValue("question4_banner_signs")) question5Thoughts := r.FormValue("question5_thoughts") + question6donation := r.FormValue("question6_amount") // Parse boolean values var question1VotedBefore *bool @@ -115,19 +120,16 @@ func PollHandler(w http.ResponseWriter, r *http.Request) { } } - // Parse integer values - question3LawnSigns, _ := strconv.Atoi(r.FormValue("question3_lawn_signs")) - question4BannerSigns, _ := strconv.Atoi(r.FormValue("question4_banner_signs")) // Insert poll response _, err = models.DB.Exec(` INSERT INTO poll_response ( poll_id, respondent_postal_code, question1_voted_before, question2_vote_again, question3_lawn_signs, question4_banner_signs, - question5_thoughts - ) VALUES ($1, $2, $3, $4, $5, $6, $7) + question5_thoughts, question6_donation_amount + ) VALUES ($1, $2, $3, 
$4, $5, $6, $7, $8) `, pollID, postalCode, question1VotedBefore, question2VoteAgain, - question3LawnSigns, question4BannerSigns, question5Thoughts) + question3LawnSigns, question4BannerSigns, question5Thoughts, question6donation) if err != nil { fmt.Print(err) diff --git a/app/internal/templates/dashboard/dashboard.html b/app/internal/templates/dashboard/dashboard.html index 1112bfd..5923076 100644 --- a/app/internal/templates/dashboard/dashboard.html +++ b/app/internal/templates/dashboard/dashboard.html @@ -10,233 +10,365 @@ rel="stylesheet" /> - + + + +
- -- Active Volunteers -
-- {{.VolunteerCount}} -
-- Addresses Visited -
-- {{.ValidatedCount}} -
-Donation
-- ${{.TotalDonations}} -
-- Houses Left -
-- {{.HousesLeftPercent}}% -
-+ Active Volunteers +
+{{.VolunteerCount}}
++ Addresses Visited +
+{{.ValidatedCount}}
+Loading...
+Donation
+${{.TotalDonations}}
+Houses Left
++ {{.HousesLeftPercent}}% +
+