package web

import (
	"bytes"
	"encoding/csv"
	"fmt"
	"io"
	"log"
	"mime/multipart"
	"net/http"
	"path/filepath"
	"regexp"
	"sort"
	"strconv"
	"strings"
	"sync"
	"time"

	root "marmic/servicetrade-toolbox"
	"marmic/servicetrade-toolbox/internal/api"
)

// DocumentsHandler handles the document upload page
func DocumentsHandler(w http.ResponseWriter, r *http.Request) {
	session, ok := r.Context().Value("session").(*api.Session)
	if !ok {
		http.Error(w, "Unauthorized", http.StatusUnauthorized)
		return
	}

	tmpl := root.WebTemplates
	data := map[string]interface{}{
		"Title":   "Document Uploads",
		"Session": session,
	}

	if r.Header.Get("HX-Request") == "true" {
		// For HTMX requests, just send the document_upload partial
		if err := tmpl.ExecuteTemplate(w, "document_upload", data); err != nil {
			log.Printf("Template execution error: %v", err)
			http.Error(w, "Internal Server Error", http.StatusInternalServerError)
			return
		}
	} else {
		// For full page requests, first render document_upload into a buffer
		var contentBuf bytes.Buffer
		if err := tmpl.ExecuteTemplate(&contentBuf, "document_upload", data); err != nil {
			log.Printf("Template execution error: %v", err)
			http.Error(w, "Internal Server Error", http.StatusInternalServerError)
			return
		}

		// Add the rendered content to the data for the layout
		data["BodyContent"] = contentBuf.String()

		// Now render the layout with our content
		if err := tmpl.ExecuteTemplate(w, "layout.html", data); err != nil {
			log.Printf("Template execution error: %v", err)
			http.Error(w, "Internal Server Error", http.StatusInternalServerError)
			return
		}
	}
}

// ProcessCSVHandler processes a CSV file with job numbers
func ProcessCSVHandler(w http.ResponseWriter, r *http.Request) {
	_, ok := r.Context().Value("session").(*api.Session)
	if !ok {
		http.Error(w, "Unauthorized", http.StatusUnauthorized)
		return
	}

	// Check if the request method is POST
	if r.Method != http.MethodPost {
		http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
		return
	}

	// Parse the multipart form data with a 10MB limit
	if err := r.ParseMultipartForm(10 << 20); err != nil {
		http.Error(w, "Unable to parse form: "+err.Error(), http.StatusBadRequest)
		return
	}

	// Get the file from the form
	file, _, err := r.FormFile("csvFile")
	if err != nil {
		http.Error(w, "Error retrieving file: "+err.Error(), http.StatusBadRequest)
		return
	}
	defer file.Close()

	// Read the CSV data
	csvData, err := csv.NewReader(file).ReadAll()
	if err != nil {
		http.Error(w, "Error reading CSV file: "+err.Error(), http.StatusBadRequest)
		return
	}

	if len(csvData) < 2 {
		http.Error(w, "CSV file must contain at least a header row and one data row", http.StatusBadRequest)
		return
	}

	// Find the index of the 'id' column
	headerRow := csvData[0]
	idColumnIndex := -1
	for i, header := range headerRow {
		if strings.ToLower(strings.TrimSpace(header)) == "id" {
			idColumnIndex = i
			break
		}
	}

	// If 'id' column not found, try the first column
	if idColumnIndex == -1 {
		idColumnIndex = 0
		log.Printf("No 'id' column found in CSV, using first column (header: %s)", headerRow[0])
	} else {
		log.Printf("Found 'id' column at index %d", idColumnIndex)
	}

	// Extract job numbers from the CSV
	var jobNumbers []string
	for rowIndex, row := range csvData {
		// Skip header row
		if rowIndex == 0 {
			continue
		}

		if len(row) > idColumnIndex {
			// Extract and clean up the job ID
			jobID := strings.TrimSpace(row[idColumnIndex])
			if jobID != "" {
				jobNumbers = append(jobNumbers, jobID)
			}
		}
	}

	totalJobs := len(jobNumbers)
	log.Printf("Extracted %d job IDs from CSV", totalJobs)

	if totalJobs == 0 {
		http.Error(w, "No valid job IDs found in the CSV file", http.StatusBadRequest)
		return
	}
	// Create a hidden input with the job IDs
	jobsValue := strings.Join(jobNumbers, ",")
	jobSampleDisplay := getJobSampleDisplay(jobNumbers)

	// Generate HTML for the main response (hidden input for job-ids-container)
	var responseHTML bytes.Buffer
	responseHTML.WriteString(fmt.Sprintf(`<input type="hidden" id="job-ids" name="job-ids" value="%s">`, jobsValue))
	responseHTML.WriteString(fmt.Sprintf(`
<div class="alert alert-success">
	<p>Found %d job(s) in the CSV file</p>
</div>
`, totalJobs))
	responseHTML.WriteString(fmt.Sprintf(`
<div class="job-sample">
	<p>Sample job IDs: %s</p>
</div>
`, jobSampleDisplay))

	// Generate out-of-band swap for the preview section
	// (the id must match the preview element rendered by the document_upload template)
	responseHTML.WriteString(fmt.Sprintf(`
<div id="csv-preview" hx-swap-oob="true">
	<h4>Detected Jobs</h4>
	<p>Found %d job(s) in the CSV file</p>
	<p>Sample job IDs: %s</p>
</div>
`, totalJobs, jobSampleDisplay))
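	// Because the fragment above carries hx-swap-oob="true", HTMX applies it to
	// the element with the matching id elsewhere on the page, while the rest of
	// this response is swapped into the upload form's normal target.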
	w.Header().Set("Content-Type", "text/html")
	w.Write(responseHTML.Bytes())
}

// Helper function to show sample job IDs with a limit
func getJobSampleDisplay(jobIDs []string) string {
	const maxSamples = 5
	if len(jobIDs) <= maxSamples {
		return strings.Join(jobIDs, ", ")
	}

	sample := append([]string{}, jobIDs[:maxSamples]...)
	return strings.Join(sample, ", ") + fmt.Sprintf(" and %d more...", len(jobIDs)-maxSamples)
}

// UploadDocumentsHandler handles document uploads to jobs
func UploadDocumentsHandler(w http.ResponseWriter, r *http.Request) {
	session, ok := r.Context().Value("session").(*api.Session)
	if !ok {
		http.Error(w, "Unauthorized", http.StatusUnauthorized)
		return
	}

	// Check if the request method is POST
	if r.Method != http.MethodPost {
		http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
		return
	}

	// Parse the multipart form with a 30MB limit
	if err := r.ParseMultipartForm(30 << 20); err != nil {
		http.Error(w, fmt.Sprintf("Unable to parse form: %v", err), http.StatusBadRequest)
		return
	}

	// Get the job numbers from either of the possible form fields
	jobNumbers := r.FormValue("jobNumbers")
	if jobNumbers == "" {
		jobNumbers = r.FormValue("job-ids")
		if jobNumbers == "" {
			log.Printf("No job numbers provided. Form data: %+v", r.Form)
			http.Error(w, "No job numbers provided", http.StatusBadRequest)
			return
		}
	}

	// Log the form data for debugging
	log.Printf("Form data: %+v", r.Form)
	log.Printf("Job numbers: %s", jobNumbers)

	// Split the job numbers
	jobs := strings.Split(jobNumbers, ",")
	if len(jobs) == 0 {
		http.Error(w, "No valid job numbers provided", http.StatusBadRequest)
		return
	}

	// Regular expression to match file field patterns
	filePattern := regexp.MustCompile(`document-file-(\d+)`)

	// Collect document data
	type DocumentData struct {
		File   multipart.File
		Header *multipart.FileHeader
		Name   string
		Type   string
		Index  int
	}
	var documents []DocumentData

	// First, identify all available indices
	var indices []int
	for key := range r.MultipartForm.File {
		if matches := filePattern.FindStringSubmatch(key); len(matches) > 1 {
			if index, err := strconv.Atoi(matches[1]); err == nil {
				indices = append(indices, index)
			}
		}
	}

	// Process each document
	for _, index := range indices {
		fileKey := fmt.Sprintf("document-file-%d", index)
		nameKey := fmt.Sprintf("document-name-%d", index)
		typeKey := fmt.Sprintf("document-type-%d", index)

		fileHeaders := r.MultipartForm.File[fileKey]
		if len(fileHeaders) == 0 {
			continue // Skip if no file uploaded
		}

		fileHeader := fileHeaders[0]
		file, err := fileHeader.Open()
		if err != nil {
			log.Printf("Error opening file %s: %v", fileHeader.Filename, err)
			continue
		}

		// Get document name (use filename if not provided)
		documentName := r.FormValue(nameKey)
		if documentName == "" {
			documentName = fileHeader.Filename
		} else {
			// If a custom name is provided without extension, add the original file extension
			if !strings.Contains(documentName, ".") {
				extension := filepath.Ext(fileHeader.Filename)
				if extension != "" {
					documentName = documentName + extension
				}
			}
		}
		log.Printf("Using document name: %s (original filename: %s)", documentName, fileHeader.Filename)

		// Get document type; skip (and close) files without one
		documentType := r.FormValue(typeKey)
		if documentType == "" {
			log.Printf("No document type for file %s", fileHeader.Filename)
			file.Close()
			continue
		}

		// Log the document type for debugging
		log.Printf("Document type for %s: '%s'", documentName, documentType)

		documents = append(documents, DocumentData{
			File:   file,
			Header: fileHeader,
			Name:   documentName,
			Type:   documentType,
			Index:  index,
		})
	}
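	// At this point every selected file is open and paired with a display name
	// and a document type; the rest of the handler reads the file contents,
	// uploads each job/document combination concurrently, and renders an HTML
	// summary of the results.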
	if len(documents) == 0 {
		http.Error(w, "No valid documents selected for upload", http.StatusBadRequest)
		return
	}

	// Read all file contents first to avoid keeping files open during concurrent uploads
	type DocumentWithContent struct {
		Name        string
		Type        string
		FileContent []byte
	}
	var docsWithContent []DocumentWithContent

	for _, doc := range documents {
		// Read file content
		fileContent, err := io.ReadAll(doc.File)
		doc.File.Close() // Close the file as soon as we're done with it
		if err != nil {
			log.Printf("Error reading file %s: %v", doc.Header.Filename, err)
			continue
		}

		docsWithContent = append(docsWithContent, DocumentWithContent{
			Name:        doc.Name,
			Type:        doc.Type,
			FileContent: fileContent,
		})
	}

	// Concurrent upload with throttling.
	// The ServiceTrade API allows roughly 30s of availability per minute
	// (approximately 15 requests at 2s each), so keep concurrency modest.
	const maxConcurrent = 5                     // A conservative limit to avoid rate limiting
	const requestDelay = 300 * time.Millisecond // Delay between requests

	// Channel for collecting results
	type UploadResult struct {
		JobID   string
		DocName string
		Success bool
		Error   string
		Data    map[string]interface{}
	}
	totalUploads := len(jobs) * len(docsWithContent)
	resultsChan := make(chan UploadResult, totalUploads)

	// Create a wait group to track when all uploads are done
	var wg sync.WaitGroup

	// Create a semaphore channel to limit concurrent uploads
	semaphore := make(chan struct{}, maxConcurrent)

	// Start the upload workers
	for _, jobID := range jobs {
		for _, doc := range docsWithContent {
			wg.Add(1)

			// Launch a goroutine for each job+document combination
			go func(jobID string, doc DocumentWithContent) {
				defer wg.Done()

				// Acquire a semaphore slot
				semaphore <- struct{}{}
				defer func() { <-semaphore }() // Release when done

				// Add a small delay to avoid overwhelming the API
				time.Sleep(requestDelay)

				// Call the ServiceTrade API
				result, err := session.UploadAttachment(jobID, doc.Name, doc.Type, doc.FileContent)
				if err != nil {
					log.Printf("Error uploading %s to job %s: %v", doc.Name, jobID, err)
					resultsChan <- UploadResult{
						JobID:   jobID,
						DocName: doc.Name,
						Success: false,
						Error:   err.Error(),
					}
				} else {
					log.Printf("Successfully uploaded %s to job %s", doc.Name, jobID)
					resultsChan <- UploadResult{
						JobID:   jobID,
						DocName: doc.Name,
						Success: true,
						Data:    result,
					}
				}
			}(jobID, doc)
		}
	}

	// Close the results channel when all uploads are done
	go func() {
		wg.Wait()
		close(resultsChan)
	}()

	// Collect results
	results := make(map[string][]UploadResult)
	for result := range resultsChan {
		if _, exists := results[result.JobID]; !exists {
			results[result.JobID] = []UploadResult{}
		}
		results[result.JobID] = append(results[result.JobID], result)
	}

	// Generate HTML for results
	var resultHTML bytes.Buffer

	// Count successes and failures
	var totalSuccess, totalFailure int
	for _, jobResults := range results {
		for _, result := range jobResults {
			if result.Success {
				totalSuccess++
			} else {
				totalFailure++
			}
		}
	}

	// Add summary section
") resultHTML.WriteString("

Upload Results

") resultHTML.WriteString("
") // Total jobs stat resultHTML.WriteString("
") resultHTML.WriteString(fmt.Sprintf("
%d
", len(results))) resultHTML.WriteString("
Total Jobs
") resultHTML.WriteString("
") // Success stat resultHTML.WriteString("
") resultHTML.WriteString(fmt.Sprintf("
%d
", totalSuccess)) resultHTML.WriteString("
Successful Uploads
") resultHTML.WriteString("
") // Failure stat resultHTML.WriteString("
") resultHTML.WriteString(fmt.Sprintf("
%d
", totalFailure)) resultHTML.WriteString("
Failed Uploads
") resultHTML.WriteString("
") // File count stat resultHTML.WriteString("
") resultHTML.WriteString(fmt.Sprintf("
%d
", totalSuccess+totalFailure)) resultHTML.WriteString("
Files Processed
") resultHTML.WriteString("
") resultHTML.WriteString("
") // End of upload-stats // Add completion message if totalFailure == 0 { resultHTML.WriteString("

All documents were successfully uploaded to ServiceTrade!

") } else { resultHTML.WriteString("

Some documents failed to upload. See details below.

") } resultHTML.WriteString("
") // End of upload-summary // Add detailed job results resultHTML.WriteString("
") // Sort job IDs for consistent display sortedJobs := make([]string, 0, len(results)) for jobID := range results { sortedJobs = append(sortedJobs, jobID) } sort.Strings(sortedJobs) for _, jobID := range sortedJobs { jobResults := results[jobID] // Determine job success status jobSuccess := true for _, result := range jobResults { if !result.Success { jobSuccess = false break } } // Job result row jobClass := "success" if !jobSuccess { jobClass = "error" } resultHTML.WriteString(fmt.Sprintf("
", jobClass)) resultHTML.WriteString(fmt.Sprintf("Job #%s", jobID)) // File results if len(jobResults) > 0 { resultHTML.WriteString("
") for _, result := range jobResults { fileClass := "success" icon := "✓" message := "Successfully uploaded" if !result.Success { fileClass = "error" icon = "✗" message = result.Error } resultHTML.WriteString(fmt.Sprintf("
", fileClass)) resultHTML.WriteString(fmt.Sprintf("%s", icon)) resultHTML.WriteString(fmt.Sprintf("%s:", result.DocName)) resultHTML.WriteString(fmt.Sprintf("%s", message)) resultHTML.WriteString("
") } resultHTML.WriteString("
") // End of file-results } else { resultHTML.WriteString("

No files processed for this job.

") } resultHTML.WriteString("
") // End of job-result } resultHTML.WriteString("
") // End of job-results w.Header().Set("Content-Type", "text/html") w.Write(resultHTML.Bytes()) } // DocumentFieldAddHandler generates a new document field for the form func DocumentFieldAddHandler(w http.ResponseWriter, r *http.Request) { // Generate a random ID for the new field newId := fmt.Sprintf("%d", time.Now().UnixNano()) // Create HTML for a new document row html := fmt.Sprintf(`
	// Create HTML for a new document row. The input names follow the
	// document-file-<id>, document-name-<id>, and document-type-<id> convention
	// that UploadDocumentsHandler parses; the remove button's hx-get path is an
	// assumed route for DocumentFieldRemoveHandler and should match whatever the
	// router registers.
	html := fmt.Sprintf(`
<div class="document-row" id="document-row-%s">
	<div class="form-group">
		<label>File</label>
		<input type="file" id="document-file-%s" name="document-file-%s">
	</div>
	<div class="form-group">
		<label>Document Name</label>
		<input type="text" id="document-name-%s" name="document-name-%s" placeholder="Defaults to the file name">
	</div>
	<div class="form-group">
		<label>Document Type</label>
		<input type="text" id="document-type-%s" name="document-type-%s" placeholder="ServiceTrade attachment type">
	</div>
	<button type="button" class="remove-document"
		hx-get="/documents/remove-field?id=%s"
		hx-target="#document-row-%s"
		hx-swap="outerHTML">Remove</button>
</div>
`, newId, newId, newId, newId, newId, newId, newId, newId, newId)

	w.Header().Set("Content-Type", "text/html")
	w.Write([]byte(html))
}

// DocumentFieldRemoveHandler handles the removal of a document field
func DocumentFieldRemoveHandler(w http.ResponseWriter, r *http.Request) {
	// We read the ID but don't need to use it for simple removal
	_ = r.URL.Query().Get("id")

	// For simplicity, we just return an empty response to remove the field.
	// A complete implementation would check whether this is the last remaining
	// field and handle that case.
	w.Header().Set("Content-Type", "text/html")
	w.Write([]byte(""))
}
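// Example wiring for these handlers (illustrative only: the actual paths and the
// middleware that stores the *api.Session under the "session" context key live
// elsewhere in this repository):
//
//	mux := http.NewServeMux()
//	mux.HandleFunc("/documents", web.DocumentsHandler)
//	mux.HandleFunc("/documents/process-csv", web.ProcessCSVHandler)
//	mux.HandleFunc("/documents/upload", web.UploadDocumentsHandler)
//	mux.HandleFunc("/documents/add-field", web.DocumentFieldAddHandler)
//	mux.HandleFunc("/documents/remove-field", web.DocumentFieldRemoveHandler)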