Merge pull request #64 from rede5/codex/verificar-rotas-e-campos-faltantes

feat(backend): add storage endpoints and job datePosted support
This commit is contained in:
Tiago Yamamoto 2026-02-14 21:16:28 -03:00 committed by GitHub
commit aa544426a5
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
6 changed files with 958 additions and 780 deletions

View file

@ -16,6 +16,16 @@ type StorageHandler struct {
storageService *services.StorageService
}
// uploadURLRequest is the optional JSON body accepted by the upload-URL
// endpoint; each field acts as a fallback when the matching query
// parameter is absent.
type uploadURLRequest struct {
	Filename    string `json:"filename"`    // desired object file name
	ContentType string `json:"contentType"` // MIME type to presign for
	Folder      string `json:"folder"`      // target folder (validated against an allow-list)
}

// downloadURLRequest is the JSON body for the download-URL endpoint.
type downloadURLRequest struct {
	Key string `json:"key"` // storage object key to presign for download
}
// NewStorageHandler builds a StorageHandler backed by the given storage service.
func NewStorageHandler(s *services.StorageService) *StorageHandler {
	h := &StorageHandler{storageService: s}
	return h
}
@ -31,7 +41,15 @@ func (h *StorageHandler) GetUploadURL(w http.ResponseWriter, r *http.Request) {
userIDVal := r.Context().Value(middleware.ContextUserID)
userID, _ := userIDVal.(string)
var body uploadURLRequest
if r.Method == http.MethodPost {
_ = json.NewDecoder(r.Body).Decode(&body)
}
folder := r.URL.Query().Get("folder")
if folder == "" {
folder = body.Folder
}
if folder == "" {
folder = "uploads"
}
@ -47,10 +65,20 @@ func (h *StorageHandler) GetUploadURL(w http.ResponseWriter, r *http.Request) {
}
filename := r.URL.Query().Get("filename")
if filename == "" {
filename = body.Filename
}
contentType := r.URL.Query().Get("contentType")
if contentType == "" {
contentType = body.ContentType
}
if filename == "" {
http.Error(w, "Filename is required", http.StatusBadRequest)
return
}
// Validate folder
validFolders := map[string]bool{"avatars": true, "resumes": true, "logos": true, "uploads": true}
validFolders := map[string]bool{"avatars": true, "resumes": true, "logos": true, "uploads": true, "documents": true}
if !validFolders[folder] {
http.Error(w, "Invalid folder", http.StatusBadRequest)
return
@ -83,12 +111,18 @@ func (h *StorageHandler) GetUploadURL(w http.ResponseWriter, r *http.Request) {
// Return simple JSON
resp := map[string]string{
"url": url,
"uploadUrl": url,
"key": key, // Client needs key to save to DB profile
"publicUrl": publicURL, // Public URL for immediate use
}
respWithExpiry := map[string]interface{}{}
for k, v := range resp {
respWithExpiry[k] = v
}
respWithExpiry["expiresIn"] = int((15 * time.Minute).Seconds())
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(resp)
json.NewEncoder(w).Encode(respWithExpiry)
}
// UploadFile handles direct file uploads via proxy
@ -116,7 +150,7 @@ func (h *StorageHandler) UploadFile(w http.ResponseWriter, r *http.Request) {
folder = "uploads"
}
validFolders := map[string]bool{"avatars": true, "resumes": true, "logos": true, "uploads": true}
validFolders := map[string]bool{"avatars": true, "resumes": true, "logos": true, "uploads": true, "documents": true}
if !validFolders[folder] {
http.Error(w, "Invalid folder", http.StatusBadRequest)
return
@ -163,3 +197,56 @@ func (h *StorageHandler) UploadFile(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(resp)
}
// GetDownloadURL returns a pre-signed URL for downloading a file.
// The object key arrives as JSON in the request body; the response carries
// the signed URL plus its lifetime in seconds.
func (h *StorageHandler) GetDownloadURL(w http.ResponseWriter, r *http.Request) {
	var req downloadURLRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		http.Error(w, "Invalid request body", http.StatusBadRequest)
		return
	}
	if req.Key == "" {
		http.Error(w, "Key is required", http.StatusBadRequest)
		return
	}

	signedURL, err := h.storageService.GetPresignedDownloadURL(r.Context(), req.Key)
	if err != nil {
		http.Error(w, "Failed to generate download URL: "+err.Error(), http.StatusInternalServerError)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	payload := map[string]interface{}{
		"downloadUrl": signedURL,
		"expiresIn":   int((60 * time.Minute).Seconds()),
	}
	json.NewEncoder(w).Encode(payload)
}
// DeleteFile removes an object from storage by key.
// The key is read from the "key" query parameter; a successful delete
// answers with 204 No Content and an empty body.
func (h *StorageHandler) DeleteFile(w http.ResponseWriter, r *http.Request) {
	objectKey := r.URL.Query().Get("key")
	if objectKey == "" {
		http.Error(w, "Key query parameter is required", http.StatusBadRequest)
		return
	}

	err := h.storageService.DeleteObject(r.Context(), objectKey)
	if err != nil {
		http.Error(w, "Failed to delete file: "+err.Error(), http.StatusInternalServerError)
		return
	}
	w.WriteHeader(http.StatusNoContent)
}
// TestConnection validates storage credentials and bucket access.
// Failures from the storage service are reported as 400 with the error detail.
func (h *StorageHandler) TestConnection(w http.ResponseWriter, r *http.Request) {
	err := h.storageService.TestConnection(r.Context())
	if err != nil {
		http.Error(w, "Storage connection failed: "+err.Error(), http.StatusBadRequest)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	body := map[string]string{"message": "Storage connection successful"}
	json.NewEncoder(w).Encode(body)
}

View file

@ -47,6 +47,7 @@ type Job struct {
FeaturedUntil *time.Time `json:"featuredUntil,omitempty" db:"featured_until"`
// Metadata
DatePosted *time.Time `json:"datePosted,omitempty" db:"date_posted"`
CreatedAt time.Time `json:"createdAt" db:"created_at"`
UpdatedAt time.Time `json:"updatedAt" db:"updated_at"`
}

View file

@ -249,8 +249,12 @@ func NewRouter() http.Handler {
// Storage (Presigned URL)
mux.Handle("GET /api/v1/storage/upload-url", authMiddleware.OptionalHeaderAuthGuard(http.HandlerFunc(storageHandler.GetUploadURL)))
mux.Handle("POST /api/v1/storage/upload-url", authMiddleware.OptionalHeaderAuthGuard(http.HandlerFunc(storageHandler.GetUploadURL)))
mux.Handle("POST /api/v1/storage/download-url", authMiddleware.OptionalHeaderAuthGuard(http.HandlerFunc(storageHandler.GetDownloadURL)))
mux.Handle("DELETE /api/v1/storage/files", authMiddleware.OptionalHeaderAuthGuard(http.HandlerFunc(storageHandler.DeleteFile)))
// Storage (Direct Proxy)
mux.Handle("POST /api/v1/storage/upload", authMiddleware.OptionalHeaderAuthGuard(http.HandlerFunc(storageHandler.UploadFile)))
mux.Handle("POST /api/v1/admin/storage/test-connection", authMiddleware.HeaderAuthGuard(adminOnly(http.HandlerFunc(storageHandler.TestConnection))))
mux.Handle("POST /api/v1/system/cloudflare/purge", authMiddleware.HeaderAuthGuard(adminOnly(http.HandlerFunc(adminHandlers.PurgeCache))))

View file

@ -26,9 +26,9 @@ func (s *JobService) CreateJob(req dto.CreateJobRequest, createdBy string) (*mod
INSERT INTO jobs (
company_id, created_by, title, description, salary_min, salary_max, salary_type, currency,
employment_type, working_hours, location, region_id, city_id,
requirements, benefits, questions, visa_support, language_level, status, created_at, updated_at, salary_negotiable
) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18, $19, $20, $21, $22)
RETURNING id, created_at, updated_at
requirements, benefits, questions, visa_support, language_level, status, date_posted, created_at, updated_at, salary_negotiable
) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18, $19, $20, $21, $22, $23)
RETURNING id, date_posted, created_at, updated_at
`
job := &models.Job{
@ -52,6 +52,7 @@ func (s *JobService) CreateJob(req dto.CreateJobRequest, createdBy string) (*mod
VisaSupport: req.VisaSupport,
LanguageLevel: req.LanguageLevel,
Status: req.Status,
DatePosted: ptrTime(time.Now()),
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
}
@ -63,8 +64,8 @@ func (s *JobService) CreateJob(req dto.CreateJobRequest, createdBy string) (*mod
query,
job.CompanyID, job.CreatedBy, job.Title, job.Description, job.SalaryMin, job.SalaryMax, job.SalaryType, job.Currency,
job.EmploymentType, job.WorkingHours, job.Location, job.RegionID, job.CityID,
job.Requirements, job.Benefits, job.Questions, job.VisaSupport, job.LanguageLevel, job.Status, job.CreatedAt, job.UpdatedAt, job.SalaryNegotiable,
).Scan(&job.ID, &job.CreatedAt, &job.UpdatedAt)
job.Requirements, job.Benefits, job.Questions, job.VisaSupport, job.LanguageLevel, job.Status, job.DatePosted, job.CreatedAt, job.UpdatedAt, job.SalaryNegotiable,
).Scan(&job.ID, &job.DatePosted, &job.CreatedAt, &job.UpdatedAt)
if err != nil {
fmt.Printf("[JOB_SERVICE ERROR] INSERT query failed: %v\n", err)
@ -80,7 +81,7 @@ func (s *JobService) GetJobs(filter dto.JobFilterQuery) ([]models.JobWithCompany
baseQuery := `
SELECT
j.id, j.company_id, j.title, j.description, j.salary_min, j.salary_max, j.salary_type,
j.employment_type, j.work_mode, j.working_hours, j.location, j.status, j.salary_negotiable, j.is_featured, j.created_at, j.updated_at,
j.employment_type, j.work_mode, j.working_hours, j.location, j.status, j.salary_negotiable, j.is_featured, COALESCE(j.date_posted, j.created_at) AS date_posted, j.created_at, j.updated_at,
CASE
WHEN c.type = 'CANDIDATE_WORKSPACE' OR c.name LIKE 'Candidate - %' THEN ''
ELSE COALESCE(c.name, '')
@ -240,19 +241,19 @@ func (s *JobService) GetJobs(filter dto.JobFilterQuery) ([]models.JobWithCompany
}
if hours > 0 {
cutoffTime := time.Now().Add(-time.Duration(hours) * time.Hour)
baseQuery += fmt.Sprintf(" AND j.created_at >= $%d", argId)
countQuery += fmt.Sprintf(" AND j.created_at >= $%d", argId)
baseQuery += fmt.Sprintf(" AND COALESCE(j.date_posted, j.created_at) >= $%d", argId)
countQuery += fmt.Sprintf(" AND COALESCE(j.date_posted, j.created_at) >= $%d", argId)
args = append(args, cutoffTime)
argId++
}
}
// Sorting
sortClause := " ORDER BY j.is_featured DESC, j.created_at DESC" // default
sortClause := " ORDER BY j.is_featured DESC, COALESCE(j.date_posted, j.created_at) DESC" // default
if filter.SortBy != nil {
switch *filter.SortBy {
case "recent", "date":
sortClause = " ORDER BY j.is_featured DESC, j.created_at DESC"
sortClause = " ORDER BY j.is_featured DESC, COALESCE(j.date_posted, j.created_at) DESC"
case "salary", "salary_asc":
sortClause = " ORDER BY j.salary_min ASC NULLS LAST"
case "salary_desc":
@ -298,7 +299,7 @@ func (s *JobService) GetJobs(filter dto.JobFilterQuery) ([]models.JobWithCompany
var j models.JobWithCompany
if err := rows.Scan(
&j.ID, &j.CompanyID, &j.Title, &j.Description, &j.SalaryMin, &j.SalaryMax, &j.SalaryType,
&j.EmploymentType, &j.WorkMode, &j.WorkingHours, &j.Location, &j.Status, &j.SalaryNegotiable, &j.IsFeatured, &j.CreatedAt, &j.UpdatedAt,
&j.EmploymentType, &j.WorkMode, &j.WorkingHours, &j.Location, &j.Status, &j.SalaryNegotiable, &j.IsFeatured, &j.DatePosted, &j.CreatedAt, &j.UpdatedAt,
&j.CompanyName, &j.CompanyLogoURL, &j.RegionName, &j.CityName,
&j.ViewCount, &j.FeaturedUntil, &j.ApplicationsCount,
); err != nil {
@ -321,14 +322,14 @@ func (s *JobService) GetJobByID(id string) (*models.Job, error) {
query := `
SELECT id, company_id, title, description, salary_min, salary_max, salary_type,
employment_type, working_hours, location, region_id, city_id,
requirements, benefits, visa_support, language_level, status, is_featured, featured_until, view_count, created_at, updated_at,
requirements, benefits, visa_support, language_level, status, is_featured, featured_until, view_count, date_posted, created_at, updated_at,
salary_negotiable, currency, work_mode
FROM jobs WHERE id = $1
`
err := s.DB.QueryRow(query, id).Scan(
&j.ID, &j.CompanyID, &j.Title, &j.Description, &j.SalaryMin, &j.SalaryMax, &j.SalaryType,
&j.EmploymentType, &j.WorkingHours, &j.Location, &j.RegionID, &j.CityID,
&j.Requirements, &j.Benefits, &j.VisaSupport, &j.LanguageLevel, &j.Status, &j.IsFeatured, &j.FeaturedUntil, &j.ViewCount, &j.CreatedAt, &j.UpdatedAt,
&j.Requirements, &j.Benefits, &j.VisaSupport, &j.LanguageLevel, &j.Status, &j.IsFeatured, &j.FeaturedUntil, &j.ViewCount, &j.DatePosted, &j.CreatedAt, &j.UpdatedAt,
&j.SalaryNegotiable, &j.Currency, &j.WorkMode,
)
if err != nil {
@ -470,3 +471,7 @@ func (s *JobService) DeleteJob(id string) error {
_, err := s.DB.Exec("DELETE FROM jobs WHERE id = $1", id)
return err
}
func ptrTime(t time.Time) *time.Time {
return &t
}

View file

@ -85,6 +85,16 @@ func (s *StorageService) getConfig(ctx context.Context) (UploadConfig, error) {
}
// getClient returns a presign client and the configured bucket name by
// wrapping the raw S3 client from getS3Client with s3.NewPresignClient.
func (s *StorageService) getClient(ctx context.Context) (*s3.PresignClient, string, error) {
	raw, bucket, err := s.getS3Client(ctx)
	if err != nil {
		return nil, "", err
	}
	return s3.NewPresignClient(raw), bucket, nil
}
func (s *StorageService) getS3Client(ctx context.Context) (*s3.Client, string, error) {
uCfg, err := s.getConfig(ctx)
if err != nil {
return nil, "", err
@ -104,13 +114,20 @@ func (s *StorageService) getClient(ctx context.Context) (*s3.PresignClient, stri
o.BaseEndpoint = aws.String(uCfg.Endpoint)
o.UsePathStyle = true // Often needed for R2/MinIO
})
return client, uCfg.Bucket, nil
}
psClient := s3.NewPresignClient(client)
return psClient, uCfg.Bucket, nil
// sanitizeObjectKey normalizes a client-supplied object key by removing
// surrounding whitespace and any leading slashes, preventing accidental
// absolute-style keys.
func (s *StorageService) sanitizeObjectKey(key string) string {
	cleaned := strings.TrimSpace(key)
	return strings.TrimLeft(cleaned, "/")
}
// GetPresignedUploadURL generates a URL for PUT requests
func (s *StorageService) GetPresignedUploadURL(ctx context.Context, key string, contentType string) (string, error) {
key = s.sanitizeObjectKey(key)
if key == "" {
return "", fmt.Errorf("key is required")
}
psClient, bucket, err := s.getClient(ctx)
if err != nil {
return "", err
@ -131,6 +148,54 @@ func (s *StorageService) GetPresignedUploadURL(ctx context.Context, key string,
return req.URL, nil
}
// GetPresignedDownloadURL generates a URL for GET requests.
// The key is sanitized first; an empty result is rejected. The signed URL
// is valid for 60 minutes.
func (s *StorageService) GetPresignedDownloadURL(ctx context.Context, key string) (string, error) {
	key = s.sanitizeObjectKey(key)
	if key == "" {
		return "", fmt.Errorf("key is required")
	}

	psClient, bucket, err := s.getClient(ctx)
	if err != nil {
		return "", err
	}

	input := &s3.GetObjectInput{
		Bucket: aws.String(bucket),
		Key:    aws.String(key),
	}
	req, err := psClient.PresignGetObject(ctx, input, func(o *s3.PresignOptions) {
		o.Expires = 60 * time.Minute
	})
	if err != nil {
		return "", fmt.Errorf("failed to presign download: %w", err)
	}
	return req.URL, nil
}
// DeleteObject removes an object from storage.
// The key is sanitized before use; an empty (post-sanitization) key is
// treated as invalid input rather than sent to the backend.
func (s *StorageService) DeleteObject(ctx context.Context, key string) error {
	key = s.sanitizeObjectKey(key)
	if key == "" {
		return fmt.Errorf("key is required")
	}

	client, bucket, err := s.getS3Client(ctx)
	if err != nil {
		return err
	}

	input := &s3.DeleteObjectInput{
		Bucket: aws.String(bucket),
		Key:    aws.String(key),
	}
	if _, err := client.DeleteObject(ctx, input); err != nil {
		return fmt.Errorf("failed to delete object: %w", err)
	}
	return nil
}
// TestConnection checks if the creds are valid and bucket is accessible
func (s *StorageService) TestConnection(ctx context.Context) error {
psClient, bucket, err := s.getClient(ctx)

View file

@ -0,0 +1,16 @@
-- Migration: Add date_posted to jobs
-- Description: Supports explicit posting date field consumed by frontend filters/sorting.

-- Column is left nullable so the backfill below can run; readers fall back
-- with COALESCE(date_posted, created_at) when the value is absent.
ALTER TABLE jobs
ADD COLUMN IF NOT EXISTS date_posted TIMESTAMP WITH TIME ZONE;

-- Backfill: existing rows are considered posted at their creation time.
UPDATE jobs
SET date_posted = created_at
WHERE date_posted IS NULL;

-- New rows default to the insertion time unless the application supplies one.
ALTER TABLE jobs
ALTER COLUMN date_posted SET DEFAULT NOW();

-- Supports newest-first listing/sorting on the posting date.
CREATE INDEX IF NOT EXISTS idx_jobs_date_posted ON jobs(date_posted DESC);

COMMENT ON COLUMN jobs.date_posted IS 'Public posting timestamp used by listing/filtering UX';