Replace sleep with actual URL fetching. Worker scrapes HTML title from URLs, respects robots.txt, and includes proper User-Agent headers. Scraped titles stored in SQLite and sent via webhook callback.
53 lines · 1.0 KiB · Go
package main
|
|
|
|
import (
	"encoding/json"
	"fmt"
	"io"
	"log"
	"net/http"

	"alpenqueue/pkg/db"
	"alpenqueue/pkg/worker"
)
|
|
|
|
func main() {
|
|
database, err := db.Init("./alpenqueue.db")
|
|
if err != nil {
|
|
log.Fatal(err)
|
|
}
|
|
defer database.Close()
|
|
|
|
worker.Start(database)
|
|
|
|
http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
|
|
w.Write([]byte("AlpenQueue running!"))
|
|
})
|
|
|
|
http.HandleFunc("/jobs", func(w http.ResponseWriter, r *http.Request) {
|
|
if r.Method != http.MethodPost {
|
|
http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
|
|
return
|
|
}
|
|
|
|
var req struct {
|
|
WebhookURL string `json:"webhook_url"`
|
|
URL string `json:"url"`
|
|
}
|
|
|
|
body, _ := io.ReadAll(r.Body)
|
|
json.Unmarshal(body, &req)
|
|
|
|
id, err := db.CreateJob(database, req.WebhookURL, req.URL)
|
|
if err != nil {
|
|
http.Error(w, err.Error(), http.StatusInternalServerError)
|
|
return
|
|
}
|
|
|
|
w.WriteHeader(http.StatusCreated)
|
|
fmt.Fprintf(w, "Job %d created\n", id)
|
|
})
|
|
|
|
log.Println("Server starting on :8080")
|
|
http.ListenAndServe(":8080", nil)
|
|
}
|