gropple/main.go

package main
import (
	"embed"
	"encoding/json"
	"fmt"
	"html/template"
	"io"
	"log"
	"net/http"
	"os/exec"
	"regexp"
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/gorilla/mux"
	"github.com/tardisx/gropple/config"
	"github.com/tardisx/gropple/version"
)
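// download represents a single fetch job: the URL being fetched, the child
// process running it, and the progress metadata (state, ETA, percent, files)
// scraped from that process's output.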
type download struct {
	Id       int      `json:"id"`
	Url      string   `json:"url"`
	Pid      int      `json:"pid"`
	ExitCode int      `json:"exit_code"`
	State    string   `json:"state"`
	Finished bool     `json:"finished"`
	Files    []string `json:"files"`
	Eta      string   `json:"eta"`
	Percent  float32  `json:"percent"`
	Log      []string `json:"log"`
}
var downloads []*download
var downloadId = 0
var versionInfo = version.Info{CurrentVersion: "v0.5.0"}
//go:embed web
var webFS embed.FS
var conf *config.Config
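// main sets up the HTTP routes, starts a background goroutine that checks
// GitHub for newer releases, and then serves the web UI and REST API on the
// configured port.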
func main() {
	conf = config.DefaultConfig()

	r := mux.NewRouter()
	r.HandleFunc("/", HomeHandler)
	r.HandleFunc("/config", ConfigHandler)
	r.HandleFunc("/fetch", FetchHandler)
	r.HandleFunc("/rest/fetch/info", FetchInfoHandler)
	r.HandleFunc("/rest/fetch/info/{id}", FetchInfoOneHandler)
	r.HandleFunc("/rest/version", VersionHandler)
	r.HandleFunc("/rest/config", ConfigRESTHandler)

	http.Handle("/", r)
	srv := &http.Server{
		Handler: r,
		Addr:    fmt.Sprintf(":%d", conf.Server.Port),
		// Good practice: enforce timeouts for servers you create!
		WriteTimeout: 5 * time.Second,
		ReadTimeout:  5 * time.Second,
	}

	// check for a new version every 4 hours
	go func() {
		for {
			versionInfo.UpdateGitHubVersion()
			time.Sleep(time.Hour * 4)
		}
	}()

	log.Printf("starting gropple %s - https://github.com/tardisx/gropple", versionInfo.CurrentVersion)
	log.Printf("go to %s for details on installing the bookmarklet and to check status", conf.Server.Address)
	log.Fatal(srv.ListenAndServe())
}
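// VersionHandler returns version information as JSON, or a 400 if the
// latest version has not yet been fetched from GitHub.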
func VersionHandler(w http.ResponseWriter, r *http.Request) {
	if versionInfo.GithubVersionFetched {
		b, _ := json.Marshal(versionInfo)
		w.Write(b)
	} else {
		w.WriteHeader(400)
	}
}
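// HomeHandler serves the index page, listing all downloads and embedding a
// bookmarklet that points back at this server's /fetch endpoint.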
func HomeHandler(w http.ResponseWriter, r *http.Request) {
	w.WriteHeader(http.StatusOK)

	bookmarkletURL := fmt.Sprintf("javascript:(function(f,s,n,o){window.open(f+encodeURIComponent(s),n,o)}('%s/fetch?url=',window.location,'yourform','width=500,height=500'));", conf.Server.Address)

	t, err := template.ParseFS(webFS, "web/layout.tmpl", "web/index.html")
	if err != nil {
		panic(err)
	}

	type Info struct {
		Downloads      []*download
		BookmarkletURL template.URL
	}

	info := Info{
		Downloads:      downloads,
		BookmarkletURL: template.URL(bookmarkletURL),
	}

	err = t.ExecuteTemplate(w, "layout", info)
	if err != nil {
		panic(err)
	}
}
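// ConfigHandler serves the configuration page (web/config.html) rendered
// into the shared layout.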
func ConfigHandler(w http.ResponseWriter, r *http.Request) {
	w.WriteHeader(http.StatusOK)
	t, err := template.ParseFS(webFS, "web/layout.tmpl", "web/config.html")
	if err != nil {
		panic(err)
	}
	err = t.ExecuteTemplate(w, "layout", nil)
	if err != nil {
		panic(err)
	}
}
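// ConfigRESTHandler returns the current configuration as JSON. On POST it
// first updates the configuration from the JSON request body, returning a
// 400 and an error message if the update is rejected.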
func ConfigRESTHandler(w http.ResponseWriter, r *http.Request) {
	type errorResponse struct {
		Error string `json:"error"`
	}

	if r.Method == "POST" {
		log.Printf("Updating config")
		b, err := io.ReadAll(r.Body)
		if err != nil {
			panic(err)
		}
		err = conf.UpdateFromJSON(b)
		if err != nil {
			errorRes := errorResponse{Error: err.Error()}
			errorResB, _ := json.Marshal(errorRes)
			w.WriteHeader(400)
			w.Write(errorResB)
			return
		}
	}
	b, _ := json.Marshal(conf)
	w.Write(b)
}
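// FetchInfoOneHandler returns the JSON representation of a single download,
// looked up by the id in the request path.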
func FetchInfoOneHandler(w http.ResponseWriter, r *http.Request) {
	vars := mux.Vars(r)
	idString := vars["id"]
	if idString != "" {
		id, err := strconv.Atoi(idString)
		if err != nil {
			http.NotFound(w, r)
			return
		}
		for _, dl := range downloads {
			if dl.Id == id {
				b, _ := json.Marshal(dl)
				w.Write(b)
				return
			}
		}
		// no download with that id
		http.NotFound(w, r)
	} else {
		http.NotFound(w, r)
	}
}
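// FetchInfoHandler returns all downloads, current and completed, as JSON.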
func FetchInfoHandler(w http.ResponseWriter, r *http.Request) {
	b, _ := json.Marshal(downloads)
	w.Write(b)
}
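// FetchHandler is the endpoint the bookmarklet opens. It validates the url
// query parameter, creates a new download record, starts the download in a
// goroutine and renders the popup page (web/popup.html).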
func FetchHandler(w http.ResponseWriter, r *http.Request) {
	query := r.URL.Query()
	url, present := query["url"]
	if !present {
		w.WriteHeader(400)
		fmt.Fprint(w, "No url supplied")
		return
	} else {
		// check the URL for a sudden but inevitable betrayal
		if strings.Contains(url[0], conf.Server.Address) {
			w.WriteHeader(400)
			fmt.Fprint(w, "you mustn't gropple your gropple :-)")
			return
		}

		// create the record
		// XXX should be atomic!
		downloadId++
		newDownload := download{
			Id:       downloadId,
			Url:      url[0],
			State:    "starting",
			Finished: false,
			Eta:      "?",
			Percent:  0.0,
			Log:      make([]string, 0, 1000),
		}
		downloads = append(downloads, &newDownload)
		// XXX atomic ^^

		newDownload.Log = append(newDownload.Log, "start of log...")

		go func() {
			queue(&newDownload)
		}()

		t, err := template.ParseFS(webFS, "web/layout.tmpl", "web/popup.html")
		if err != nil {
			panic(err)
		}
		err = t.ExecuteTemplate(w, "layout", newDownload)
		if err != nil {
			panic(err)
		}
	}
}
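// queue runs the download command from the first download profile against
// dl.Url, streams its stdout and stderr through updateDownload until the
// process exits, then records the final state and exit code.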
func queue(dl *download) {
	cmdSlice := []string{}
	cmdSlice = append(cmdSlice, conf.DownloadProfiles[0].Args...)
	cmdSlice = append(cmdSlice, dl.Url)

	cmd := exec.Command(conf.DownloadProfiles[0].Command, cmdSlice...)
	cmd.Dir = conf.Server.DownloadPath

	stdout, err := cmd.StdoutPipe()
	if err != nil {
		dl.State = "failed"
		dl.Finished = true
		dl.Log = append(dl.Log, fmt.Sprintf("error setting up stdout pipe: %v", err))
		return
	}

	stderr, err := cmd.StderrPipe()
	if err != nil {
		dl.State = "failed"
		dl.Finished = true
		dl.Log = append(dl.Log, fmt.Sprintf("error setting up stderr pipe: %v", err))
		return
	}

	err = cmd.Start()
	if err != nil {
		dl.State = "failed"
		dl.Finished = true
		dl.Log = append(dl.Log, fmt.Sprintf("error starting youtube-dl: %v", err))
		return
	}
	dl.Pid = cmd.Process.Pid

	var wg sync.WaitGroup
	wg.Add(2)
	go func() {
		defer wg.Done()
		updateDownload(stdout, dl)
	}()
	go func() {
		defer wg.Done()
		updateDownload(stderr, dl)
	}()
	wg.Wait()
	cmd.Wait()

	dl.State = "complete"
	dl.Finished = true
	dl.ExitCode = cmd.ProcessState.ExitCode()
	if dl.ExitCode != 0 {
		dl.State = "failed"
	}
}
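// updateDownload consumes one of the command's output streams, appending
// each line to the download's log and passing it to updateMetadata to keep
// the progress fields current.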
func updateDownload(r io.Reader, dl *download) {
	// XXX not sure if we might get a partial line?
	buf := make([]byte, 1024)
	for {
		n, err := r.Read(buf)
		if n > 0 {
			s := string(buf[:n])
			lines := strings.Split(s, "\n")
			for _, l := range lines {
				if l == "" {
					continue
				}
				// append the raw log
				dl.Log = append(dl.Log, l)
				// look for the percent and eta and other metadata
				updateMetadata(dl, l)
			}
		}
		if err != nil {
			break
		}
	}
}
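// updateMetadata scrapes a single line of downloader output for progress
// information: ETA, percent complete, destination filenames, merged output
// files and intermediate files that have been deleted.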
func updateMetadata(dl *download, s string) {
	// [download] 49.7% of ~15.72MiB at 5.83MiB/s ETA 00:07
	etaRE := regexp.MustCompile(`download.+ETA +(\d\d:\d\d)`)
	matches := etaRE.FindStringSubmatch(s)
	if len(matches) == 2 {
		dl.Eta = matches[1]
		dl.State = "downloading"
	}

	percentRE := regexp.MustCompile(`download.+?([\d\.]+)%`)
	matches = percentRE.FindStringSubmatch(s)
	if len(matches) == 2 {
		p, err := strconv.ParseFloat(matches[1], 32)
		if err == nil {
			dl.Percent = float32(p)
		} else {
			panic(err)
		}
	}

	// This appears once per destination file
	// [download] Destination: Filename with spaces and other punctuation here be careful!.mp4
	filename := regexp.MustCompile(`download.+?Destination: (.+)$`)
	matches = filename.FindStringSubmatch(s)
	if len(matches) == 2 {
		dl.Files = append(dl.Files, matches[1])
	}

	// This means a file has been "created" by merging others
	// [ffmpeg] Merging formats into "Toto - Africa (Official HD Video)-FTQbiNvZqaY.mp4"
	mergedFilename := regexp.MustCompile(`Merging formats into "(.+)"$`)
	matches = mergedFilename.FindStringSubmatch(s)
	if len(matches) == 2 {
		dl.Files = append(dl.Files, matches[1])
	}

	// This means a file has been deleted
	// Gross - this time it's unquoted and has trailing guff
	// Deleting original file Toto - Africa (Official HD Video)-FTQbiNvZqaY.f137.mp4 (pass -k to keep)
	// This is very fragile
	deletedFile := regexp.MustCompile(`Deleting original file (.+) \(pass -k to keep\)$`)
	matches = deletedFile.FindStringSubmatch(s)
	if len(matches) == 2 {
		// find the index
		for i, f := range dl.Files {
			if f == matches[1] {
				dl.Files = append(dl.Files[:i], dl.Files[i+1:]...)
				break
			}
		}
	}
}