package main

import (
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"log"
	"net"
	"net/http"
	"net/url"
	"os"
	"path"
	"regexp"
	"strconv"
	"strings"
	"syscall"
	"time"
	"unicode/utf8"
)

const (
	targetURI = "http://as-hls-%s-live.akamaized.net/pool_%s/live/%s/" +
		"%s/%s.isml/%s-audio%%3d%s.norewind.m3u8"
	networksURI1 = "https://rms.api.bbc.co.uk/radio/networks.json"
	networksURI2 = "https://rms.api.bbc.co.uk/v2/networks/%s"
	metaURI      = "https://rms.api.bbc.co.uk/v2/services/%s/segments/latest"
)

var client = &http.Client{Transport: &http.Transport{}}
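
// get performs an HTTP GET through the shared client,
// using our own User-Agent header.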
func get(url string) (resp *http.Response, err error) {
	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		return nil, err
	}
	// lstn.lv returned 403 for the default net.http User-Agent.
	req.Header.Set("User-Agent", "bbc-on-ice/1")
	return client.Do(req)
}
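
// getServiceTitle1 looks the service ID up in the RMS networks listing
// and returns its title, if present there.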
func getServiceTitle1(name string) (string, error) {
	resp, err := get(networksURI1)
	if resp != nil {
		defer resp.Body.Close()
	}
	if err != nil {
		return name, err
	}
	b, err := io.ReadAll(resp.Body)

	var v struct {
		Results []struct {
			Services []struct {
				ID    string `json:"id"`
				Title string `json:"title"`
			} `json:"services"`
		} `json:"results"`
	}
	err = json.Unmarshal(b, &v)
	if err != nil {
		return name, errors.New("invalid metadata response")
	}

	for _, network := range v.Results {
		for _, service := range network.Services {
			if service.ID == name {
				return service.Title, nil
			}
		}
	}
	return name, errors.New("unknown service")
}

// getServiceTitle returns a human-friendly identifier for a BBC service ID.
func getServiceTitle(name string) (string, error) {
	// This endpoint is incomplete,
	// but it contains the kind of service titles we want.
	title, err := getServiceTitle1(name)
	if err == nil {
		return title, nil
	}

	// Network IDs tend to coincide with service IDs.
	resp, err := get(fmt.Sprintf(networksURI2, name))
	if resp != nil {
		defer resp.Body.Close()
	}
	if err != nil {
		return name, err
	}
	b, err := io.ReadAll(resp.Body)

	var v struct {
		LongTitle string `json:"long_title"`
	}
	err = json.Unmarshal(b, &v)
	if err != nil {
		return name, errors.New("invalid metadata response")
	}
	if v.LongTitle == "" {
		return name, errors.New("unknown service")
	}
	return v.LongTitle, nil
}

type meta struct {
	title   string // what's playing right now
	timeout uint   // timeout for the next poll in ms
}

var errNoSong = errors.New("no song is playing")

// getMeta retrieves and decodes metadata info from an independent webservice.
func getMeta(name string) (*meta, error) {
	resp, err := get(fmt.Sprintf(metaURI, name))
	if resp != nil {
		defer resp.Body.Close()
	}
	if err != nil {
		return nil, err
	}
	b, err := io.ReadAll(resp.Body)
	if os.Getenv("DEBUG") != "" {
		log.Println(string(b))
	}

	// TODO: update more completely for the new OpenAPI
	//  - `broadcasts/poll/bbc_radio_one` looks almost useful
	//  - https://rms.api.bbc.co.uk/v2/experience/inline/play/${name}
	//    seems to be what we want, even provides timer/polling values
	var v struct {
		Data []struct {
			Titles struct {
				Primary   string  `json:"primary"`
				Secondary *string `json:"secondary"`
				Tertiary  *string `json:"tertiary"`
			} `json:"titles"`
			Offset struct {
				NowPlaying bool `json:"now_playing"`
			} `json:"offset"`
		} `json:"data"`
	}
	err = json.Unmarshal(b, &v)
	if err != nil {
		return nil, errors.New("invalid metadata response")
	}
	if len(v.Data) == 0 || !v.Data[0].Offset.NowPlaying {
		return nil, errNoSong
	}

	titles := v.Data[0].Titles
	parts := []string{titles.Primary}
	if titles.Secondary != nil {
		parts = append(parts, *titles.Secondary)
	}
	if titles.Tertiary != nil {
		parts = append(parts, *titles.Tertiary)
	}
	return &meta{timeout: 5000, title: strings.Join(parts, " - ")}, nil
}

// resolveM3U8 resolves an M3U8 playlist to the first link that seems to
// be playable, possibly recursing.
func resolveM3U8(target string) (out []string, err error) {
	resp, err := get(target)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	b, err := io.ReadAll(resp.Body)
	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("%s: %s", target, resp.Status)
	}
	if !utf8.Valid(b) {
		return nil, errors.New("invalid UTF-8")
	}
	lines := strings.Split(string(b), "\n")
	for _, line := range lines {
		if strings.HasPrefix(line, "#") {
			continue
		}
		if !strings.Contains(line, "/") {
			// Seems to be a relative link, let's make it absolute.
			dir, _ := path.Split(target)
			line = dir + line
		}
		if strings.HasSuffix(line, "m3u8") {
			// The playlist seems to recurse, and so will we.
			// XXX: This should be bounded, not just by the stack.
			return resolveM3U8(line)
		}
		out = append(out, line)
	}
	return out, nil
}
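
// resolveURI points at a third-party resolver of current stream URLs;
// we only use its output to discover the pool number below.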
const resolveURI = "https://lstn.lv/bbcradio.m3u8?station=%s"

var poolRE = regexp.MustCompile(`/pool_([^/]+)/`)

// resolvePool figures out the randomized part of stream URIs.
func resolvePool(name string) (pool string, err error) {
	target := fmt.Sprintf(resolveURI, url.QueryEscape(name))
	resp, err := get(target)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()

	b, err := io.ReadAll(resp.Body)
	if resp.StatusCode != http.StatusOK {
		return "", fmt.Errorf("%s: %s", target, resp.Status)
	}
	for _, line := range strings.Split(string(b), "\n") {
		if strings.HasPrefix(line, "#") {
			continue
		}
		if m := poolRE.FindStringSubmatch(line); m == nil {
			return "", fmt.Errorf("%s: unexpected URI", target)
		} else {
			return m[1], nil
		}
	}
	return "", fmt.Errorf("%s: no URI", target)
}

// metaProc periodically polls the sub-URL given by name for titles and sends
// them out the given channel. Never returns prematurely.
func metaProc(ctx context.Context, name string, out chan<- string) {
	defer close(out)

	// "polling_timeout" seems to normally be 25 seconds, which is a lot,
	// especially considering all the possible additional buffering.
	const maxInterval = 5 * time.Second

	var current, last string
	var interval time.Duration
	for {
		meta, err := getMeta(name)
		if err == errNoSong {
			interval, current = maxInterval, ""
		} else if err != nil {
			interval, current = maxInterval, err.Error()
		} else {
			current = meta.title
			interval = time.Duration(meta.timeout) * time.Millisecond
			if interval > maxInterval {
				interval = maxInterval
			}
		}
		if current != last {
			select {
			case out <- current:
			case <-ctx.Done():
				return
			}
			last = current
		}

		select {
		case <-time.After(interval):
		case <-ctx.Done():
			return
		}
	}
}

// urlProc periodically checks the playlist for yet unseen URLs and sends them
// over the channel. Assumes that URLs are incremental for simplicity, although
// there doesn't seem to be any such guarantee by the HLS protocol.
func urlProc(ctx context.Context, playlistURL string, out chan<- string) {
	defer close(out)

	highest := ""
	for {
		target, err := resolveM3U8(playlistURL)
		if err != nil {
			return
		}
		for _, url := range target {
			if url <= highest {
				continue
			}
			select {
			case out <- url:
				highest = url
			case <-ctx.Done():
				return
			}
		}
		// Media players will happily buffer the whole playlist at once,
		// a small (less than target duration) additional pause is appropriate.
		time.Sleep(3 * time.Second)
	}
}
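
// dataProc fetches HLS media segments as urlProc discovers them, and passes
// their contents down the out channel in chunks of at most maxChunk bytes.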
// https://tools.ietf.org/html/rfc8216
// http://www.gpac-licensing.com/2014/12/08/apple-hls-technical-depth/
func dataProc(ctx context.Context, playlistURL string, maxChunk int,
	out chan<- []byte) {
	defer close(out)

	// The channel is buffered so that the urlProc can fetch in advance.
	urls := make(chan string, 3)
	go urlProc(ctx, playlistURL, urls)

	for url := range urls {
		resp, err := get(url)
		if resp != nil {
			defer resp.Body.Close()
		}
		if err != nil {
			return
		}

		for {
			chunk := make([]byte, maxChunk)
			n, err := resp.Body.Read(chunk)

			select {
			case out <- chunk[:n]:
			case <-ctx.Done():
				return
			}
			if err == io.EOF {
				break
			}
			if err != nil {
				return
			}
		}
	}
}
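
// pathRE splits request paths of the form /region/quality/service_id.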
var pathRE = regexp.MustCompile(`^/(.*?)/(.*?)/(.*?)$`)
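
// proxy hijacks the connection and re-streams the BBC HLS broadcast as an
// ICY/SHOUTcast-style stream, interleaving title updates in-band when the
// client requests them with the Icy-MetaData header.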
func proxy(w http.ResponseWriter, req *http.Request) {
	const metaint = 1 << 15
	m := pathRE.FindStringSubmatch(req.URL.Path)
	if m == nil {
		http.NotFound(w, req)
		return
	}
	hijacker, ok := w.(http.Hijacker)
	if !ok {
		// We're not using TLS where HTTP/2 could have caused this.
		panic("cannot hijack connection")
	}

	// [ww]/[uk], [48000/96000]/[128000/320000], bbc_radio_one/bbc_1xtra/...
	region, quality, name, pool := m[1], m[2], m[3], "904"
	if p, err := resolvePool(name); err != nil {
		log.Printf("failed to resolve pool: %s\n", err)
	} else {
		pool = p
	}

	mediaPlaylistURL :=
		fmt.Sprintf(targetURI, region, pool, region, name, name, name, quality)

	// This validates the parameters as a side-effect.
	media, err := resolveM3U8(mediaPlaylistURL)
	if err == nil && len(media) == 0 {
		err = errors.New("cannot resolve playlist")
	}
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	wantMeta := req.Header.Get("Icy-MetaData") == "1"
	resp, err := http.Head(media[0])
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	conn, bufrw, err := hijacker.Hijack()
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	defer conn.Close()

	serviceTitle, _ := getServiceTitle(name)

	fmt.Fprintf(bufrw, "ICY 200 OK\r\n")
	fmt.Fprintf(bufrw, "icy-name:%s\r\n", serviceTitle)
	// BBC marks this as a video type, maybe just force audio/mpeg.
	fmt.Fprintf(bufrw, "content-type:%s\r\n", resp.Header["Content-Type"][0])
	fmt.Fprintf(bufrw, "icy-pub:%d\r\n", 0)
	if wantMeta {
		fmt.Fprintf(bufrw, "icy-metaint: %d\r\n", metaint)
	}
	fmt.Fprintf(bufrw, "\r\n")

	metaChan := make(chan string)
	go metaProc(req.Context(), name, metaChan)

	chunkChan := make(chan []byte)
	go dataProc(req.Context(), mediaPlaylistURL, metaint, chunkChan)

	// dataProc may return less data near the end of a subfile, so we give it
	// a maximum count of bytes to return at once and do our own buffering.
	var queuedMetaUpdate, queuedData []byte
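	// writeMeta emits a single ICY metadata block: one length byte counting
	// 16-byte units, then the padded "StreamTitle='...'" payload. A zero
	// length byte means no update.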
	writeMeta := func() error {
		if !wantMeta {
			return nil
		}

		var meta [1 + 16*255]byte
		meta[0] = byte((copy(meta[1:], queuedMetaUpdate) + 15) / 16)
		queuedMetaUpdate = nil

		_, err := bufrw.Write(meta[:1+int(meta[0])*16])
		return err
	}
	for {
		select {
		case title := <-metaChan:
			if title == "" {
				title = serviceTitle
			}
			queuedMetaUpdate = []byte(fmt.Sprintf("StreamTitle='%s'",
				strings.Replace(title, "'", "’", -1)))
		case chunk, ok := <-chunkChan:
			if !ok {
				return
			}
			missing := metaint - len(queuedData)
			if len(chunk) < missing {
				queuedData = append(queuedData, chunk...)
				continue
			}
			queuedData = append(queuedData, chunk[:missing]...)
			if _, err := bufrw.Write(queuedData); err != nil {
				return
			}
			queuedData = chunk[missing:]
			if writeMeta() != nil || bufrw.Flush() != nil {
				return
			}
		}
	}
}
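
// socketActivationListener returns a listener for the socket passed in by
// systemd socket activation, or nil if we were not started that way.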
// https://www.freedesktop.org/software/systemd/man/sd_listen_fds.html
func socketActivationListener() net.Listener {
	pid, err := strconv.Atoi(os.Getenv("LISTEN_PID"))
	if err != nil || pid != os.Getpid() {
		return nil
	}

	nfds, err := strconv.Atoi(os.Getenv("LISTEN_FDS"))
	if err != nil || nfds == 0 {
		log.Println("LISTEN_FDS unworkable")
		return nil
	} else if nfds > 1 {
		log.Fatalln("not supporting more than one listening socket")
	}

	const firstListenFd = 3
	syscall.CloseOnExec(firstListenFd)
	ln, err := net.FileListener(os.NewFile(firstListenFd, "socket activation"))
	if err != nil {
		log.Fatalln(err)
	}
	return ln
}

// Had to copy this from Server.ListenAndServe.
type tcpKeepAliveListener struct{ *net.TCPListener }

func (ln tcpKeepAliveListener) Accept() (c net.Conn, err error) {
	tc, err := ln.AcceptTCP()
	if err != nil {
		return
	}
	_ = tc.SetKeepAlive(true)
	_ = tc.SetKeepAlivePeriod(3 * time.Minute)
	return tc, nil
}

func main() {
	var listener net.Listener
	if ln := socketActivationListener(); ln != nil {
		// Keepalives can be set in the systemd unit, see systemd.socket(5).
		listener = ln
	} else {
		if len(os.Args) < 2 {
			log.Fatalf("usage: %s LISTEN-ADDR\n", os.Args[0])
		}
		if ln, err := net.Listen("tcp", os.Args[1]); err != nil {
			log.Fatalln(err)
		} else {
			listener = tcpKeepAliveListener{ln.(*net.TCPListener)}
		}
	}

	http.HandleFunc("/", proxy)
	// We don't need to clean up properly since we store no data.
	log.Fatalln(http.Serve(listener, nil))
}