init
This commit is contained in:
commit
fa47df6d89
22 changed files with 1724 additions and 0 deletions
33
ffmpeg/const.go
Normal file
33
ffmpeg/const.go
Normal file
|
|
@ -0,0 +1,33 @@
|
|||
package ffmpeg
|
||||
|
||||
import "slices"
|
||||
|
||||
// SlicerTargetDurationSec is the target length, in seconds, of each
// segment the slicer produces; it is passed to ffmpeg's -segment_time
// option in MakeSegments (for both the video and audio streams).
const SlicerTargetDurationSec = 4
|
||||
|
||||
// ValidNVENCPresets lists every -preset value accepted by ffmpeg's NVENC
// encoders, taken verbatim from:
//
//	ffmpeg -hide_banner -h encoder=h264_nvenc
//	ffmpeg -hide_banner -h encoder=hevc_nvenc
var ValidNVENCPresets = []string{
	"default",
	"slow", "medium", "fast",
	"hp", "hq", "bd",
	"ll", "llhq", "llhp",
	"lossless", "losslesshp",
	"p1", "p2", "p3", "p4", "p5", "p6", "p7",
}

// IsNVENCPresetValid reports whether preset appears in ValidNVENCPresets.
func IsNVENCPresetValid(preset string) bool {
	return slices.Index(ValidNVENCPresets, preset) >= 0
}
|
||||
120
ffmpeg/ffmpeg.go
Normal file
120
ffmpeg/ffmpeg.go
Normal file
|
|
@ -0,0 +1,120 @@
|
|||
package ffmpeg
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path"
|
||||
"strconv"
|
||||
"stream-poc2/model"
|
||||
)
|
||||
|
||||
// EncodeChunk doesn't have the nicest function signature...
|
||||
func EncodeChunk(profileSettings model.VideoProfileSettings, nativeHeight int, preset string, origSegmentPath string, writer io.Writer) error {
|
||||
ffmpegArgs := []string{
|
||||
"-nostdin", "-hide_banner", "-loglevel", "error",
|
||||
}
|
||||
|
||||
if profileSettings.Resolution.Height != nativeHeight {
|
||||
// apply resize filter
|
||||
ffmpegArgs = append(ffmpegArgs,
|
||||
"-hwaccel", "cuda", "-hwaccel_output_format", "cuda",
|
||||
"-i", origSegmentPath,
|
||||
"-vf", fmt.Sprintf("scale_cuda=%d:%d", profileSettings.Resolution.Width, profileSettings.Resolution.Height),
|
||||
)
|
||||
} else {
|
||||
// just load the segment
|
||||
ffmpegArgs = append(ffmpegArgs,
|
||||
"-i", origSegmentPath,
|
||||
)
|
||||
}
|
||||
|
||||
ffmpegArgs = append(ffmpegArgs,
|
||||
"-copyts",
|
||||
"-c:v", profileSettings.FFMpegCodec(),
|
||||
)
|
||||
if preset != "" {
|
||||
ffmpegArgs = append(ffmpegArgs,
|
||||
"-preset", preset,
|
||||
)
|
||||
}
|
||||
|
||||
ffmpegArgs = append(ffmpegArgs,
|
||||
"-b:v", strconv.Itoa(profileSettings.AvgBitrate), "-maxrate", strconv.Itoa(profileSettings.MaxBitrate), "-bufsize", "1M",
|
||||
"-an", // <- no audio
|
||||
"-f", "mpegts", "-mpegts_copyts", "1",
|
||||
"-",
|
||||
)
|
||||
|
||||
log.Println("exec ffmpeg args: ", ffmpegArgs)
|
||||
|
||||
cmd := exec.Command("/usr/bin/ffmpeg", ffmpegArgs...)
|
||||
cmd.Stdout = writer
|
||||
cmd.Stderr = os.Stderr
|
||||
|
||||
err := cmd.Run()
|
||||
if err != nil {
|
||||
log.Println("Failed to run:", err)
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// MakeSegments has the ugliest function signature no the planet
|
||||
func MakeSegments(filepath, outdir string, audioStreams []model.AudioStream) (string, []string, error) {
|
||||
segmentsVideoCSV := path.Join(outdir, "segments_video.csv")
|
||||
segmentsAudioCSV := make([]string, len(audioStreams))
|
||||
|
||||
// do slicing
|
||||
ffmpegArgs := []string{
|
||||
"-nostdin", "-hide_banner", "-loglevel", "error",
|
||||
"-i", filepath,
|
||||
// video stream
|
||||
"-c:v", "copy", "-an",
|
||||
"-f", "ssegment", "-segment_format", "mpegts", "-segment_list", segmentsVideoCSV,
|
||||
"-segment_list_type", "csv", "-segment_time", strconv.Itoa(SlicerTargetDurationSec), path.Join(outdir, "video", "s%d.ts"),
|
||||
}
|
||||
|
||||
for i, audioStream := range audioStreams {
|
||||
csvName := path.Join(outdir, fmt.Sprintf("segments_audio_%d.csv", audioStream.Index))
|
||||
segmentsAudioCSV[i] = csvName
|
||||
|
||||
ffmpegArgs = append(ffmpegArgs,
|
||||
// audio streams (encode to mp3)
|
||||
"-vn", "-map", fmt.Sprintf("0:a:%d", i),
|
||||
)
|
||||
|
||||
if audioStream.Codec != "mp3" {
|
||||
// mp3 always works
|
||||
// TODO: This does not update the stream info in the metadata !!!
|
||||
log.Println("Transcoding audio to mp3 for stream ", audioStream.Index)
|
||||
ffmpegArgs = append(ffmpegArgs,
|
||||
"-c:a", "libmp3lame", "-q:a", "0",
|
||||
)
|
||||
} else {
|
||||
log.Println("Not transcoding audio as it is already mp3 for stream ", audioStream.Index)
|
||||
ffmpegArgs = append(ffmpegArgs,
|
||||
"-c:a", "copy",
|
||||
)
|
||||
}
|
||||
|
||||
ffmpegArgs = append(ffmpegArgs,
|
||||
"-f", "ssegment", "-segment_format", "mpegts", "-segment_list", csvName,
|
||||
"-segment_list_type", "csv", "-segment_time", strconv.Itoa(SlicerTargetDurationSec), path.Join(outdir, "audio", strconv.Itoa(audioStream.Index), "s%d.ts"),
|
||||
)
|
||||
}
|
||||
|
||||
cmd := exec.Command("/usr/bin/ffmpeg", ffmpegArgs...)
|
||||
cmd.Stderr = os.Stderr
|
||||
|
||||
err := cmd.Run()
|
||||
if err != nil {
|
||||
log.Println("failed to segment:", err)
|
||||
return "", nil, err
|
||||
}
|
||||
|
||||
return segmentsVideoCSV, segmentsAudioCSV, nil
|
||||
}
|
||||
86
ffmpeg/ffprobe.go
Normal file
86
ffmpeg/ffprobe.go
Normal file
|
|
@ -0,0 +1,86 @@
|
|||
package ffmpeg
|
||||
|
||||
import (
|
||||
"encoding/hex"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"os/exec"
|
||||
"strings"
|
||||
)
|
||||
|
||||
func FFProbe(fname string) (FFProbeOutput, error) {
|
||||
log.Println("probe", fname)
|
||||
cmd := exec.Command(
|
||||
"/usr/bin/ffprobe",
|
||||
"-loglevel", "error",
|
||||
fname,
|
||||
"-show_streams",
|
||||
"-of", "json",
|
||||
)
|
||||
cmd.Stderr = os.Stderr
|
||||
stdout, err := cmd.StdoutPipe()
|
||||
if err != nil {
|
||||
log.Println("Failed to pipe", err)
|
||||
return FFProbeOutput{}, err
|
||||
}
|
||||
|
||||
err = cmd.Start()
|
||||
if err != nil {
|
||||
log.Println("Failed start", err)
|
||||
return FFProbeOutput{}, err
|
||||
}
|
||||
|
||||
var ffout FFProbeOutput
|
||||
err = json.NewDecoder(stdout).Decode(&ffout)
|
||||
if err != nil {
|
||||
log.Println("Failed to decode", err)
|
||||
return FFProbeOutput{}, err
|
||||
}
|
||||
|
||||
return ffout, nil
|
||||
}
|
||||
|
||||
func FigureOutHLSCodecsParam(stream FFProbeStream) (string, error) {
|
||||
// TODO: This is still very lame
|
||||
var avcProfileBits = map[string]int{
|
||||
"High": 0x64,
|
||||
"Main": 0x4D,
|
||||
"Baseline": 0x42,
|
||||
}
|
||||
|
||||
var hevcProfileBits = map[string]int{
|
||||
"Main": 1,
|
||||
"Main 10": 2,
|
||||
}
|
||||
|
||||
if stream.CodecType == "video" {
|
||||
if stream.CodecName == "h264" {
|
||||
//avc1.<profile_idc><constraint_set_flags><level_idc>
|
||||
codecTagBytes, err := hex.DecodeString(strings.TrimPrefix(stream.CodecTag, "0x"))
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return fmt.Sprintf(
|
||||
"%s.%02X%02X%02X",
|
||||
stream.CodecTagString,
|
||||
avcProfileBits[stream.Profile],
|
||||
codecTagBytes[1],
|
||||
stream.Level,
|
||||
), nil
|
||||
}
|
||||
if stream.CodecName == "hevc" {
|
||||
// hvc1.<profile>.<compatibility>.<tier><level><constraints>
|
||||
return fmt.Sprintf(
|
||||
"%s.%d.L%d",
|
||||
stream.CodecTagString,
|
||||
hevcProfileBits[stream.Profile],
|
||||
stream.Level,
|
||||
), nil
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return "", fmt.Errorf("unrecognized format")
|
||||
}
|
||||
76
ffmpeg/model.go
Normal file
76
ffmpeg/model.go
Normal file
|
|
@ -0,0 +1,76 @@
|
|||
package ffmpeg
|
||||
|
||||
// FFProbeDispositions mirrors the "disposition" object ffprobe emits for
// each stream (see FFProbe, which runs `ffprobe -show_streams -of json`).
// Each field carries the integer flag ffprobe reports for that
// disposition (0 or 1 in practice).
type FFProbeDispositions struct {
	Default         int `json:"default"`
	Dub             int `json:"dub"`
	Original        int `json:"original"`
	Comment         int `json:"comment"`
	Lyrics          int `json:"lyrics"`
	Karaoke         int `json:"karaoke"`
	Forced          int `json:"forced"`
	HearingImpaired int `json:"hearing_impaired"`
	VisualImpaired  int `json:"visual_impaired"`
	CleanEffects    int `json:"clean_effects"`
	AttachedPic     int `json:"attached_pic"`
	TimedThumbnails int `json:"timed_thumbnails"`
	NonDiegetic     int `json:"non_diegetic"`
	Captions        int `json:"captions"`
	Descriptions    int `json:"descriptions"`
	Metadata        int `json:"metadata"`
	Dependent       int `json:"dependent"`
	StillImage      int `json:"still_image"`
	Multilayer      int `json:"multilayer"`
}
|
||||
|
||||
// FFProbeStream mirrors one entry of the "streams" array produced by
// `ffprobe -show_streams -of json` (see FFProbe). Fields tagged
// omitempty only appear for the matching stream kind (audio vs video).
// Note that ffprobe reports several numeric quantities as strings
// (SampleRate, BitRate, Duration, StartTime) — parse before arithmetic.
type FFProbeStream struct {
	// Common stream identity.
	Index          int    `json:"index"`
	CodecName      string `json:"codec_name"`
	CodecLongName  string `json:"codec_long_name"`
	CodecType      string `json:"codec_type"` // "video" or "audio" are the values this package branches on
	CodecTagString string `json:"codec_tag_string"`
	CodecTag       string `json:"codec_tag"` // hex string like "0x...", decoded in FigureOutHLSCodecsParam

	// Audio-only fields.
	SampleFmt      string `json:"sample_fmt,omitempty"`
	SampleRate     string `json:"sample_rate,omitempty"`
	Channels       int    `json:"channels,omitempty"`
	ChannelLayout  string `json:"channel_layout,omitempty"`
	BitsPerSample  int    `json:"bits_per_sample,omitempty"`
	InitialPadding int    `json:"initial_padding,omitempty"`
	DmixMode       string `json:"dmix_mode,omitempty"`
	LtrtCmixlev    string `json:"ltrt_cmixlev,omitempty"`
	LtrtSurmixlev  string `json:"ltrt_surmixlev,omitempty"`
	LoroCmixlev    string `json:"loro_cmixlev,omitempty"`
	LoroSurmixlev  string `json:"loro_surmixlev,omitempty"`

	// Timing.
	RFrameRate   string `json:"r_frame_rate"`
	AvgFrameRate string `json:"avg_frame_rate"`
	TimeBase     string `json:"time_base"`
	StartPts     int    `json:"start_pts"`
	StartTime    string `json:"start_time"`

	BitRate     string              `json:"bit_rate,omitempty"`
	Disposition FFProbeDispositions `json:"disposition"`
	Tags        map[string]string   `json:"tags"`
	DurationTs  int                 `json:"duration_ts,omitempty"`
	Duration    string              `json:"duration,omitempty"`

	// Video-only fields; Profile and Level feed FigureOutHLSCodecsParam.
	Profile            string `json:"profile,omitempty"`
	Width              int    `json:"width,omitempty"`
	Height             int    `json:"height,omitempty"`
	CodedWidth         int    `json:"coded_width,omitempty"`
	CodedHeight        int    `json:"coded_height,omitempty"`
	HasBFrames         int    `json:"has_b_frames,omitempty"`
	SampleAspectRatio  string `json:"sample_aspect_ratio,omitempty"`
	DisplayAspectRatio string `json:"display_aspect_ratio,omitempty"`
	PixFmt             string `json:"pix_fmt,omitempty"`
	Level              int    `json:"level,omitempty"`
	ColorRange         string `json:"color_range,omitempty"`
	ColorSpace         string `json:"color_space,omitempty"`
	ChromaLocation     string `json:"chroma_location,omitempty"`
	FieldOrder         string `json:"field_order,omitempty"`
	Refs               int    `json:"refs,omitempty"`
	IsAvc              string `json:"is_avc,omitempty"`
	NalLengthSize      string `json:"nal_length_size,omitempty"`
	BitsPerRawSample   string `json:"bits_per_raw_sample,omitempty"`
	ExtradataSize      int    `json:"extradata_size,omitempty"`
}
|
||||
|
||||
// FFProbeOutput is the top-level shape of `ffprobe -show_streams -of json`
// output as decoded by FFProbe; only the "streams" array is consumed.
type FFProbeOutput struct {
	Streams []FFProbeStream `json:"streams"`
}
|
||||
62
ffmpeg/slices.go
Normal file
62
ffmpeg/slices.go
Normal file
|
|
@ -0,0 +1,62 @@
|
|||
package ffmpeg
|
||||
|
||||
import (
|
||||
"encoding/csv"
|
||||
"errors"
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
"strconv"
|
||||
"stream-poc2/model"
|
||||
)
|
||||
|
||||
func ReadSegmentsCSV(fname string) ([]model.Slice, error) {
|
||||
log.Println("read csv:", fname)
|
||||
f, err := os.OpenFile(fname, os.O_RDONLY, 0)
|
||||
if err != nil {
|
||||
log.Println("failed to open CSV:", err)
|
||||
return nil, err
|
||||
}
|
||||
defer func(f *os.File) {
|
||||
err := f.Close()
|
||||
if err != nil {
|
||||
log.Println("Failed to close CSV:", err)
|
||||
}
|
||||
}(f)
|
||||
|
||||
r := csv.NewReader(f)
|
||||
var slices []model.Slice
|
||||
|
||||
for {
|
||||
var cols []string
|
||||
cols, err = r.Read()
|
||||
if err != nil {
|
||||
if errors.Is(err, io.EOF) {
|
||||
break
|
||||
}
|
||||
log.Println("failed reading csv row", err)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var startTime float64
|
||||
var endTime float64
|
||||
startTime, err = strconv.ParseFloat(cols[1], 64)
|
||||
if err != nil {
|
||||
log.Println("Failed to parse float", err)
|
||||
return nil, err
|
||||
}
|
||||
endTime, err = strconv.ParseFloat(cols[2], 64)
|
||||
if err != nil {
|
||||
log.Println("Failed to parse float", err)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
slices = append(slices, model.Slice{
|
||||
Name: cols[0],
|
||||
StartTime: startTime,
|
||||
EndTime: endTime,
|
||||
})
|
||||
}
|
||||
|
||||
return slices, nil
|
||||
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue