av1.go

package track

import (
	"time"

	"github.com/bluenviron/gortsplib/v4/pkg/format/rtpav1"
	"github.com/bluenviron/mediacommon/pkg/codecs/av1"
	"go.uber.org/zap"
	"m7s.live/engine/v4/codec"
	. "m7s.live/engine/v4/common"
	"m7s.live/engine/v4/log"
	"m7s.live/engine/v4/util"
)
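
// Compile-time assertion that *AV1 implements the SpesificTrack interface.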
var _ SpesificTrack = (*AV1)(nil)
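
// AV1 is the video track implementation for the AV1 codec. It embeds the
// generic Video track and adds the RTP depacketizer/packetizer state and
// the parsed sequence header.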
type AV1 struct {
	Video
	decoder         rtpav1.Decoder
	encoder         rtpav1.Encoder
	seqHeader       av1.SequenceHeader
	seenFrameHeader bool
	refFrameType    map[byte]byte
}
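
// NewAV1 creates an AV1 track for the given publisher, sets the codec ID,
// RTP payload type (96) and 90 kHz clock rate, and initializes the RTP
// decoder/encoder and the DTS estimator.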
func NewAV1(puber IPuber, stuff ...any) (vt *AV1) {
	vt = &AV1{}
	vt.Video.CodecID = codec.CodecID_AV1
	vt.SetStuff("av1", byte(96), uint32(90000), vt, stuff, puber)
	if vt.BytesPool == nil {
		vt.BytesPool = make(util.BytesPool, 17)
	}
	vt.nalulenSize = 0
	vt.dtsEst = util.NewDTSEstimator()
	vt.decoder.Init()
	vt.encoder.Init()
	vt.encoder.PayloadType = vt.PayloadType
	vt.ParamaterSets = [][]byte{nil, {0, 0, 0}}
	return
}
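
// WriteSequenceHead parses the AV1CodecConfigurationRecord that follows the
// 5-byte extended RTMP header and stores the sequence header OBU together
// with the level/profile/tier bytes as the track's parameter sets.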
func (vt *AV1) WriteSequenceHead(head []byte) (err error) {
	vt.Video.WriteSequenceHead(head)
	var info codec.AV1CodecConfigurationRecord
	info.Unmarshal(head[5:])
	vt.seqHeader.Unmarshal(info.ConfigOBUs)
	vt.ParamaterSets = [][]byte{info.ConfigOBUs, {info.SeqLevelIdx0, info.SeqProfile, info.SeqTier0}}
	return
}
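
// WriteRTPFrame depacketizes an incoming RTP packet into OBUs. A sequence
// header OBU is wrapped with a 5-byte extended RTMP header and written as the
// sequence head; all other OBUs are appended to the current access unit.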
func (vt *AV1) WriteRTPFrame(rtpItem *util.ListItem[RTPFrame]) {
	defer func() {
		err := recover()
		if err != nil {
			vt.Error("WriteRTPFrame panic", zap.Any("err", err))
			vt.Publisher.Stop(zap.Any("err", err))
		}
	}()
	if vt.lastSeq != vt.lastSeq2+1 && vt.lastSeq2 != 0 {
		vt.lostFlag = true
		vt.Warn("lost rtp packet", zap.Uint16("lastSeq", vt.lastSeq), zap.Uint16("lastSeq2", vt.lastSeq2))
	}
	frame := &rtpItem.Value
	rv := vt.Value
	rv.RTP.Push(rtpItem)
	obus, err := vt.decoder.Decode(frame.Packet)
	for _, obu := range obus {
		var obuHeader av1.OBUHeader
		obuHeader.Unmarshal(obu)
		switch obuHeader.Type {
		case av1.OBUTypeSequenceHeader:
			rtmpHead := []byte{0b1001_0000 | byte(codec.PacketTypeMPEG2TSSequenceStart), 0, 0, 0, 0}
			util.BigEndian.PutUint32(rtmpHead[1:], codec.FourCC_AV1_32)
			// TODO: generate head
			rtmpHead = append(rtmpHead, obu...)
			vt.Video.WriteSequenceHead(rtmpHead)
			vt.ParamaterSets[0] = obu
		default:
			rv.AUList.Push(vt.BytesPool.GetShell(obu))
		}
	}
	if err == nil {
		vt.generateTimestamp(frame.Timestamp)
		vt.Flush()
	}
}
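
// writeAVCCFrame walks the OBUs of an AVCC (extended RTMP) payload. For each
// OBU it reads the header byte and the LEB128-encoded size to locate the OBU
// boundary, then appends the whole OBU (header, size field and payload) to
// the current access unit.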
func (vt *AV1) writeAVCCFrame(ts uint32, r *util.BLLReader, frame *util.BLL) (err error) {
	vt.Value.PTS = time.Duration(ts) * 90
	vt.Value.DTS = time.Duration(ts) * 90
	var obuHeader av1.OBUHeader
	for r.CanRead() {
		offset := r.GetOffset()
		b, _ := r.ReadByte()
		obuHeader.Unmarshal([]byte{b})
		if log.Trace {
			vt.Trace("obu", zap.Any("type", obuHeader.Type), zap.Bool("iframe", vt.Value.IFrame))
		}
		obuSize, _, _ := r.LEB128Unmarshal()
		end := r.GetOffset()
		size := end - offset + int(obuSize)
		r = frame.NewReader()
		r.Skip(offset)
		obu := r.ReadN(size)
		switch obuHeader.Type {
		case codec.AV1_OBU_SEQUENCE_HEADER:
			// vt.seqHeader.Unmarshal(util.ConcatBuffers(obu))
			// vt.seenFrameHeader = false
			// vt.AppendAuBytes(obu...)
		case codec.AV1_OBU_FRAME:
			// if !vt.seenFrameHeader {
			// 	if vt.seqHeader.ReducedStillPictureHeader {
			// 		vt.Value.IFrame = true
			// 		vt.seenFrameHeader = true
			// 	} else {
			// 		showframe := obu[0][0] >> 7
			// 		if showframe != 0 {
			// 			frame_to_show_map_idx := (obu[0][0] >> 4) & 0b0111
			// 			vt.Value.IFrame = vt.refFrameType[frame_to_show_map_idx] == 0
			// 		} else {
			// 			vt.Value.IFrame = (obu[0][0])&0b0110_0000 == 0
			// 		}
			// 		vt.seenFrameHeader = showframe == 0
			// 	}
			// }
			// vt.AppendAuBytes(obu...)
		case codec.AV1_OBU_TEMPORAL_DELIMITER:
		case codec.AV1_OBU_FRAME_HEADER:
		}
		vt.AppendAuBytes(obu...)
	}
	return
}
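
// CompleteAVCC prepends the 5-byte extended RTMP header (frame type,
// PacketTypeCodedFrames and the AV1 FourCC), then copies every OBU buffer of
// the access unit into the AVCC representation.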
func (vt *AV1) CompleteAVCC(rv *AVFrame) {
	mem := vt.BytesPool.Get(5)
	b := mem.Value
	if rv.IFrame {
		b[0] = 0b1001_0000 | byte(codec.PacketTypeCodedFrames)
	} else {
		b[0] = 0b1010_0000 | byte(codec.PacketTypeCodedFrames)
	}
	util.BigEndian.PutUint32(b[1:], codec.FourCC_AV1_32)
	// println(rv.PTS < rv.DTS, "\t", rv.PTS, "\t", rv.DTS, "\t", rv.PTS-rv.DTS)
	// write CTS
	rv.AVCC.Push(mem)
	rv.AUList.Range(func(au *util.BLL) bool {
		au.Range(func(slice util.Buffer) bool {
			rv.AVCC.Push(vt.BytesPool.GetShell(slice))
			return true
		})
		return true
	})
}

// CompleteRTP completes the RTP format: it packetizes the access unit's OBUs
// into RTP packets via the encoder.
func (vt *AV1) CompleteRTP(value *AVFrame) {
	obus := vt.Value.AUList.ToBuffers()
	// if vt.Value.IFrame {
	// 	obus = append(net.Buffers{vt.ParamaterSets[0]}, obus...)
	// }
	rtps, err := vt.encoder.Encode(obus)
	if err != nil {
		vt.Error("AV1 encoder encode error", zap.Error(err))
		return
	}
	for _, rtp := range rtps {
		vt.Value.RTP.PushValue(RTPFrame{Packet: rtp})
	}
}