mirror of
https://github.com/refraction-networking/uquic.git
synced 2025-04-04 12:47:36 +03:00
implement parsing and writing of MAX_STREAM_ID frames
This commit is contained in:
parent
32168928d1
commit
5afe35328a
4 changed files with 105 additions and 1 deletions
37
internal/wire/max_stream_id_frame.go
Normal file
37
internal/wire/max_stream_id_frame.go
Normal file
|
@ -0,0 +1,37 @@
|
||||||
|
package wire
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
|
||||||
|
"github.com/lucas-clemente/quic-go/internal/protocol"
|
||||||
|
"github.com/lucas-clemente/quic-go/internal/utils"
|
||||||
|
)
|
||||||
|
|
||||||
|
// A MaxStreamIDFrame is a MAX_STREAM_ID frame.
// NOTE(review): per the IETF QUIC drafts this frame raises the peer's
// stream-ID limit; the visible code only parses/serializes it — confirm
// the semantics against the caller that consumes the frame.
type MaxStreamIDFrame struct {
	// StreamID is the stream ID carried in the frame's varint payload.
	StreamID protocol.StreamID
}
|
||||||
|
|
||||||
|
// ParseMaxStreamIDFrame parses a MAX_STREAM_ID frame
|
||||||
|
func ParseMaxStreamIDFrame(r *bytes.Reader, _ protocol.VersionNumber) (*MaxStreamIDFrame, error) {
|
||||||
|
// read the Type byte
|
||||||
|
if _, err := r.ReadByte(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
streamID, err := utils.ReadVarInt(r)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &MaxStreamIDFrame{StreamID: protocol.StreamID(streamID)}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (f *MaxStreamIDFrame) Write(b *bytes.Buffer, _ protocol.VersionNumber) error {
|
||||||
|
b.WriteByte(0x6)
|
||||||
|
utils.WriteVarInt(b, uint64(f.StreamID))
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// MinLength of a written frame
|
||||||
|
func (f *MaxStreamIDFrame) MinLength(protocol.VersionNumber) protocol.ByteCount {
|
||||||
|
return 1 + utils.VarIntLen(uint64(f.StreamID))
|
||||||
|
}
|
51
internal/wire/max_stream_id_frame_test.go
Normal file
51
internal/wire/max_stream_id_frame_test.go
Normal file
|
@ -0,0 +1,51 @@
|
||||||
|
package wire
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
|
||||||
|
"github.com/lucas-clemente/quic-go/internal/protocol"
|
||||||
|
"github.com/lucas-clemente/quic-go/internal/utils"
|
||||||
|
. "github.com/onsi/ginkgo"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
)
|
||||||
|
|
||||||
|
var _ = Describe("MAX_STREAM_ID frame", func() {
|
||||||
|
Context("parsing", func() {
|
||||||
|
It("accepts sample frame", func() {
|
||||||
|
data := []byte{0x6}
|
||||||
|
data = append(data, encodeVarInt(0xdecafbad)...)
|
||||||
|
b := bytes.NewReader(data)
|
||||||
|
f, err := ParseMaxStreamIDFrame(b, protocol.VersionWhatever)
|
||||||
|
Expect(err).ToNot(HaveOccurred())
|
||||||
|
Expect(f.StreamID).To(Equal(protocol.StreamID(0xdecafbad)))
|
||||||
|
Expect(b.Len()).To(BeZero())
|
||||||
|
})
|
||||||
|
|
||||||
|
It("errors on EOFs", func() {
|
||||||
|
data := []byte{0x06}
|
||||||
|
data = append(data, encodeVarInt(0xdeadbeefcafe13)...)
|
||||||
|
_, err := ParseMaxStreamIDFrame(bytes.NewReader(data), protocol.VersionWhatever)
|
||||||
|
Expect(err).NotTo(HaveOccurred())
|
||||||
|
for i := range data {
|
||||||
|
_, err := ParseMaxStreamIDFrame(bytes.NewReader(data[0:i]), protocol.VersionWhatever)
|
||||||
|
Expect(err).To(HaveOccurred())
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("writing", func() {
|
||||||
|
It("writes a sample frame", func() {
|
||||||
|
b := &bytes.Buffer{}
|
||||||
|
frame := MaxStreamIDFrame{StreamID: 0x12345678}
|
||||||
|
frame.Write(b, protocol.VersionWhatever)
|
||||||
|
expected := []byte{0x6}
|
||||||
|
expected = append(expected, encodeVarInt(0x12345678)...)
|
||||||
|
Expect(b.Bytes()).To(Equal(expected))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("has the correct min length", func() {
|
||||||
|
frame := MaxStreamIDFrame{StreamID: 0x1337}
|
||||||
|
Expect(frame.MinLength(protocol.VersionWhatever)).To(Equal(1 + utils.VarIntLen(0x1337)))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
|
@ -106,7 +106,11 @@ func (u *packetUnpacker) parseIETFFrame(r *bytes.Reader, typeByte byte, hdr *wir
|
||||||
if err != nil {
|
if err != nil {
|
||||||
err = qerr.Error(qerr.InvalidWindowUpdateData, err.Error())
|
err = qerr.Error(qerr.InvalidWindowUpdateData, err.Error())
|
||||||
}
|
}
|
||||||
// TODO(#878): implement the MAX_STREAM_ID frame
|
case 0x6:
|
||||||
|
frame, err = wire.ParseMaxStreamIDFrame(r, u.version)
|
||||||
|
if err != nil {
|
||||||
|
err = qerr.Error(qerr.InvalidFrameData, err.Error())
|
||||||
|
}
|
||||||
case 0x7:
|
case 0x7:
|
||||||
frame, err = wire.ParsePingFrame(r, u.version)
|
frame, err = wire.ParsePingFrame(r, u.version)
|
||||||
case 0x8:
|
case 0x8:
|
||||||
|
|
|
@ -342,6 +342,17 @@ var _ = Describe("Packet unpacker", func() {
|
||||||
Expect(packet.frames).To(Equal([]wire.Frame{f}))
|
Expect(packet.frames).To(Equal([]wire.Frame{f}))
|
||||||
})
|
})
|
||||||
|
|
||||||
|
It("unpacks MAX_STREAM_ID frames", func() {
|
||||||
|
f := &wire.MaxStreamIDFrame{StreamID: 0x1337}
|
||||||
|
buf := &bytes.Buffer{}
|
||||||
|
err := f.Write(buf, versionIETFFrames)
|
||||||
|
Expect(err).ToNot(HaveOccurred())
|
||||||
|
setData(buf.Bytes())
|
||||||
|
packet, err := unpacker.Unpack(hdrBin, hdr, data)
|
||||||
|
Expect(err).ToNot(HaveOccurred())
|
||||||
|
Expect(packet.frames).To(Equal([]wire.Frame{f}))
|
||||||
|
})
|
||||||
|
|
||||||
It("unpacks connection-level BLOCKED frames", func() {
|
It("unpacks connection-level BLOCKED frames", func() {
|
||||||
f := &wire.BlockedFrame{Offset: 0x1234}
|
f := &wire.BlockedFrame{Offset: 0x1234}
|
||||||
buf := &bytes.Buffer{}
|
buf := &bytes.Buffer{}
|
||||||
|
@ -406,6 +417,7 @@ var _ = Describe("Packet unpacker", func() {
|
||||||
0x02: qerr.InvalidConnectionCloseData,
|
0x02: qerr.InvalidConnectionCloseData,
|
||||||
0x04: qerr.InvalidWindowUpdateData,
|
0x04: qerr.InvalidWindowUpdateData,
|
||||||
0x05: qerr.InvalidWindowUpdateData,
|
0x05: qerr.InvalidWindowUpdateData,
|
||||||
|
0x06: qerr.InvalidFrameData,
|
||||||
0x08: qerr.InvalidBlockedData,
|
0x08: qerr.InvalidBlockedData,
|
||||||
0x09: qerr.InvalidBlockedData,
|
0x09: qerr.InvalidBlockedData,
|
||||||
0x0c: qerr.InvalidFrameData,
|
0x0c: qerr.InvalidFrameData,
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue