feat(vmess): add length mask (opt=4) (#298)

mzz authored 2021-11-28 23:02:46 +08:00, committed by GitHub
parent 807aebc678
commit 6006ec13c7
4 changed files with 106 additions and 33 deletions
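For orientation (not part of the commit itself): opt 4, OptMetadataObfuscate, stops sending chunk lengths in the clear. Both peers seed a SHAKE128 stream with the body IV and XOR every 2-byte length field with the next 2 bytes of that stream. A minimal sketch of the idea using golang.org/x/crypto/sha3 directly; the IV here is a placeholder:

```go
package main

import (
	"encoding/binary"
	"fmt"

	"golang.org/x/crypto/sha3"
)

func main() {
	iv := []byte("placeholder-body-iv")

	// Sender and receiver keep their own SHAKE128 streams, seeded
	// identically, so they produce the same mask sequence.
	sender := sha3.NewShake128()
	sender.Write(iv)
	receiver := sha3.NewShake128()
	receiver.Write(iv)

	length := uint16(1024)

	// Sender: XOR the real length with the next 2 mask bytes.
	var m [2]byte
	sender.Read(m[:])
	masked := binary.BigEndian.Uint16(m[:]) ^ length

	// Receiver: same stream position, same mask, original length back.
	receiver.Read(m[:])
	recovered := binary.BigEndian.Uint16(m[:]) ^ masked

	fmt.Printf("masked=%#04x recovered=%d\n", masked, recovered)
}
```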


@@ -17,15 +17,16 @@ import (
type aeadWriter struct {
io.Writer
+chunkSizeEncoder ChunkSizeEncoder
cipher.AEAD
nonce [32]byte
count uint16
}
// AEADWriter returns a aead writer.
-func AEADWriter(w io.Writer, aead cipher.AEAD, iv []byte) io.Writer {
-aw := &aeadWriter{Writer: w, AEAD: aead}
-copy(aw.nonce[2:], iv[2:12])
+func AEADWriter(w io.Writer, aead cipher.AEAD, iv []byte, chunkSizeEncoder ChunkSizeEncoder) io.Writer {
+aw := &aeadWriter{Writer: w, AEAD: aead, chunkSizeEncoder: chunkSizeEncoder}
+copy(aw.nonce[2:], iv[2:aead.NonceSize()])
return aw
}
@@ -33,7 +34,7 @@ func (w *aeadWriter) Write(b []byte) (n int, err error) {
buf := pool.GetBuffer(chunkSize)
defer pool.PutBuffer(buf)
-var lenBuf [lenSize]byte
+lenBuf := make([]byte, w.chunkSizeEncoder.SizeBytes())
var writeLen, dataLen int
nonce := w.nonce[:w.NonceSize()]
@@ -44,7 +45,7 @@ func (w *aeadWriter) Write(b []byte) (n int, err error) {
}
dataLen = writeLen - w.Overhead()
-binary.BigEndian.PutUint16(lenBuf[:], uint16(writeLen))
+w.chunkSizeEncoder.Encode(uint16(writeLen), lenBuf)
binary.BigEndian.PutUint16(nonce[:2], w.count)
w.Seal(buf[:0], nonce, b[n:n+dataLen], nil)
@@ -63,6 +64,7 @@ func (w *aeadWriter) Write(b []byte) (n int, err error) {
type aeadReader struct {
io.Reader
+chunkSizeDecoder ChunkSizeDecoder
cipher.AEAD
nonce [32]byte
count uint16
@@ -71,32 +73,35 @@ type aeadReader struct {
}
// AEADReader returns a aead reader.
-func AEADReader(r io.Reader, aead cipher.AEAD, iv []byte) io.Reader {
-ar := &aeadReader{Reader: r, AEAD: aead}
-copy(ar.nonce[2:], iv[2:12])
+func AEADReader(r io.Reader, aead cipher.AEAD, iv []byte, chunkSizeDecoder ChunkSizeDecoder) io.Reader {
+ar := &aeadReader{Reader: r, AEAD: aead, chunkSizeDecoder: chunkSizeDecoder}
+copy(ar.nonce[2:], iv[2:aead.NonceSize()])
return ar
}
func (r *aeadReader) read(p []byte) (int, error) {
-if _, err := io.ReadFull(r.Reader, p[:lenSize]); err != nil {
+if _, err := io.ReadFull(r.Reader, p[:r.chunkSizeDecoder.SizeBytes()]); err != nil {
return 0, err
}
-size := int(binary.BigEndian.Uint16(p[:lenSize]))
+size, err := r.chunkSizeDecoder.Decode(p[:r.chunkSizeDecoder.SizeBytes()])
+if err != nil {
+return 0, err
+}
p = p[:size]
if _, err := io.ReadFull(r.Reader, p); err != nil {
return 0, err
}
binary.BigEndian.PutUint16(r.nonce[:2], r.count)
-_, err := r.Open(p[:0], r.nonce[:r.NonceSize()], p, nil)
+_, err = r.Open(p[:0], r.nonce[:r.NonceSize()], p, nil)
r.count++
if err != nil {
return 0, err
}
-return size - r.Overhead(), nil
+return int(size) - r.Overhead(), nil
}
func (r *aeadReader) Read(p []byte) (int, error) {
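A hedged loopback sketch of the updated aead.go API above: AEADWriter and AEADReader now also take the chunk-size codec, and both sides must build their ShakeSizeParser from the same body IV or the masked lengths cannot be decoded. The import path and the single-buffer loopback are assumptions for illustration, and it assumes the unshown aeadReader.Read behaves as a plain io.Reader:

```go
package main

import (
	"bytes"
	"crypto/aes"
	"crypto/cipher"
	"crypto/rand"
	"fmt"

	"github.com/nadoo/glider/proxy/vmess" // assumed import path for this package
)

func main() {
	key := make([]byte, 16)
	iv := make([]byte, 16)
	rand.Read(key)
	rand.Read(iv)

	block, _ := aes.NewCipher(key)
	aead, _ := cipher.NewGCM(block)

	var wire bytes.Buffer
	// Writer and reader must derive their length masks from the same IV,
	// otherwise the decoded chunk size is garbage.
	w := vmess.AEADWriter(&wire, aead, iv, vmess.NewShakeSizeParser(iv))
	r := vmess.AEADReader(&wire, aead, iv, vmess.NewShakeSizeParser(iv))

	w.Write([]byte("hello, masked chunk"))

	buf := make([]byte, 1024)
	n, _ := r.Read(buf)
	fmt.Printf("%s\n", buf[:n]) // hello, masked chunk
}
```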


@@ -1,24 +1,23 @@
package vmess
import (
"encoding/binary"
"io"
"net"
)
const (
-lenSize = 2
chunkSize = 16 << 10
)
type chunkedWriter struct {
io.Writer
-buf [lenSize]byte
+chunkSizeEncoder ChunkSizeEncoder
+buf []byte
}
// ChunkedWriter returns a chunked writer.
-func ChunkedWriter(w io.Writer) io.Writer {
-return &chunkedWriter{Writer: w}
+func ChunkedWriter(w io.Writer, chunkSizeEncoder ChunkSizeEncoder) io.Writer {
+return &chunkedWriter{Writer: w, chunkSizeEncoder: chunkSizeEncoder, buf: make([]byte, chunkSizeEncoder.SizeBytes())}
}
func (w *chunkedWriter) Write(p []byte) (n int, err error) {
@@ -28,8 +27,7 @@ func (w *chunkedWriter) Write(p []byte) (n int, err error) {
if dataLen > chunkSize {
dataLen = chunkSize
}
-binary.BigEndian.PutUint16(w.buf[:], uint16(dataLen))
+w.chunkSizeEncoder.Encode(uint16(dataLen), w.buf)
if _, err = (&net.Buffers{w.buf[:], p[n : n+dataLen]}).WriteTo(w.Writer); err != nil {
break
}
@@ -42,23 +40,28 @@ func (w *chunkedWriter) Write(p []byte) (n int, err error) {
type chunkedReader struct {
io.Reader
-buf [lenSize]byte
-left int
+chunkSizeDecoder ChunkSizeDecoder
+buf []byte
+left int
}
// ChunkedReader returns a chunked reader.
-func ChunkedReader(r io.Reader) io.Reader {
-return &chunkedReader{Reader: r}
+func ChunkedReader(r io.Reader, chunkSizeDecoder ChunkSizeDecoder) io.Reader {
+return &chunkedReader{Reader: r, chunkSizeDecoder: chunkSizeDecoder, buf: make([]byte, chunkSizeDecoder.SizeBytes())}
}
func (r *chunkedReader) Read(p []byte) (int, error) {
if r.left == 0 {
// get length
-_, err := io.ReadFull(r.Reader, r.buf[:lenSize])
+_, err := io.ReadFull(r.Reader, r.buf[:r.chunkSizeDecoder.SizeBytes()])
if err != nil {
return 0, err
}
-r.left = int(binary.BigEndian.Uint16(r.buf[:lenSize]))
+n, err := r.chunkSizeDecoder.Decode(r.buf[:])
+if err != nil {
+return 0, err
+}
+r.left = int(n)
// if left == 0, then this is the end
if r.left == 0 {
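The SecurityNone path gets the same treatment: ChunkedWriter/ChunkedReader now carry the codec, so even unencrypted chunks no longer expose a plaintext big-endian length. A sketch (same assumed import path) that writes one chunk and unmasks its length field by hand with a second parser seeded from the same IV:

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/nadoo/glider/proxy/vmess" // assumed import path
)

func main() {
	iv := []byte("0123456789abcdef") // placeholder body IV

	var wire bytes.Buffer
	w := vmess.ChunkedWriter(&wire, vmess.NewShakeSizeParser(iv))
	w.Write([]byte("plain chunk, masked length"))

	// On the wire the 2-byte length field is XORed with the SHAKE mask,
	// so it is not a bare big-endian 26 (0x001a).
	frame := wire.Bytes()
	fmt.Printf("masked length field: % x\n", frame[:2])

	// A decoder seeded from the same IV recovers the real size.
	dec := vmess.NewShakeSizeParser(iv)
	size, _ := dec.Decode(frame[:2])
	fmt.Println("decoded size:", size, "payload:", string(frame[2:2+size]))
}
```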


@@ -0,0 +1,60 @@
package vmess
import (
"encoding/binary"
"golang.org/x/crypto/sha3"
)
// ChunkSizeEncoder is a utility class to encode size value into bytes.
type ChunkSizeEncoder interface {
SizeBytes() int32
Encode(uint16, []byte) []byte
}
// ChunkSizeDecoder is a utility class to decode size value from bytes.
type ChunkSizeDecoder interface {
SizeBytes() int32
Decode([]byte) (uint16, error)
}
type ShakeSizeParser struct {
shake sha3.ShakeHash
buffer [2]byte
}
func NewShakeSizeParser(nonce []byte) *ShakeSizeParser {
shake := sha3.NewShake128()
shake.Write(nonce)
return &ShakeSizeParser{
shake: shake,
}
}
func (*ShakeSizeParser) SizeBytes() int32 {
return 2
}
func (s *ShakeSizeParser) next() uint16 {
s.shake.Read(s.buffer[:])
return binary.BigEndian.Uint16(s.buffer[:])
}
func (s *ShakeSizeParser) Decode(b []byte) (uint16, error) {
mask := s.next()
size := binary.BigEndian.Uint16(b)
return mask ^ size, nil
}
func (s *ShakeSizeParser) Encode(size uint16, b []byte) []byte {
mask := s.next()
binary.BigEndian.PutUint16(b, mask^size)
return b[:2]
}
func (s *ShakeSizeParser) NextPaddingLen() uint16 {
return s.next() % 64
}
func (s *ShakeSizeParser) MaxPaddingLen() uint16 {
return 64
}
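The parser is stateful: every Encode or Decode (and NextPaddingLen, which this commit does not use) pulls 2 fresh bytes from the SHAKE stream, so encoder and decoder only agree if they are seeded with the same nonce and consume masks at the same rate. A small sketch of that lockstep behaviour, again under the assumed import path:

```go
package main

import (
	"fmt"

	"github.com/nadoo/glider/proxy/vmess" // assumed import path
)

func main() {
	nonce := []byte("0123456789abcdef") // placeholder body IV

	enc := vmess.NewShakeSizeParser(nonce)
	dec := vmess.NewShakeSizeParser(nonce)

	for _, size := range []uint16{18, 1024, 16384} {
		wire := make([]byte, enc.SizeBytes())
		enc.Encode(size, wire) // each call burns a fresh 2-byte mask

		got, _ := dec.Decode(wire) // same stream position, same mask
		fmt.Printf("size=%-5d wire=% x decoded=%d\n", size, wire, got)
	}
}
```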


@@ -27,7 +27,7 @@ const (
OptBasicFormat byte = 0
OptChunkStream byte = 1
// OptReuseTCPConnection byte = 2
-// OptMetadataObfuscate byte = 4
+OptMetadataObfuscate byte = 4
)
// Security types
@@ -72,6 +72,9 @@ type Conn struct {
respBodyIV [16]byte
respBodyKey [16]byte
+writeChunkSizeParser ChunkSizeEncoder
+readChunkSizeParser ChunkSizeDecoder
net.Conn
dataReader io.Reader
dataWriter io.Writer
@@ -90,7 +93,7 @@ func NewClient(uuidStr, security string, alterID int, aead bool) (*Client, error)
c.users = append(c.users, user.GenAlterIDUsers(alterID)...)
c.count = len(c.users)
-c.opt = OptChunkStream
+c.opt = OptChunkStream | OptMetadataObfuscate
c.aead = aead
security = strings.ToLower(security)
@@ -150,6 +153,8 @@ func (c *Client) NewConn(rc net.Conn, target string, cmd CmdType) (*Conn, error)
return nil, err
}
}
+conn.writeChunkSizeParser = NewShakeSizeParser(conn.reqBodyIV[:])
+conn.readChunkSizeParser = NewShakeSizeParser(conn.respBodyIV[:])
// Request
err = conn.Request(cmd)
@@ -292,12 +297,12 @@ func (c *Conn) Write(b []byte) (n int, err error) {
if c.opt&OptChunkStream == OptChunkStream {
switch c.security {
case SecurityNone:
-c.dataWriter = ChunkedWriter(c.Conn)
+c.dataWriter = ChunkedWriter(c.Conn, c.writeChunkSizeParser)
case SecurityAES128GCM:
block, _ := aes.NewCipher(c.reqBodyKey[:])
aead, _ := cipher.NewGCM(block)
-c.dataWriter = AEADWriter(c.Conn, aead, c.reqBodyIV[:])
+c.dataWriter = AEADWriter(c.Conn, aead, c.reqBodyIV[:], c.writeChunkSizeParser)
case SecurityChacha20Poly1305:
key := pool.GetBuffer(32)
@@ -306,7 +311,7 @@ func (c *Conn) Write(b []byte) (n int, err error) {
t = md5.Sum(key[:16])
copy(key[16:], t[:])
aead, _ := chacha20poly1305.New(key)
-c.dataWriter = AEADWriter(c.Conn, aead, c.reqBodyIV[:])
+c.dataWriter = AEADWriter(c.Conn, aead, c.reqBodyIV[:], c.writeChunkSizeParser)
pool.PutBuffer(key)
}
}
@@ -328,12 +333,12 @@ func (c *Conn) Read(b []byte) (n int, err error) {
if c.opt&OptChunkStream == OptChunkStream {
switch c.security {
case SecurityNone:
-c.dataReader = ChunkedReader(c.Conn)
+c.dataReader = ChunkedReader(c.Conn, c.readChunkSizeParser)
case SecurityAES128GCM:
block, _ := aes.NewCipher(c.respBodyKey[:])
aead, _ := cipher.NewGCM(block)
-c.dataReader = AEADReader(c.Conn, aead, c.respBodyIV[:])
+c.dataReader = AEADReader(c.Conn, aead, c.respBodyIV[:], c.readChunkSizeParser)
case SecurityChacha20Poly1305:
key := pool.GetBuffer(32)
@@ -342,7 +347,7 @@ func (c *Conn) Read(b []byte) (n int, err error) {
t = md5.Sum(key[:16])
copy(key[16:], t[:])
aead, _ := chacha20poly1305.New(key)
-c.dataReader = AEADReader(c.Conn, aead, c.respBodyIV[:])
+c.dataReader = AEADReader(c.Conn, aead, c.respBodyIV[:], c.readChunkSizeParser)
pool.PutBuffer(key)
}
}
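End to end, the client now always sets OptChunkStream|OptMetadataObfuscate and seeds the write mask from reqBodyIV and the read mask from respBodyIV, mirroring what a VMess server derives on its side. A hedged usage sketch; the import path, the "aes-128-gcm" security string and the CmdTCP constant are assumptions about this package, and the addresses are placeholders:

```go
package main

import (
	"log"
	"net"

	"github.com/nadoo/glider/proxy/vmess" // assumed import path
)

func main() {
	// NewClient signature as in this diff: uuid, security, alterID, aead.
	client, err := vmess.NewClient("b831381d-6324-4d53-ad4f-8cda48b30811", "aes-128-gcm", 0, true)
	if err != nil {
		log.Fatal(err)
	}

	rc, err := net.Dial("tcp", "vmess-server.example.com:443") // placeholder server
	if err != nil {
		log.Fatal(err)
	}

	// NewConn performs the VMess handshake; after this commit the returned
	// Conn masks every chunk length with SHAKE128(reqBodyIV) on Write and
	// unmasks with SHAKE128(respBodyIV) on Read.
	conn, err := client.NewConn(rc, "example.com:80", vmess.CmdTCP)
	if err != nil {
		log.Fatal(err)
	}

	if _, err := conn.Write([]byte("GET / HTTP/1.1\r\nHost: example.com\r\n\r\n")); err != nil {
		log.Fatal(err)
	}
}
```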