Mirror of https://github.com/nadoo/glider.git (synced 2025-02-23 17:35:40 +08:00)

feat(vmess): add length mask (opt=4) (#298)

commit 6006ec13c7
parent 807aebc678
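
This commit enables the VMess request option bit 4 (OptMetadataObfuscate) and masks every chunk-length prefix with a SHAKE128 keystream seeded from the body IV, so the 2-byte lengths no longer appear in cleartext on the wire. A new ChunkSizeEncoder/ChunkSizeDecoder pair abstracts the length encoding, ShakeSizeParser (in the new proxy/vmess/chunk_size_parser.go) implements the masked variant, and the AEAD and plain chunked readers/writers plus the client connection setup are rewired to use it. The sketch below only illustrates the masking idea; it is not code from the commit, and the nonce value is a placeholder (VMess seeds the hash with the request/response body IV).

package main

import (
    "encoding/binary"
    "fmt"

    "golang.org/x/crypto/sha3"
)

func main() {
    nonce := []byte("placeholder-nonce") // placeholder; VMess uses the 16-byte body IV

    // Both peers derive the same keystream from the same seed.
    maskA := sha3.NewShake128()
    maskA.Write(nonce)
    maskB := sha3.NewShake128()
    maskB.Write(nonce)

    var b [2]byte
    length := uint16(1024)

    // Sender: XOR the real length with the next 2 keystream bytes.
    maskA.Read(b[:])
    onWire := binary.BigEndian.Uint16(b[:]) ^ length

    // Receiver: XOR with its own copy of the keystream to recover it.
    maskB.Read(b[:])
    recovered := binary.BigEndian.Uint16(b[:]) ^ onWire

    fmt.Println(length, onWire, recovered) // recovered == length
}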
@@ -17,15 +17,16 @@ import (
 
 type aeadWriter struct {
     io.Writer
+    chunkSizeEncoder ChunkSizeEncoder
     cipher.AEAD
     nonce [32]byte
     count uint16
 }
 
 // AEADWriter returns a aead writer.
-func AEADWriter(w io.Writer, aead cipher.AEAD, iv []byte) io.Writer {
-    aw := &aeadWriter{Writer: w, AEAD: aead}
-    copy(aw.nonce[2:], iv[2:12])
+func AEADWriter(w io.Writer, aead cipher.AEAD, iv []byte, chunkSizeEncoder ChunkSizeEncoder) io.Writer {
+    aw := &aeadWriter{Writer: w, AEAD: aead, chunkSizeEncoder: chunkSizeEncoder}
+    copy(aw.nonce[2:], iv[2:aead.NonceSize()])
     return aw
 }
 
@@ -33,7 +34,7 @@ func (w *aeadWriter) Write(b []byte) (n int, err error) {
     buf := pool.GetBuffer(chunkSize)
     defer pool.PutBuffer(buf)
 
-    var lenBuf [lenSize]byte
+    lenBuf := make([]byte, w.chunkSizeEncoder.SizeBytes())
     var writeLen, dataLen int
 
     nonce := w.nonce[:w.NonceSize()]
@@ -44,7 +45,7 @@ func (w *aeadWriter) Write(b []byte) (n int, err error) {
         }
         dataLen = writeLen - w.Overhead()
 
-        binary.BigEndian.PutUint16(lenBuf[:], uint16(writeLen))
+        w.chunkSizeEncoder.Encode(uint16(writeLen), lenBuf)
         binary.BigEndian.PutUint16(nonce[:2], w.count)
 
         w.Seal(buf[:0], nonce, b[n:n+dataLen], nil)
@@ -63,6 +64,7 @@ func (w *aeadWriter) Write(b []byte) (n int, err error) {
 
 type aeadReader struct {
     io.Reader
+    chunkSizeDecoder ChunkSizeDecoder
     cipher.AEAD
     nonce [32]byte
     count uint16
@@ -71,32 +73,35 @@ type aeadReader struct {
 }
 
 // AEADReader returns a aead reader.
-func AEADReader(r io.Reader, aead cipher.AEAD, iv []byte) io.Reader {
-    ar := &aeadReader{Reader: r, AEAD: aead}
-    copy(ar.nonce[2:], iv[2:12])
+func AEADReader(r io.Reader, aead cipher.AEAD, iv []byte, chunkSizeDecoder ChunkSizeDecoder) io.Reader {
+    ar := &aeadReader{Reader: r, AEAD: aead, chunkSizeDecoder: chunkSizeDecoder}
+    copy(ar.nonce[2:], iv[2:aead.NonceSize()])
     return ar
 }
 
 func (r *aeadReader) read(p []byte) (int, error) {
-    if _, err := io.ReadFull(r.Reader, p[:lenSize]); err != nil {
+    if _, err := io.ReadFull(r.Reader, p[:r.chunkSizeDecoder.SizeBytes()]); err != nil {
         return 0, err
     }
 
-    size := int(binary.BigEndian.Uint16(p[:lenSize]))
+    size, err := r.chunkSizeDecoder.Decode(p[:r.chunkSizeDecoder.SizeBytes()])
+    if err != nil {
+        return 0, err
+    }
     p = p[:size]
     if _, err := io.ReadFull(r.Reader, p); err != nil {
         return 0, err
     }
 
     binary.BigEndian.PutUint16(r.nonce[:2], r.count)
-    _, err := r.Open(p[:0], r.nonce[:r.NonceSize()], p, nil)
+    _, err = r.Open(p[:0], r.nonce[:r.NonceSize()], p, nil)
     r.count++
 
     if err != nil {
         return 0, err
     }
 
-    return size - r.Overhead(), nil
+    return int(size) - r.Overhead(), nil
 }
 
 func (r *aeadReader) Read(p []byte) (int, error) {
@@ -1,24 +1,23 @@
 package vmess
 
 import (
-    "encoding/binary"
     "io"
     "net"
 )
 
 const (
-    lenSize   = 2
     chunkSize = 16 << 10
 )
 
 type chunkedWriter struct {
     io.Writer
-    buf [lenSize]byte
+    chunkSizeEncoder ChunkSizeEncoder
+    buf              []byte
 }
 
 // ChunkedWriter returns a chunked writer.
-func ChunkedWriter(w io.Writer) io.Writer {
-    return &chunkedWriter{Writer: w}
+func ChunkedWriter(w io.Writer, chunkSizeEncoder ChunkSizeEncoder) io.Writer {
+    return &chunkedWriter{Writer: w, chunkSizeEncoder: chunkSizeEncoder, buf: make([]byte, chunkSizeEncoder.SizeBytes())}
 }
 
 func (w *chunkedWriter) Write(p []byte) (n int, err error) {
@@ -28,8 +27,7 @@ func (w *chunkedWriter) Write(p []byte) (n int, err error) {
         if dataLen > chunkSize {
             dataLen = chunkSize
         }
-
-        binary.BigEndian.PutUint16(w.buf[:], uint16(dataLen))
+        w.chunkSizeEncoder.Encode(uint16(dataLen), w.buf)
         if _, err = (&net.Buffers{w.buf[:], p[n : n+dataLen]}).WriteTo(w.Writer); err != nil {
             break
         }
@@ -42,23 +40,28 @@ func (w *chunkedWriter) Write(p []byte) (n int, err error) {
 
 type chunkedReader struct {
     io.Reader
-    buf [lenSize]byte
+    chunkSizeDecoder ChunkSizeDecoder
+    buf              []byte
     left int
 }
 
 // ChunkedReader returns a chunked reader.
-func ChunkedReader(r io.Reader) io.Reader {
-    return &chunkedReader{Reader: r}
+func ChunkedReader(r io.Reader, chunkSizeDecoder ChunkSizeDecoder) io.Reader {
+    return &chunkedReader{Reader: r, chunkSizeDecoder: chunkSizeDecoder}
 }
 
 func (r *chunkedReader) Read(p []byte) (int, error) {
     if r.left == 0 {
         // get length
-        _, err := io.ReadFull(r.Reader, r.buf[:lenSize])
+        _, err := io.ReadFull(r.Reader, r.buf[:r.chunkSizeDecoder.SizeBytes()])
         if err != nil {
             return 0, err
         }
-        r.left = int(binary.BigEndian.Uint16(r.buf[:lenSize]))
+        n, err := r.chunkSizeDecoder.Decode(r.buf[:])
+        if err != nil {
+            return 0, err
+        }
+        r.left = int(n)
 
         // if left == 0, then this is the end
         if r.left == 0 {
proxy/vmess/chunk_size_parser.go (new file, 60 lines)
@@ -0,0 +1,60 @@
+package vmess
+
+import (
+    "encoding/binary"
+    "golang.org/x/crypto/sha3"
+)
+
+// ChunkSizeEncoder is a utility class to encode size value into bytes.
+type ChunkSizeEncoder interface {
+    SizeBytes() int32
+    Encode(uint16, []byte) []byte
+}
+
+// ChunkSizeDecoder is a utility class to decode size value from bytes.
+type ChunkSizeDecoder interface {
+    SizeBytes() int32
+    Decode([]byte) (uint16, error)
+}
+
+type ShakeSizeParser struct {
+    shake  sha3.ShakeHash
+    buffer [2]byte
+}
+
+func NewShakeSizeParser(nonce []byte) *ShakeSizeParser {
+    shake := sha3.NewShake128()
+    shake.Write(nonce)
+    return &ShakeSizeParser{
+        shake: shake,
+    }
+}
+
+func (*ShakeSizeParser) SizeBytes() int32 {
+    return 2
+}
+
+func (s *ShakeSizeParser) next() uint16 {
+    s.shake.Read(s.buffer[:])
+    return binary.BigEndian.Uint16(s.buffer[:])
+}
+
+func (s *ShakeSizeParser) Decode(b []byte) (uint16, error) {
+    mask := s.next()
+    size := binary.BigEndian.Uint16(b)
+    return mask ^ size, nil
+}
+
+func (s *ShakeSizeParser) Encode(size uint16, b []byte) []byte {
+    mask := s.next()
+    binary.BigEndian.PutUint16(b, mask^size)
+    return b[:2]
+}
+
+func (s *ShakeSizeParser) NextPaddingLen() uint16 {
+    return s.next() % 64
+}
+
+func (s *ShakeSizeParser) MaxPaddingLen() uint16 {
+    return 64
+}
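
A short usage sketch follows (not part of the commit, written as if it lived inside package vmess): both ends seed a ShakeSizeParser with the same nonce, the writer masks each chunk's length via Encode, and the peer recovers it via Decode. The helper name exampleMaskedFraming and the nonce value are placeholders; ChunkedWriter and NewShakeSizeParser are the functions introduced above.

package vmess

import (
    "bytes"
    "fmt"
)

// exampleMaskedFraming is a hypothetical helper showing how ChunkedWriter and
// ShakeSizeParser cooperate: the 2-byte length prefix on the wire is the real
// length XORed with the next 2 bytes of a SHAKE128 keystream.
func exampleMaskedFraming() {
    nonce := make([]byte, 16) // placeholder; VMess seeds this with the body IV

    var wire bytes.Buffer
    w := ChunkedWriter(&wire, NewShakeSizeParser(nonce))

    payload := []byte("hello")
    if _, err := w.Write(payload); err != nil {
        panic(err)
    }

    // The receiver keeps its own parser in lockstep and unmasks the prefix.
    dec := NewShakeSizeParser(nonce)
    size, _ := dec.Decode(wire.Bytes()[:2])
    fmt.Println(size == uint16(len(payload))) // true
}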
@@ -27,7 +27,7 @@ const (
     OptBasicFormat byte = 0
     OptChunkStream byte = 1
     // OptReuseTCPConnection byte = 2
-    // OptMetadataObfuscate byte = 4
+    OptMetadataObfuscate byte = 4
 )
 
 // Security types
@@ -72,6 +72,9 @@ type Conn struct {
     respBodyIV  [16]byte
     respBodyKey [16]byte
 
+    writeChunkSizeParser ChunkSizeEncoder
+    readChunkSizeParser  ChunkSizeDecoder
+
     net.Conn
     dataReader io.Reader
     dataWriter io.Writer
@@ -90,7 +93,7 @@ func NewClient(uuidStr, security string, alterID int, aead bool) (*Client, error) {
     c.users = append(c.users, user.GenAlterIDUsers(alterID)...)
     c.count = len(c.users)
 
-    c.opt = OptChunkStream
+    c.opt = OptChunkStream | OptMetadataObfuscate
     c.aead = aead
 
     security = strings.ToLower(security)
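
For reference, a minimal standalone sketch (not from the repo) of the request option byte that NewClient now sends; the constant values are the ones declared in the const block above.

package main

import "fmt"

const (
    OptChunkStream       byte = 1
    OptMetadataObfuscate byte = 4 // newly enabled length-mask option
)

func main() {
    opt := OptChunkStream | OptMetadataObfuscate // what NewClient sets above
    fmt.Printf("opt=0x%02x chunked=%t masked=%t\n",
        opt,
        opt&OptChunkStream == OptChunkStream,
        opt&OptMetadataObfuscate == OptMetadataObfuscate)
    // Output: opt=0x05 chunked=true masked=true
}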
@@ -150,6 +153,8 @@ func (c *Client) NewConn(rc net.Conn, target string, cmd CmdType) (*Conn, error)
             return nil, err
         }
     }
+    conn.writeChunkSizeParser = NewShakeSizeParser(conn.reqBodyIV[:])
+    conn.readChunkSizeParser = NewShakeSizeParser(conn.respBodyIV[:])
 
     // Request
     err = conn.Request(cmd)
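Note that the write-side parser is seeded with the request body IV and the read-side parser with the response body IV. A conforming VMess server derives its masks from the same two IVs, so the XOR keystreams in each direction stay in lockstep without any extra negotiation.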
@@ -292,12 +297,12 @@ func (c *Conn) Write(b []byte) (n int, err error) {
     if c.opt&OptChunkStream == OptChunkStream {
         switch c.security {
         case SecurityNone:
-            c.dataWriter = ChunkedWriter(c.Conn)
+            c.dataWriter = ChunkedWriter(c.Conn, c.writeChunkSizeParser)
 
         case SecurityAES128GCM:
             block, _ := aes.NewCipher(c.reqBodyKey[:])
             aead, _ := cipher.NewGCM(block)
-            c.dataWriter = AEADWriter(c.Conn, aead, c.reqBodyIV[:])
+            c.dataWriter = AEADWriter(c.Conn, aead, c.reqBodyIV[:], c.writeChunkSizeParser)
 
         case SecurityChacha20Poly1305:
             key := pool.GetBuffer(32)
@@ -306,7 +311,7 @@ func (c *Conn) Write(b []byte) (n int, err error) {
             t = md5.Sum(key[:16])
             copy(key[16:], t[:])
             aead, _ := chacha20poly1305.New(key)
-            c.dataWriter = AEADWriter(c.Conn, aead, c.reqBodyIV[:])
+            c.dataWriter = AEADWriter(c.Conn, aead, c.reqBodyIV[:], c.writeChunkSizeParser)
             pool.PutBuffer(key)
         }
     }
@@ -328,12 +333,12 @@ func (c *Conn) Read(b []byte) (n int, err error) {
     if c.opt&OptChunkStream == OptChunkStream {
         switch c.security {
         case SecurityNone:
-            c.dataReader = ChunkedReader(c.Conn)
+            c.dataReader = ChunkedReader(c.Conn, c.readChunkSizeParser)
 
         case SecurityAES128GCM:
             block, _ := aes.NewCipher(c.respBodyKey[:])
             aead, _ := cipher.NewGCM(block)
-            c.dataReader = AEADReader(c.Conn, aead, c.respBodyIV[:])
+            c.dataReader = AEADReader(c.Conn, aead, c.respBodyIV[:], c.readChunkSizeParser)
 
         case SecurityChacha20Poly1305:
             key := pool.GetBuffer(32)
@@ -342,7 +347,7 @@ func (c *Conn) Read(b []byte) (n int, err error) {
             t = md5.Sum(key[:16])
             copy(key[16:], t[:])
             aead, _ := chacha20poly1305.New(key)
-            c.dataReader = AEADReader(c.Conn, aead, c.respBodyIV[:])
+            c.dataReader = AEADReader(c.Conn, aead, c.respBodyIV[:], c.readChunkSizeParser)
             pool.PutBuffer(key)
         }
     }
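
To close the loop, here is a hedged end-to-end sketch (again hypothetical, written as if inside package vmess) that wires AEADWriter and AEADReader the same way Conn.Write and Conn.Read do above, with AES-128-GCM and a ShakeSizeParser seeded from the same IV on both sides. The helper name exampleAEADRoundTrip and the zero key/IV are placeholders; real connections use the negotiated body key and IV.

package vmess

import (
    "bytes"
    "crypto/aes"
    "crypto/cipher"
    "fmt"
    "io"
)

// exampleAEADRoundTrip frames one payload with a masked length and an AEAD tag,
// then reads it back through AEADReader.
func exampleAEADRoundTrip() {
    key := make([]byte, 16) // placeholder body key
    iv := make([]byte, 16)  // placeholder body IV; real code uses reqBodyIV/respBodyIV

    block, _ := aes.NewCipher(key)
    aead, _ := cipher.NewGCM(block)

    var wire bytes.Buffer
    w := AEADWriter(&wire, aead, iv, NewShakeSizeParser(iv))
    r := AEADReader(&wire, aead, iv, NewShakeSizeParser(iv))

    if _, err := w.Write([]byte("ping")); err != nil {
        panic(err)
    }

    got := make([]byte, 4)
    if _, err := io.ReadFull(r, got); err != nil {
        panic(err)
    }
    fmt.Printf("%s\n", got) // ping
}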