This repository has been archived by the owner on Aug 30, 2019. It is now read-only.

obfuscate: add exception when parsing empty-string identifiers (#514)
gbbr authored Nov 5, 2018
1 parent 45a5f37 commit 5e69c6a
Showing 2 changed files with 36 additions and 7 deletions.
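In effect: when the SQL tokenizer scans a quoted identifier whose contents are empty or whitespace-only, it now emits the bare pair of delimiters instead of an empty token, so the obfuscated query remains valid SQL.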
32 changes: 26 additions & 6 deletions obfuscate/sql_test.go
@@ -86,8 +86,6 @@ func TestSQLResourceWithError(t *testing.T) {
 }
 
 func TestSQLQuantizer(t *testing.T) {
-	assert := assert.New(t)
-
 	cases := []sqlTestCase{
 		{
 			"select * from users where id = 42",
@@ -340,6 +338,26 @@ FROM [Blogs] AS [b]
 ORDER BY [b].[Name]`,
 			`SELECT [ b ] . [ BlogId ], [ b ] . [ Name ] FROM [ Blogs ] ORDER BY [ b ] . [ Name ]`,
 		},
+		{
+			`SELECT * FROM users WHERE firstname=''`,
+			`SELECT * FROM users WHERE firstname = ?`,
+		},
+		{
+			`SELECT * FROM users WHERE firstname=' '`,
+			`SELECT * FROM users WHERE firstname = ?`,
+		},
+		{
+			`SELECT * FROM users WHERE firstname=""`,
+			`SELECT * FROM users WHERE firstname = ""`,
+		},
+		{
+			`SELECT * FROM users WHERE lastname=" "`,
+			`SELECT * FROM users WHERE lastname = ""`,
+		},
+		{
+			`SELECT * FROM users WHERE lastname=" "`,
+			`SELECT * FROM users WHERE lastname = ""`,
+		},
 		{
 			`SELECT [b].[BlogId], [b].[Name]
 FROM [Blogs] AS [b
@@ -348,10 +366,12 @@ ORDER BY [b].[Name]`,
 		},
 	}
 
-	for _, c := range cases {
-		s := SQLSpan(c.query)
-		NewObfuscator(nil).Obfuscate(s)
-		assert.Equal(c.expected, s.Resource)
+	for i, c := range cases {
+		t.Run(strconv.Itoa(i), func(t *testing.T) {
+			s := SQLSpan(c.query)
+			NewObfuscator(nil).Obfuscate(s)
+			assert.Equal(t, c.expected, s.Resource)
+		})
 	}
 }
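Aside: the loop rewrite above is Go's standard table-driven subtest pattern. t.Run names each case by its index, so a failure reports exactly which case broke, and the remaining cases still run. A minimal self-contained sketch of the same idiom (TestUpper and its cases are illustrative, not part of this repository):

package example_test

import (
	"strconv"
	"strings"
	"testing"
)

// TestUpper runs each case as a named subtest, mirroring the
// for i, c := range cases { t.Run(strconv.Itoa(i), ...) } shape above.
func TestUpper(t *testing.T) {
	cases := []struct{ in, want string }{
		{"abc", "ABC"},
		{"", ""},
	}
	for i, c := range cases {
		t.Run(strconv.Itoa(i), func(t *testing.T) {
			if got := strings.ToUpper(c.in); got != c.want {
				t.Errorf("ToUpper(%q) = %q, want %q", c.in, got, c.want)
			}
		})
	}
}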
11 changes: 10 additions & 1 deletion obfuscate/sql_tokenizer.go
@@ -3,6 +3,7 @@ package obfuscate
 import (
 	"bytes"
 	"strings"
+	"unicode"
 )
 
 // tokenizer.go implements a lexer-like iterator that tokenizes SQL and CQL
@@ -408,7 +409,15 @@ func (tkn *Tokenizer) scanString(delim uint16, typ int) (int, []byte) {
 		}
 		buffer.WriteByte(byte(ch))
 	}
-	return typ, buffer.Bytes()
+	buf := buffer.Bytes()
+	if typ == ID && len(buf) == 0 || bytes.IndexFunc(buf, func(r rune) bool { return !unicode.IsSpace(r) }) == -1 {
+		// This string is an empty or white-space only identifier.
+		// We should keep the start and end delimiters in order to
+		// avoid creating invalid queries.
+		// See: https://github.com/DataDog/datadog-trace-agent/issues/316
+		return typ, []byte{byte(delim), byte(delim)}
+	}
+	return typ, buf
 }
 
 func (tkn *Tokenizer) scanCommentType1(prefix string) (int, []byte) {
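For readers parsing the new condition: bytes.IndexFunc returns the byte index of the first rune satisfying the predicate, or -1 if none does, so comparing against -1 detects a buffer that is empty or all whitespace. A standalone sketch of just that check (emptyOrSpace is an illustrative helper, not part of the package):

package main

import (
	"bytes"
	"fmt"
	"unicode"
)

// emptyOrSpace reports whether buf contains no non-whitespace rune;
// this is vacuously true for an empty buffer.
func emptyOrSpace(buf []byte) bool {
	return bytes.IndexFunc(buf, func(r rune) bool { return !unicode.IsSpace(r) }) == -1
}

func main() {
	for _, s := range []string{"", "   ", "\t\n", "users", " u "} {
		fmt.Printf("%q -> %v\n", s, emptyOrSpace([]byte(s)))
	}
}

Note that in the committed condition, && binds tighter than ||, so as written the delimiter-preserving branch fires for any whitespace-only string, not only for identifiers; only the empty-buffer case is gated on typ == ID.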
