// Package q contains helpers for classifying SQL statements by CRUD
// type and extracting individual statements from a SQL string, built
// on the DataDog go-sqllexer tokenizer.
package q
import (
	"strings"

	"github.com/DataDog/go-sqllexer"
)
// Query is implemented by values that can render themselves as a
// complete SQL string.
type Query interface {
	GetFullSql() string
}

// NONE is the zero QueryType value, returned when a token or SQL
// string is not one of the recognized CRUD commands.
const NONE = 0

// QueryType identifies which CRUD command a SQL statement performs.
type QueryType int
// Supported CRUD query types. Values start at iota + 1 so that a real
// command never collides with NONE (0), which is also the zero value
// of QueryType.
const (
	SELECT QueryType = iota + 1
	UPDATE
	INSERT
	DELETE
)
// Conditional represents a single predicate of a SQL WHERE clause,
// decomposed into its left-hand side, operator, right-hand side, and
// the logical connector that joins it to the next predicate.
type Conditional struct {
	Key       string // left-hand side: column or field name
	Operator  string // comparison operator, e.g. "=", ">", "LIKE" — TODO confirm exact set against callers
	Value     string // right-hand side literal or expression
	Extension string // AND, OR, etc
}
func GetQueryTypeFromToken(token *sqllexer.Token) QueryType {
|
|
if token.Type != sqllexer.COMMAND {
|
|
return NONE
|
|
}
|
|
|
|
var foundType QueryType
|
|
switch strings.ToUpper(token.Value) {
|
|
case "SELECT":
|
|
foundType = SELECT
|
|
case "UPDATE":
|
|
foundType = UPDATE
|
|
case "INSERT":
|
|
foundType = INSERT
|
|
case "DELETE":
|
|
foundType = DELETE
|
|
default:
|
|
foundType = NONE
|
|
}
|
|
|
|
return foundType
|
|
}
|
|
|
|
func IsCrudSqlStatement(token *sqllexer.Token) bool {
|
|
queryType := GetQueryTypeFromToken(token)
|
|
return (queryType > 0 && queryType <= 4)
|
|
}
|
|
|
|
// IsTokenBeginingOfStatement reports whether currentToken starts a new
// CRUD statement. previousToken is accepted for context but currently
// unused — NOTE(review): either use it or drop it once callers allow.
func IsTokenBeginingOfStatement(currentToken *sqllexer.Token, previousToken *sqllexer.Token) bool {
	return IsCrudSqlStatement(currentToken)
}
func IsTokenEndOfStatement(token *sqllexer.Token) bool {
|
|
return (token.Type == sqllexer.EOF || token.Value == ";")
|
|
}
|
|
|
|
func GetQueryTypeFromSql(sql string) QueryType {
|
|
var queryType QueryType
|
|
|
|
lexer := sqllexer.New(sql)
|
|
for {
|
|
token := lexer.Scan()
|
|
if IsTokenEndOfStatement(token) {
|
|
break
|
|
}
|
|
|
|
queryType = GetQueryTypeFromToken(token)
|
|
if queryType > 0 {
|
|
break
|
|
}
|
|
}
|
|
|
|
return queryType
|
|
}
|
|
|
|
func ExtractSqlStatmentsFromString(sqlString string) []string {
|
|
var foundStatments []string
|
|
|
|
var isBeginingFound = false
|
|
var isEndingFound = false
|
|
|
|
var previousScannedToken sqllexer.Token
|
|
var currentWorkingStatment = ""
|
|
|
|
lexer := sqllexer.New(sqlString)
|
|
for {
|
|
token := lexer.Scan()
|
|
previousScannedToken = *token
|
|
|
|
if IsTokenEndOfStatement(token) {
|
|
isEndingFound = true
|
|
}
|
|
|
|
if isEndingFound {
|
|
if strings.Trim(currentWorkingStatment, " ") != "" {
|
|
foundStatments = append(foundStatments, currentWorkingStatment)
|
|
}
|
|
|
|
isBeginingFound = false
|
|
isEndingFound = false
|
|
currentWorkingStatment = ""
|
|
|
|
if token.Type == sqllexer.EOF {
|
|
break
|
|
} else {
|
|
continue
|
|
}
|
|
}
|
|
|
|
if !isBeginingFound && IsTokenBeginingOfStatement(token, &previousScannedToken) {
|
|
isBeginingFound = true
|
|
} else if !isBeginingFound {
|
|
continue
|
|
}
|
|
|
|
currentWorkingStatment = currentWorkingStatment + token.Value
|
|
}
|
|
|
|
return foundStatments
|
|
}
|