forked from Qihoo360/Atlas
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path tutorial-tokenize.lua
More file actions
49 lines (36 loc) · 1.46 KB
/
tutorial-tokenize.lua
File metadata and controls
49 lines (36 loc) · 1.46 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
--[[ $%BEGINLICENSE%$
Copyright (c) 2007, 2009, Oracle and/or its affiliates. All rights reserved.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License as
published by the Free Software Foundation; version 2 of the
License.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
02110-1301 USA
$%ENDLICENSE%$ --]]
--[[
--]]
local tokenizer = require("proxy.tokenizer")
--- MySQL Proxy hook: inspect every COM_QUERY packet for debugging.
-- Tokenizes the SQL text, prints each token with its classification,
-- then prints the normalized form of the query.
-- NOTE: defined as a global on purpose — the proxy runtime looks up
-- the hook by the global name `read_query`.
-- @param packet raw command packet; byte 1 is the command type,
--        the rest is the query text for COM_QUERY packets
function read_query(packet)
	-- only COM_QUERY packets carry SQL text; ignore everything else
	if packet:byte() ~= proxy.COM_QUERY then
		return
	end

	local tokens = tokenizer.tokenize(packet:sub(2))

	-- debug dump: one line per token
	for idx, tok in ipairs(tokens) do
		local txt = tok.text
		-- quote string tokens so embedded quotes/whitespace are visible
		if tok.token_name == 'TK_STRING' then
			txt = string.format("%q", txt)
		end
		print(idx .. ": " .. " { " .. tok.token_name .. ", " .. txt .. " }" )
	end

	print("normalized query: " .. tokenizer.normalize(tokens))
	print("")
end