parent
634ed382f5
commit
824db2a43b
@ -1,45 +1,45 @@
|
||||
import mariadb
|
||||
|
||||
class dbconn:
|
||||
def __init__(self) -> None:
|
||||
self.db = None
|
||||
self.cur = None
|
||||
def clear(self):
|
||||
drop_all = f'''
|
||||
SET FOREIGN_KEY_CHECKS = 0;
|
||||
|
||||
SET @tables = NULL;
|
||||
|
||||
SELECT GROUP_CONCAT('`', table_schema, '`.`', table_name, '`') INTO @tables
|
||||
FROM information_schema.tables
|
||||
WHERE table_schema = '{self.db.database}';
|
||||
|
||||
SET @tables = CONCAT('DROP TABLE ', @tables);
|
||||
PREPARE stmt FROM @tables;
|
||||
EXECUTE stmt;
|
||||
DEALLOCATE PREPARE stmt;
|
||||
SET FOREIGN_KEY_CHECKS = 1;
|
||||
'''
|
||||
if self.db:
|
||||
if not self.cur:
|
||||
self.cur = self.db.cursor()
|
||||
self.cur.execute(drop_all)
|
||||
|
||||
def connect(self, ip, password = '0508', user = 'root', db = 'db', port = 3306):
|
||||
try:
|
||||
self.db = mariadb.connect(
|
||||
user = user,
|
||||
password = password,
|
||||
host = ip,
|
||||
port = port,
|
||||
database = db
|
||||
)
|
||||
self.cur = self.db.cursor()
|
||||
|
||||
except mariadb.Error as e:
|
||||
print(e)
|
||||
self.db = None
|
||||
self.cur = None
|
||||
|
||||
def exec(self, sql, params = None):
|
||||
import mariadb
|
||||
|
||||
class dbconn:
    """Minimal wrapper around a mariadb connection and its cursor."""

    def __init__(self) -> None:
        self.db = None   # mariadb connection; set by connect()
        self.cur = None  # cursor over self.db; lazily (re)created

    def clear(self):
        """Drop every table of the connected database.

        No-op when there is no live connection.
        NOTE(review): the generated script contains multiple statements;
        the connection must allow multi-statement execution for a single
        cursor.execute() call to accept it — TODO confirm connector config.
        """
        if not self.db:
            # BUGFIX: previously the f-string below dereferenced
            # self.db.database before this guard, raising AttributeError
            # when no connection was open.
            return
        drop_all = f'''
        SET FOREIGN_KEY_CHECKS = 0;

        SET @tables = NULL;

        SELECT GROUP_CONCAT('`', table_schema, '`.`', table_name, '`') INTO @tables
        FROM information_schema.tables
        WHERE table_schema = '{self.db.database}';

        SET @tables = CONCAT('DROP TABLE ', @tables);
        PREPARE stmt FROM @tables;
        EXECUTE stmt;
        DEALLOCATE PREPARE stmt;
        SET FOREIGN_KEY_CHECKS = 1;
        '''
        if not self.cur:
            self.cur = self.db.cursor()
        self.cur.execute(drop_all)

    def connect(self, ip, password = '0508', user = 'root', db = 'db', port = 3306):
        """Open a connection and cursor; on failure print the error and
        reset both handles to None."""
        try:
            self.db = mariadb.connect(
                user = user,
                password = password,
                host = ip,
                port = port,
                database = db
            )
            self.cur = self.db.cursor()
        except mariadb.Error as e:
            print(e)
            self.db = None
            self.cur = None

    def exec(self, sql, params = None):
        """Execute *sql* on the current cursor.

        BUGFIX: *params* was accepted but silently ignored; it is now
        forwarded to the cursor for parameterized execution.
        """
        if params is None:
            self.cur.execute(sql)
        else:
            self.cur.execute(sql, params)
|
@ -1,11 +1,11 @@
|
||||
from reconstruct.ast import Context, ast_node
|
||||
|
||||
def initialize():
    """Create and return a fresh compilation Context."""
    return Context()


def generate(ast, cxt):
    """Dispatch each recognized top-level key of *ast* to its node class.

    Unrecognized keys are ignored; handlers receive the whole ast dict.
    """
    for key in ast:
        handler = ast_node.types.get(key)
        if handler is not None:
            handler(None, ast, cxt)


__all__ = ["initialize", "generate"]
|
||||
from reconstruct.ast import Context, ast_node
|
||||
|
||||
def initialize():
    """Create and return a fresh compilation Context."""
    return Context()


def generate(ast, cxt):
    """Dispatch each recognized top-level key of *ast* to its node class.

    Unrecognized keys are ignored; handlers receive the whole ast dict.
    """
    for key in ast:
        handler = ast_node.types.get(key)
        if handler is not None:
            handler(None, ast, cxt)


__all__ = ["initialize", "generate"]
|
||||
|
@ -1,303 +1,303 @@
|
||||
from engine.utils import enlist, base62uuid, base62alp
|
||||
from reconstruct.storage import Context, TableInfo, ColRef
|
||||
|
||||
class ast_node:
    """Base class for SQL AST nodes.

    Subclasses hook the init/produce/spawn/consume stages, which __init__
    invokes in that fixed order.  `types` is a class-level registry mapping
    a first-order keyword (e.g. 'select') to the node class that handles it;
    it is populated by include() at module load time.
    """
    header = []          # shared header lines (class-level, common to all nodes)
    types = dict()       # first-order keyword -> node class (dispatch registry)
    first_order = False  # subclasses set a str keyword to become dispatchable

    def __init__(self, parent:"ast_node", node, context:Context = None):
        # Root nodes receive an explicit context; children inherit the parent's.
        self.context = parent.context if context is None else context
        self.parent = parent
        self.sql = ''
        self.datasource = None
        # Fixed construction pipeline — subclasses override these stages.
        self.init(node)
        self.produce(node)
        self.spawn(node)
        self.consume(node)

    def emit(self, code):
        # Flush finished SQL to the shared context buffer.
        self.context.emit(code)
    def add(self, code):
        # Append a fragment to this node's SQL, space-separated.
        self.sql += code + ' '

    name = 'null'  # SQL keyword of this node; default init() emits it upper-cased

    def init(self, _):
        self.add(self.__class__.name.upper())
    def produce(self, _):
        pass
    def spawn(self, _):
        pass

    def consume(self, _):
        # Only the root node (no parent) terminates and emits the statement.
        if self.parent is None:
            self.emit(self.sql+';\n')
|
||||
|
||||
|
||||
from reconstruct.expr import expr
|
||||
|
||||
|
||||
class projection(ast_node):
    """SELECT statement node: projection list, FROM/WHERE/GROUP BY children,
    optional ORDER BY and INTO OUTFILE tails."""
    name = 'projection'
    first_order = 'select'

    def init(self, _):
        # Suppress the default keyword emission; produce() adds 'SELECT'.
        pass

    def produce(self, node):
        p = node['select']
        self.projections = p if type(p) is list else [p]
        self.add('SELECT')

    def spawn(self, node):
        self.datasource = None  # datasource is Join instead of TableInfo
        if 'from' in node:
            from_clause = node['from']
            self.datasource = join(self, from_clause)
            if 'assumptions' in from_clause:
                self.assumptions = enlist(from_clause['assumptions'])

        if self.datasource is not None:
            # Make this FROM scope the context-wide datasource for children.
            self.datasource_changed = True
            self.prev_datasource = self.context.datasource
            self.context.datasource = self.datasource

        self.where = filter(self, node['where']) if 'where' in node else None
        self.group_node = groupby(self, node['groupby']) if 'groupby' in node else None

    def consume(self, node):
        # Deal with projections: build the output table and SELECT list.
        self.out_table = TableInfo('out_'+base62uuid(4), [], self.context)
        cols = []
        col_exprs = []
        for i, proj in enumerate(self.projections):
            compound = False
            self.datasource.rec = set()
            name = ''
            disp_name = ''
            if type(proj) is dict:
                if 'value' in proj:
                    e = proj['value']
                    name = expr(self, e).sql
                    # Display name keeps only the alphanumeric characters.
                    disp_name = ''.join([a if a in base62alp else '' for a in name])
                    compound = True  # compound column
                if 'name' in proj:  # renaming column by AS keyword
                    name += ' ' + proj['name']
                col_exprs.append(name)
            elif type(proj) is str:
                col = self.datasource.get_col(proj)
                name = col.name
                # BUGFIX: plain column projections were never appended to
                # col_exprs (dropped from SELECT) and disp_name was left
                # unbound in this branch.
                disp_name = name
                col_exprs.append(name)
            self.datasource.rec = None
            # TODO: Type deduction in Python
            cols.append(ColRef('unknown', self.out_table, None, disp_name, i, compound=compound))
        self.add(', '.join(col_exprs))

        def finialize(astnode:ast_node):
            # Append a child clause's SQL when the clause exists.
            if astnode is not None:
                self.add(astnode.sql)
        self.add('FROM')
        finialize(self.datasource)
        finialize(self.where)
        finialize(self.group_node)
        if 'orderby' in node:
            self.add(orderby(self, node['orderby']).sql)
        if 'outfile' in node:
            self.add(outfile(self, node['outfile']).sql)
        if self.parent is None:
            self.emit(self.sql+';\n')
        else:
            # TODO: subquery, name create tmp-table from subquery w/ alias as name
            pass

    def finalize(self):
        # BUGFIX: join.append()/join.produce() call projection.finalize()
        # on subqueries, but no such method existed (AttributeError).
        # Return the accumulated SQL of this (sub)query.
        return self.sql
|
||||
|
||||
|
||||
class orderby(ast_node):
    """ORDER BY clause renderer; also the base class for GROUP BY."""
    name = 'order by'

    def produce(self, node):
        # A missing clause renders as nothing at all (not even the keyword).
        if node is None:
            self.sql = ''
            return
        items = node if type(node) is list else [node]
        rendered = []
        for item in items:
            piece = expr(self, item['value']).sql
            if 'sort' in item and f'{item["sort"]}'.lower() == 'desc':
                piece = piece + ' DESC'
            rendered.append(piece)
        self.add(', '.join(rendered))
|
||||
|
||||
|
||||
class groupby(orderby):
    # Rendering is inherited from orderby; only the emitted keyword differs.
    name = 'group by'
|
||||
|
||||
|
||||
class join(ast_node):
    """FROM clause node: collects tables, subqueries and join terms and
    renders them as a comma-separated join list."""
    name = 'join'

    def init(self, _):
        self.joins:list = []      # rendered table/join terms
        self.tables = []          # TableInfo objects visible in this scope
        self.tables_dir = dict()  # table name or alias -> TableInfo
        # self.tmp_name = 'join_' + base62uuid(4)
        # self.datasource = TableInfo(self.tmp_name, [], self.context)

    def append(self, tbls, __alias = ''):
        # Parenthesize-and-alias only when an alias is given.
        alias = lambda t : '(' + t + ') ' + __alias if len(__alias) else t
        if type(tbls) is join:
            self.joins.append(alias(tbls.__str__()))
            self.tables += tbls.tables
            self.tables_dir = {**self.tables_dir, **tbls.tables_dir}
        elif type(tbls) is TableInfo:
            self.joins.append(alias(tbls.table_name))
            self.tables.append(tbls)
            self.tables_dir[tbls.table_name] = tbls
            for a in tbls.alias:
                self.tables_dir[a] = tbls
        elif type(tbls) is projection:
            self.joins.append(alias(tbls.finalize()))

    def produce(self, node):
        if type(node) is list:
            for d in node:
                self.append(join(self, d).__str__())
        elif type(node) is dict:
            alias = ''
            if 'value' in node:
                table_name = node['value']
                tbl = None
                if 'name' in node:
                    alias = node['name']
                if type(table_name) is dict:
                    if 'select' in table_name:
                        # TODO: subquery, create and register TableInfo in projection
                        tbl = projection(self, table_name).finalize()
                else:
                    tbl = self.context.tables_byname[table_name]
                    if 'name' in node:
                        tbl.add_alias(node['name'])
                self.append(tbl, alias)
            else:
                # BUGFIX: dict views are not subscriptable — keys[0] raised
                # TypeError; materialize them into a list first.
                keys = list(node.keys())
                if keys and keys[0].lower().endswith('join'):
                    j = join(self, node[keys[0]])
                    tablename = f' {keys[0]} {j}'
                    # BUGFIX: guard against a join clause without an ON part
                    # (keys[1] used to IndexError).
                    if len(keys) > 1 and keys[1].lower() == 'on':
                        tablename += f' on {expr(self, node[keys[1]])}'
                    self.joins.append(tablename)
                    self.tables += j.tables
                    self.tables_dir = {**self.tables_dir, **j.tables_dir}
        elif type(node) is str:
            self.append(self.context.tables_byname[node])

    def get_cols(self, colExpr: str) -> ColRef:
        # First table owning the column wins; None when not found.
        for t in self.tables:
            if colExpr in t.columns_byname:
                return t.columns_byname[colExpr]

    def parse_col_names(self, colExpr:str) -> ColRef:
        # Resolve 'col' locally or 'table.col' through the alias directory.
        parsedColExpr = colExpr.split('.')
        if len(parsedColExpr) <= 1:
            return self.get_cols(colExpr)
        else:
            datasource = self.tables_dir[parsedColExpr[0]]
            if datasource is None:
                raise ValueError(f'Table name/alias not defined{parsedColExpr[0]}')
            else:
                return datasource.parse_col_names(parsedColExpr[1])

    def consume(self, _):
        self.sql = ', '.join(self.joins)
        return super().consume(_)

    def __str__(self):
        return ', '.join(self.joins)
    def __repr__(self):
        return self.__str__()
|
||||
|
||||
|
||||
class filter(ast_node):
    """WHERE clause: renders its predicate expression after the keyword."""
    name = 'where'

    def produce(self, node):
        predicate = expr(self, node)
        self.add(predicate.sql)
|
||||
|
||||
|
||||
class create_table(ast_node):
    """CREATE TABLE statement: registers the table in the context and
    renders its column definitions."""
    name = 'create_table'
    first_order = name

    def init(self, node):
        # Placeholder; produce() rebuilds self.sql from the parsed node.
        self.sql = 'CREATE TABLE '

    def produce(self, node):
        ct = node[self.name]
        tbl = self.context.add_table(ct['name'], ct['columns'])
        col_defs = ', '.join(f'{c.name} {c.type.upper()}' for c in tbl.columns)
        self.sql = f'CREATE TABLE {tbl.table_name}({col_defs})'
|
||||
|
||||
|
||||
|
||||
class insert(ast_node):
    """INSERT statement: renders literal VALUES; subquery values are not
    yet handled."""
    name = 'insert'
    first_order = name

    def produce(self, node):
        values = node['query']['select']
        tbl = node['insert']
        self.sql = f'INSERT INTO {tbl} VALUES('
        # if len(values) != table.n_cols:
        #     raise ValueError("Column Mismatch")
        rendered = []
        for entry in values:
            if 'value' in entry:
                rendered.append(f"{entry['value']}")
            else:
                # subquery, dispatch to select astnode
                pass
        self.sql += ', '.join(rendered) + ')'
|
||||
|
||||
|
||||
class load(ast_node):
    """LOAD DATA INFILE statement renderer.

    Note: output spacing (double spaces after the keywords) reproduces the
    original template exactly.
    """
    name = "load"
    first_order = name

    def produce(self, node):
        node = node['load']
        infile = node["file"]["literal"]
        target = node["table"]
        self.sql = f'LOAD DATA INFILE  "{infile}" INTO TABLE  {target}'
        if 'term' in node:
            sep = node["term"]["literal"]
            self.sql += f' FIELDS TERMINATED BY  "{sep}"'
|
||||
|
||||
|
||||
class outfile(ast_node):
    """INTO OUTFILE tail for SELECT ... INTO OUTFILE."""
    name = "_outfile"

    def produce(self, node):
        # Location may be nested under 'loc' or given directly as 'literal'.
        filename = node['loc']['literal'] if 'loc' in node else node['literal']
        # BUGFIX: the computed filename was unused and a hard-coded
        # "(unknown)" placeholder was emitted instead.
        self.sql = f'INTO OUTFILE "{filename}"'
        if 'term' in node:
            self.sql += f' FIELDS TERMINATED BY "{node["term"]["literal"]}"'
|
||||
|
||||
|
||||
def include(objs):
    """Populate ast_node.types with every class in *objs* whose
    first_order attribute is a string dispatch keyword."""
    import inspect
    for _, member in inspect.getmembers(objs):
        if not inspect.isclass(member):
            continue
        if issubclass(member, ast_node) and type(member.first_order) is str:
            ast_node.types[member.first_order] = member
|
||||
|
||||
|
||||
import sys
|
||||
from engine.utils import enlist, base62uuid, base62alp
|
||||
from reconstruct.storage import Context, TableInfo, ColRef
|
||||
|
||||
class ast_node:
    """Base class for SQL AST nodes.

    Subclasses hook the init/produce/spawn/consume stages, which __init__
    invokes in that fixed order.  `types` is a class-level registry mapping
    a first-order keyword (e.g. 'select') to the node class that handles it;
    it is populated by include() at module load time.
    """
    header = []          # shared header lines (class-level, common to all nodes)
    types = dict()       # first-order keyword -> node class (dispatch registry)
    first_order = False  # subclasses set a str keyword to become dispatchable

    def __init__(self, parent:"ast_node", node, context:Context = None):
        # Root nodes receive an explicit context; children inherit the parent's.
        self.context = parent.context if context is None else context
        self.parent = parent
        self.sql = ''
        self.datasource = None
        # Fixed construction pipeline — subclasses override these stages.
        self.init(node)
        self.produce(node)
        self.spawn(node)
        self.consume(node)

    def emit(self, code):
        # Flush finished SQL to the shared context buffer.
        self.context.emit(code)
    def add(self, code):
        # Append a fragment to this node's SQL, space-separated.
        self.sql += code + ' '

    name = 'null'  # SQL keyword of this node; default init() emits it upper-cased

    def init(self, _):
        self.add(self.__class__.name.upper())
    def produce(self, _):
        pass
    def spawn(self, _):
        pass

    def consume(self, _):
        # Only the root node (no parent) terminates and emits the statement.
        if self.parent is None:
            self.emit(self.sql+';\n')
|
||||
|
||||
|
||||
from reconstruct.expr import expr
|
||||
|
||||
|
||||
class projection(ast_node):
    """SELECT statement node: projection list, FROM/WHERE/GROUP BY children,
    optional ORDER BY and INTO OUTFILE tails."""
    name = 'projection'
    first_order = 'select'

    def init(self, _):
        # Suppress the default keyword emission; produce() adds 'SELECT'.
        pass

    def produce(self, node):
        p = node['select']
        self.projections = p if type(p) is list else [p]
        self.add('SELECT')

    def spawn(self, node):
        self.datasource = None  # datasource is Join instead of TableInfo
        if 'from' in node:
            from_clause = node['from']
            self.datasource = join(self, from_clause)
            if 'assumptions' in from_clause:
                self.assumptions = enlist(from_clause['assumptions'])

        if self.datasource is not None:
            # Make this FROM scope the context-wide datasource for children.
            self.datasource_changed = True
            self.prev_datasource = self.context.datasource
            self.context.datasource = self.datasource

        self.where = filter(self, node['where']) if 'where' in node else None
        self.group_node = groupby(self, node['groupby']) if 'groupby' in node else None

    def consume(self, node):
        # Deal with projections: build the output table and SELECT list.
        self.out_table = TableInfo('out_'+base62uuid(4), [], self.context)
        cols = []
        col_exprs = []
        for i, proj in enumerate(self.projections):
            compound = False
            self.datasource.rec = set()
            name = ''
            disp_name = ''
            if type(proj) is dict:
                if 'value' in proj:
                    e = proj['value']
                    name = expr(self, e).sql
                    # Display name keeps only the alphanumeric characters.
                    disp_name = ''.join([a if a in base62alp else '' for a in name])
                    compound = True  # compound column
                if 'name' in proj:  # renaming column by AS keyword
                    name += ' ' + proj['name']
                col_exprs.append(name)
            elif type(proj) is str:
                col = self.datasource.get_col(proj)
                name = col.name
                # BUGFIX: plain column projections were never appended to
                # col_exprs (dropped from SELECT) and disp_name was left
                # unbound in this branch.
                disp_name = name
                col_exprs.append(name)
            self.datasource.rec = None
            # TODO: Type deduction in Python
            cols.append(ColRef('unknown', self.out_table, None, disp_name, i, compound=compound))
        self.add(', '.join(col_exprs))

        def finialize(astnode:ast_node):
            # Append a child clause's SQL when the clause exists.
            if astnode is not None:
                self.add(astnode.sql)
        self.add('FROM')
        finialize(self.datasource)
        finialize(self.where)
        finialize(self.group_node)
        if 'orderby' in node:
            self.add(orderby(self, node['orderby']).sql)
        if 'outfile' in node:
            self.add(outfile(self, node['outfile']).sql)
        if self.parent is None:
            self.emit(self.sql+';\n')
        else:
            # TODO: subquery, name create tmp-table from subquery w/ alias as name
            pass

    def finalize(self):
        # BUGFIX: join.append()/join.produce() call projection.finalize()
        # on subqueries, but no such method existed (AttributeError).
        # Return the accumulated SQL of this (sub)query.
        return self.sql
|
||||
|
||||
|
||||
class orderby(ast_node):
    """ORDER BY clause renderer; also the base class for GROUP BY."""
    name = 'order by'

    def produce(self, node):
        # A missing clause renders as nothing at all (not even the keyword).
        if node is None:
            self.sql = ''
            return
        items = node if type(node) is list else [node]
        rendered = []
        for item in items:
            piece = expr(self, item['value']).sql
            if 'sort' in item and f'{item["sort"]}'.lower() == 'desc':
                piece = piece + ' DESC'
            rendered.append(piece)
        self.add(', '.join(rendered))
|
||||
|
||||
|
||||
class groupby(orderby):
    # Rendering is inherited from orderby; only the emitted keyword differs.
    name = 'group by'
|
||||
|
||||
|
||||
class join(ast_node):
    """FROM clause node: collects tables, subqueries and join terms and
    renders them as a comma-separated join list."""
    name = 'join'

    def init(self, _):
        self.joins:list = []      # rendered table/join terms
        self.tables = []          # TableInfo objects visible in this scope
        self.tables_dir = dict()  # table name or alias -> TableInfo
        # self.tmp_name = 'join_' + base62uuid(4)
        # self.datasource = TableInfo(self.tmp_name, [], self.context)

    def append(self, tbls, __alias = ''):
        # Parenthesize-and-alias only when an alias is given.
        alias = lambda t : '(' + t + ') ' + __alias if len(__alias) else t
        if type(tbls) is join:
            self.joins.append(alias(tbls.__str__()))
            self.tables += tbls.tables
            self.tables_dir = {**self.tables_dir, **tbls.tables_dir}
        elif type(tbls) is TableInfo:
            self.joins.append(alias(tbls.table_name))
            self.tables.append(tbls)
            self.tables_dir[tbls.table_name] = tbls
            for a in tbls.alias:
                self.tables_dir[a] = tbls
        elif type(tbls) is projection:
            self.joins.append(alias(tbls.finalize()))

    def produce(self, node):
        if type(node) is list:
            for d in node:
                self.append(join(self, d).__str__())
        elif type(node) is dict:
            alias = ''
            if 'value' in node:
                table_name = node['value']
                tbl = None
                if 'name' in node:
                    alias = node['name']
                if type(table_name) is dict:
                    if 'select' in table_name:
                        # TODO: subquery, create and register TableInfo in projection
                        tbl = projection(self, table_name).finalize()
                else:
                    tbl = self.context.tables_byname[table_name]
                    if 'name' in node:
                        tbl.add_alias(node['name'])
                self.append(tbl, alias)
            else:
                # BUGFIX: dict views are not subscriptable — keys[0] raised
                # TypeError; materialize them into a list first.
                keys = list(node.keys())
                if keys and keys[0].lower().endswith('join'):
                    j = join(self, node[keys[0]])
                    tablename = f' {keys[0]} {j}'
                    # BUGFIX: guard against a join clause without an ON part
                    # (keys[1] used to IndexError).
                    if len(keys) > 1 and keys[1].lower() == 'on':
                        tablename += f' on {expr(self, node[keys[1]])}'
                    self.joins.append(tablename)
                    self.tables += j.tables
                    self.tables_dir = {**self.tables_dir, **j.tables_dir}
        elif type(node) is str:
            self.append(self.context.tables_byname[node])

    def get_cols(self, colExpr: str) -> ColRef:
        # First table owning the column wins; None when not found.
        for t in self.tables:
            if colExpr in t.columns_byname:
                return t.columns_byname[colExpr]

    def parse_col_names(self, colExpr:str) -> ColRef:
        # Resolve 'col' locally or 'table.col' through the alias directory.
        parsedColExpr = colExpr.split('.')
        if len(parsedColExpr) <= 1:
            return self.get_cols(colExpr)
        else:
            datasource = self.tables_dir[parsedColExpr[0]]
            if datasource is None:
                raise ValueError(f'Table name/alias not defined{parsedColExpr[0]}')
            else:
                return datasource.parse_col_names(parsedColExpr[1])

    def consume(self, _):
        self.sql = ', '.join(self.joins)
        return super().consume(_)

    def __str__(self):
        return ', '.join(self.joins)
    def __repr__(self):
        return self.__str__()
|
||||
|
||||
|
||||
class filter(ast_node):
    """WHERE clause: renders its predicate expression after the keyword."""
    name = 'where'

    def produce(self, node):
        predicate = expr(self, node)
        self.add(predicate.sql)
|
||||
|
||||
|
||||
class create_table(ast_node):
    """CREATE TABLE statement: registers the table in the context and
    renders its column definitions."""
    name = 'create_table'
    first_order = name

    def init(self, node):
        # Placeholder; produce() rebuilds self.sql from the parsed node.
        self.sql = 'CREATE TABLE '

    def produce(self, node):
        ct = node[self.name]
        tbl = self.context.add_table(ct['name'], ct['columns'])
        col_defs = ', '.join(f'{c.name} {c.type.upper()}' for c in tbl.columns)
        self.sql = f'CREATE TABLE {tbl.table_name}({col_defs})'
|
||||
|
||||
|
||||
|
||||
class insert(ast_node):
    """INSERT statement: renders literal VALUES; subquery values are not
    yet handled."""
    name = 'insert'
    first_order = name

    def produce(self, node):
        values = node['query']['select']
        tbl = node['insert']
        self.sql = f'INSERT INTO {tbl} VALUES('
        # if len(values) != table.n_cols:
        #     raise ValueError("Column Mismatch")
        rendered = []
        for entry in values:
            if 'value' in entry:
                rendered.append(f"{entry['value']}")
            else:
                # subquery, dispatch to select astnode
                pass
        self.sql += ', '.join(rendered) + ')'
|
||||
|
||||
|
||||
class load(ast_node):
    """LOAD DATA INFILE statement renderer.

    Note: output spacing (double spaces after the keywords) reproduces the
    original template exactly.
    """
    name = "load"
    first_order = name

    def produce(self, node):
        node = node['load']
        infile = node["file"]["literal"]
        target = node["table"]
        self.sql = f'LOAD DATA INFILE  "{infile}" INTO TABLE  {target}'
        if 'term' in node:
            sep = node["term"]["literal"]
            self.sql += f' FIELDS TERMINATED BY  "{sep}"'
|
||||
|
||||
|
||||
class outfile(ast_node):
    """INTO OUTFILE tail for SELECT ... INTO OUTFILE."""
    name = "_outfile"

    def produce(self, node):
        # Location may be nested under 'loc' or given directly as 'literal'.
        filename = node['loc']['literal'] if 'loc' in node else node['literal']
        # BUGFIX: the computed filename was unused and a hard-coded
        # "(unknown)" placeholder was emitted instead.
        self.sql = f'INTO OUTFILE "{filename}"'
        if 'term' in node:
            self.sql += f' FIELDS TERMINATED BY "{node["term"]["literal"]}"'
|
||||
|
||||
|
||||
def include(objs):
    """Populate ast_node.types with every class in *objs* whose
    first_order attribute is a string dispatch keyword."""
    import inspect
    for _, member in inspect.getmembers(objs):
        if not inspect.isclass(member):
            continue
        if issubclass(member, ast_node) and type(member.first_order) is str:
            ast_node.types[member.first_order] = member


import sys

# Register every dispatchable node class defined in this module.
include(sys.modules[__name__])
|
@ -1,128 +1,128 @@
|
||||
from reconstruct.ast import ast_node
|
||||
from reconstruct.storage import ColRef, TableInfo
|
||||
|
||||
|
||||
class expr(ast_node):
    """Expression node: renders a parsed expression tree into SQL text.

    Tracks whether the expression chain is vector-valued (references a
    column) and whether it is compound (built by an operator/agg listed in
    coumpound_generating_ops).
    """
    name='expr'
    builtin_func_maps = {
        'max': 'MAX',
        'min': 'MIN',
        'avg': 'AVG',
        'sum': 'SUM',
        'count' : 'COUNT',
        # vector variants: [1-argument form, windowed 2-argument form]
        'mins': ['mins', 'minw'],
        'maxs': ['maxs', 'maxw'],
        'avgs': ['avgs', 'avgw'],
        'sums': ['sums', 'sumw'],
    }

    binary_ops = {
        'sub':'-',
        'add':'+',
        'mul':'*',
        'div':'/',
        'mod':'%',
        'and':' AND ',
        'or':' OR ',
        'xor' : ' XOR ',
        'gt':'>',
        'lt':'<',
        'le':'<=',
        # BUGFIX: this entry was keyed 'gt' a second time, silently
        # overwriting '>' (so 'gt' rendered as '>=') and leaving no
        # 'ge' operator at all.
        'ge':'>='
    }

    compound_ops = {
    }

    unary_ops = {
        'neg' : '-',
        'not' : ' NOT '
    }

    coumpound_generating_ops = ['avgs', 'mins', 'maxs', 'sums'] + \
        list(binary_ops.keys()) + list(compound_ops.keys()) + list(unary_ops.keys() )

    def __init__(self, parent, node):
        self.raw_col = None
        self.inside_agg = False
        # Nested expressions inherit the aggregate-context flag.
        if(type(parent) is expr):
            self.inside_agg = parent.inside_agg
        ast_node.__init__(self, parent, node, None)

    def init(self, _):
        # NOTE(review): this imports engine.projection while projection
        # nodes built in this package come from reconstruct.ast — the
        # type check below may therefore never match; confirm intended.
        from engine.projection import projection
        parent = self.parent
        self.isvector = parent.isvector if type(parent) is expr else False
        self.is_compound = parent.is_compound if type(parent) is expr else False
        if type(parent) in [projection, expr]:
            self.datasource = parent.datasource
        else:
            self.datasource = self.context.datasource
        self.udf_map = parent.context.udf_map
        # User-defined functions first; builtins win on name collision.
        self.func_maps = {**self.udf_map, **self.builtin_func_maps}

    def produce(self, node):
        if type(node) is dict:
            for key, val in node.items():
                if key in self.func_maps:
                    # TODO: distinguish between UDF agg functions and other UDF functions.
                    self.inside_agg = True
                    if type(val) is list and len(val) > 1:
                        # Multi-argument call: pick the arity-matched variant.
                        cfunc = self.func_maps[key]
                        cfunc = cfunc[len(val) - 1] if type(cfunc) is list else cfunc
                        self.sql += f"{cfunc}("
                        for i, p in enumerate(val):
                            self.sql += expr(self, p).sql + (',' if i < len(val) - 1 else '')
                    else:
                        funcname = self.func_maps[key]
                        funcname = funcname[0] if type(funcname) is list else funcname
                        self.sql += f"{funcname}("
                        self.sql += expr(self, val).sql
                    self.sql += ')'
                    self.inside_agg = False
                elif key in self.binary_ops:
                    l = expr(self, val[0]).sql
                    r = expr(self, val[1]).sql
                    self.sql += f'({l}{self.binary_ops[key]}{r})'
                elif key in self.compound_ops:
                    x = []
                    if type(val) is list:
                        for v in val:
                            x.append(expr(self, v).sql)
                    self.sql = self.compound_ops[key][1](x)
                elif key in self.unary_ops:
                    self.sql += f'{self.unary_ops[key]}({expr(self, val).sql})'
                else:
                    print(f'Undefined expr: {key}{val}')

                if key in self.coumpound_generating_ops and not self.is_compound:
                    # Propagate compound-ness up the expression chain.
                    self.is_compound = True
                    p = self.parent
                    while type(p) is expr and not p.is_compound:
                        p.is_compound = True
                        p = p.parent

        elif type(node) is str:
            # Bare string = column reference: mark the chain vector-valued
            # and resolve the name against the datasource.
            p = self.parent
            while type(p) is expr and not p.isvector:
                p.isvector = True
                p = p.parent

            self.raw_col = self.datasource.parse_col_names(node)
            self.raw_col = self.raw_col if type(self.raw_col) is ColRef else None
            if self.raw_col is not None:
                self.sql = self.raw_col.name
            else:
                self.sql = node

        elif type(node) is bool:
            self.sql = '1' if node else '0'
        else:
            self.sql = f'{node}'

    def __str__(self):
        return self.sql
    def __repr__(self):
        return self.__str__()
|
||||
|
||||
from reconstruct.ast import ast_node
|
||||
from reconstruct.storage import ColRef, TableInfo
|
||||
|
||||
|
||||
class expr(ast_node):
    """Expression node: renders a parsed expression tree into SQL text.

    Tracks whether the expression chain is vector-valued (references a
    column) and whether it is compound (built by an operator/agg listed in
    coumpound_generating_ops).
    """
    name='expr'
    builtin_func_maps = {
        'max': 'MAX',
        'min': 'MIN',
        'avg': 'AVG',
        'sum': 'SUM',
        'count' : 'COUNT',
        # vector variants: [1-argument form, windowed 2-argument form]
        'mins': ['mins', 'minw'],
        'maxs': ['maxs', 'maxw'],
        'avgs': ['avgs', 'avgw'],
        'sums': ['sums', 'sumw'],
    }

    binary_ops = {
        'sub':'-',
        'add':'+',
        'mul':'*',
        'div':'/',
        'mod':'%',
        'and':' AND ',
        'or':' OR ',
        'xor' : ' XOR ',
        'gt':'>',
        'lt':'<',
        'le':'<=',
        # BUGFIX: this entry was keyed 'gt' a second time, silently
        # overwriting '>' (so 'gt' rendered as '>=') and leaving no
        # 'ge' operator at all.
        'ge':'>='
    }

    compound_ops = {
    }

    unary_ops = {
        'neg' : '-',
        'not' : ' NOT '
    }

    coumpound_generating_ops = ['avgs', 'mins', 'maxs', 'sums'] + \
        list(binary_ops.keys()) + list(compound_ops.keys()) + list(unary_ops.keys() )

    def __init__(self, parent, node):
        self.raw_col = None
        self.inside_agg = False
        # Nested expressions inherit the aggregate-context flag.
        if(type(parent) is expr):
            self.inside_agg = parent.inside_agg
        ast_node.__init__(self, parent, node, None)

    def init(self, _):
        # NOTE(review): this imports engine.projection while projection
        # nodes built in this package come from reconstruct.ast — the
        # type check below may therefore never match; confirm intended.
        from engine.projection import projection
        parent = self.parent
        self.isvector = parent.isvector if type(parent) is expr else False
        self.is_compound = parent.is_compound if type(parent) is expr else False
        if type(parent) in [projection, expr]:
            self.datasource = parent.datasource
        else:
            self.datasource = self.context.datasource
        self.udf_map = parent.context.udf_map
        # User-defined functions first; builtins win on name collision.
        self.func_maps = {**self.udf_map, **self.builtin_func_maps}

    def produce(self, node):
        if type(node) is dict:
            for key, val in node.items():
                if key in self.func_maps:
                    # TODO: distinguish between UDF agg functions and other UDF functions.
                    self.inside_agg = True
                    if type(val) is list and len(val) > 1:
                        # Multi-argument call: pick the arity-matched variant.
                        cfunc = self.func_maps[key]
                        cfunc = cfunc[len(val) - 1] if type(cfunc) is list else cfunc
                        self.sql += f"{cfunc}("
                        for i, p in enumerate(val):
                            self.sql += expr(self, p).sql + (',' if i < len(val) - 1 else '')
                    else:
                        funcname = self.func_maps[key]
                        funcname = funcname[0] if type(funcname) is list else funcname
                        self.sql += f"{funcname}("
                        self.sql += expr(self, val).sql
                    self.sql += ')'
                    self.inside_agg = False
                elif key in self.binary_ops:
                    l = expr(self, val[0]).sql
                    r = expr(self, val[1]).sql
                    self.sql += f'({l}{self.binary_ops[key]}{r})'
                elif key in self.compound_ops:
                    x = []
                    if type(val) is list:
                        for v in val:
                            x.append(expr(self, v).sql)
                    self.sql = self.compound_ops[key][1](x)
                elif key in self.unary_ops:
                    self.sql += f'{self.unary_ops[key]}({expr(self, val).sql})'
                else:
                    print(f'Undefined expr: {key}{val}')

                if key in self.coumpound_generating_ops and not self.is_compound:
                    # Propagate compound-ness up the expression chain.
                    self.is_compound = True
                    p = self.parent
                    while type(p) is expr and not p.is_compound:
                        p.is_compound = True
                        p = p.parent

        elif type(node) is str:
            # Bare string = column reference: mark the chain vector-valued
            # and resolve the name against the datasource.
            p = self.parent
            while type(p) is expr and not p.isvector:
                p.isvector = True
                p = p.parent

            self.raw_col = self.datasource.parse_col_names(node)
            self.raw_col = self.raw_col if type(self.raw_col) is ColRef else None
            if self.raw_col is not None:
                self.sql = self.raw_col.name
            else:
                self.sql = node

        elif type(node) is bool:
            self.sql = '1' if node else '0'
        else:
            self.sql = f'{node}'

    def __str__(self):
        return self.sql
    def __repr__(self):
        return self.__str__()
|
||||
|
||||
|
@ -1,89 +1,89 @@
|
||||
class ColRef:
    """A column reference: type, raw column object, owning table, name and
    position, plus a compound flag for expression-derived columns."""

    def __init__(self, _ty, cobj, table:'TableInfo', name, id, compound = False):
        self.type = _ty
        self.cobj = cobj
        self.table = table
        self.name = name
        self.alias = set()
        self.id = id # position in table
        self.compound = compound # compound field (list as a field)
        # e.g. order by, group by, filter by expressions

        # BUGFIX: was a tuple, which made __setitem__ always raise TypeError.
        self.__arr__ = [_ty, cobj, table, name, id]

    def __getitem__(self, key):
        # String keys read attributes; integer keys index the packed fields.
        if type(key) is str:
            return getattr(self, key)
        else:
            return self.__arr__[key]

    def __setitem__(self, key, value):
        self.__arr__[key] = value
|
||||
|
||||
|
||||
class TableInfo:
    """Schema and alias metadata for one table registered in a Context."""
    def __init__(self, table_name, cols, cxt:'Context'):
        # statics
        self.table_name = table_name
        self.alias = set([table_name])  # every name this table answers to
        self.columns_byname = dict() # column_name, type
        self.columns = []
        self.cxt = cxt
        # keep track of temp vars
        self.rec = None
        self.add_cols(cols)
        # runtime
        self.order = [] # assumptions

        cxt.tables_byname[self.table_name] = self # construct reverse map

    def add_cols(self, cols, new = True):
        for i, c in enumerate(cols):
            self.add_col(c, new, i)
    def add_col(self, c, new = True, i = 0):
        # NOTE(review): parameter `i` is currently unused — position is
        # taken from len(self.columns) instead; confirm intended.
        _ty = c['type']
        if new:
            # Raw column dicts carry a one-key {typename: ...} mapping;
            # ColRef objects already expose a plain type string via __getitem__.
            _ty = _ty if type(c) is ColRef else list(_ty.keys())[0]
            col_object = ColRef(_ty, c, self, c['name'], len(self.columns))
        else:
            col_object = c
            c.table = self
        self.columns_byname[c['name']] = col_object
        self.columns.append(col_object)

    def add_alias(self, alias):
        # Register an extra lookup name; refuse names already taken in the context.
        if alias in self.cxt.tables_byname.keys():
            print("Error: table alias already exists")
            return
        self.cxt.tables_byname[alias] = self
        self.alias.add(alias)
    def parse_col_names(self, colExpr) -> ColRef:
        # Resolve 'col' locally, or 'table.col' through the context registry.
        parsedColExpr = colExpr.split('.')
        if len(parsedColExpr) <= 1:
            return self.columns_byname[colExpr]
        else:
            datasource = self.cxt.tables_byname[parsedColExpr[0]]
            if datasource is None:
                raise ValueError(f'Table name/alias not defined{parsedColExpr[0]}')
            else:
                return datasource.parse_col_names(parsedColExpr[1])
|
||||
|
||||
|
||||
class Context:
    """Global compilation state: registered tables/columns, the current
    datasource scope, user-defined functions, and the emitted SQL."""

    def __init__(self):
        self.sql = ''
        self.tables_byname = dict()  # table name or alias -> TableInfo
        self.col_byname = dict()
        self.tables = []
        self.cols = []
        self.datasource = None       # datasource of the active FROM scope
        self.udf_map = dict()

    def emit(self, sql:str):
        """Append a finished SQL fragment (space-separated) to the output."""
        self.sql = self.sql + sql + ' '

    def add_table(self, table_name, cols):
        """Create a TableInfo, register it, and return it."""
        created = TableInfo(table_name, cols, self)
        self.tables.append(created)
        return created
|
||||
|
||||
class ColRef:
    """A reference to one column of a table.

    Supports both name-based access (col['name']) and positional access
    (col[0]) over the fields (type, cobj, table, name, id).
    """

    def __init__(self, _ty, cobj, table: 'TableInfo', name, id, compound=False):
        self.type = _ty
        self.cobj = cobj
        self.table = table
        self.name = name
        self.alias = set()
        self.id = id  # position in table
        self.compound = compound  # compound field (list as a field)
        # e.g. order by, group by, filter by expressions

        # Positional view of the fields.  A list — not a tuple as before —
        # so that __setitem__ can assign by index without raising TypeError.
        self.__arr__ = [_ty, cobj, table, name, id]

    def __getitem__(self, key):
        # String keys read the attribute; other keys index the field list.
        if type(key) is str:
            return getattr(self, key)
        else:
            return self.__arr__[key]

    def __setitem__(self, key, value):
        # Mirror __getitem__: accept attribute names as well as positions.
        if type(key) is str:
            setattr(self, key, value)
        else:
            self.__arr__[key] = value
|
||||
class TableInfo:
    """Schema and alias bookkeeping for one table inside a Context.

    Registers itself in cxt.tables_byname under its name (and any later
    aliases) so dotted column expressions can be resolved across tables.
    """

    def __init__(self, table_name, cols, cxt: 'Context'):
        # statics
        self.table_name = table_name
        self.alias = set([table_name])
        self.columns_byname = dict()  # column name -> ColRef
        self.columns = []
        self.cxt = cxt
        # keep track of temp vars
        self.rec = None
        self.add_cols(cols)
        # runtime
        self.order = []  # assumptions

        cxt.tables_byname[self.table_name] = self  # construct reverse map

    def add_cols(self, cols, new=True):
        """Register every column description in *cols* via add_col."""
        for i, c in enumerate(cols):
            self.add_col(c, new, i)

    def add_col(self, c, new=True, i=0):
        """Register one column on this table.

        When *new* is true, *c* is a raw column description and a fresh
        ColRef is built for it; otherwise *c* is an existing ColRef that is
        re-homed onto this table.  *i* is kept for backward compatibility
        but unused — a column's id is its position in self.columns.
        """
        _ty = c['type']
        if new:
            # A raw description stores its type as a single-key mapping;
            # a ColRef already carries the type name directly.
            _ty = _ty if type(c) is ColRef else list(_ty.keys())[0]
            col_object = ColRef(_ty, c, self, c['name'], len(self.columns))
        else:
            col_object = c
            c.table = self
        self.columns_byname[c['name']] = col_object
        self.columns.append(col_object)

    def add_alias(self, alias):
        """Register *alias* as another context-wide name for this table."""
        if alias in self.cxt.tables_byname:
            print("Error: table alias already exists")
            return
        self.cxt.tables_byname[alias] = self
        self.alias.add(alias)

    def parse_col_names(self, colExpr) -> 'ColRef':
        """Resolve a (possibly dotted) column expression to a ColRef.

        'col' is looked up on this table; 'tbl.col' (or deeper) is
        delegated to the table/alias named by the first component.

        Raises ValueError if a referenced table name/alias is unknown.
        """
        parsedColExpr = colExpr.split('.')
        if len(parsedColExpr) <= 1:
            return self.columns_byname[colExpr]
        # .get() so an unknown table reports the intended ValueError;
        # the original indexed the dict first, raising a bare KeyError
        # and making its `is None` check unreachable.
        datasource = self.cxt.tables_byname.get(parsedColExpr[0])
        if datasource is None:
            raise ValueError(f'Table name/alias not defined: {parsedColExpr[0]}')
        # Re-join the remainder so multi-level names (a.b.c) keep all
        # trailing components instead of dropping everything after 'b'.
        return datasource.parse_col_names('.'.join(parsedColExpr[1:]))
||||
class Context:
    """Global compilation state: registered tables/columns and emitted SQL."""

    def __init__(self):
        # emitted SQL accumulates here
        self.sql = ''
        # reverse maps: name -> object
        self.tables_byname = {}
        self.col_byname = {}
        # registration order
        self.tables = []
        self.cols = []
        self.datasource = None
        self.udf_map = {}

    def emit(self, sql: str):
        """Append a SQL fragment followed by a single trailing space."""
        self.sql = self.sql + sql + ' '

    def add_table(self, table_name, cols):
        """Create a TableInfo for *table_name*, track it, and return it."""
        registered = TableInfo(table_name, cols, self)
        self.tables.append(registered)
        return registered
|
Binary file not shown.
@ -1,27 +0,0 @@
|
||||
{
|
||||
"BackgroundImageAbsolutePath": "c:\\users\\bill\\appdata\\local\\microsoft\\visualstudio\\17.0_03c65567\\extensions\\atkxhose.05t\\Images\\background.png",
|
||||
"BackgroundImagesDirectoryAbsolutePath": "c:\\users\\bill\\appdata\\local\\microsoft\\visualstudio\\17.0_03c65567\\extensions\\atkxhose.05t\\Images",
|
||||
"ExpandToIDE": false,
|
||||
"Extensions": ".png, .jpg, .gif, .bmp",
|
||||
"ImageBackgroundType": 0,
|
||||
"ImageFadeAnimationInterval": "PT5S",
|
||||
"ImageStretch": 0,
|
||||
"IsLimitToMainlyEditorWindow": false,
|
||||
"LoopSlideshow": true,
|
||||
"MaxHeight": 0,
|
||||
"MaxWidth": 0,
|
||||
"Opacity": 0.35,
|
||||
"PositionHorizon": 1,
|
||||
"PositionVertical": 1,
|
||||
"ShuffleSlideshow": false,
|
||||
"SoftEdgeX": 0,
|
||||
"SoftEdgeY": 0,
|
||||
"TileMode": 0,
|
||||
"UpdateImageInterval": "PT1M",
|
||||
"ViewBoxPointX": 0,
|
||||
"ViewBoxPointY": 0,
|
||||
"ViewPortHeight": 1,
|
||||
"ViewPortPointX": 0,
|
||||
"ViewPortPointY": 0,
|
||||
"ViewPortWidth": 1
|
||||
}
|
Loading…
Reference in new issue