parent 02723b1701
commit 634ed382f5

(file name not captured: a small MariaDB connection helper class)
@@ -0,0 +1,45 @@
import mariadb


class dbconn:
    def __init__(self) -> None:
        self.db = None
        self.cur = None

    def clear(self):
        # Drops every table in the connected schema; FOREIGN_KEY_CHECKS is
        # toggled off so drop order does not matter.
        if not self.db:
            # was unguarded: building the f-string below dereferenced
            # self.db.database even when no connection existed
            return
        drop_all = f'''
        SET FOREIGN_KEY_CHECKS = 0;

        SET @tables = NULL;

        SELECT GROUP_CONCAT('`', table_schema, '`.`', table_name, '`') INTO @tables
        FROM information_schema.tables
        WHERE table_schema = '{self.db.database}';

        SET @tables = CONCAT('DROP TABLE ', @tables);
        PREPARE stmt FROM @tables;
        EXECUTE stmt;
        DEALLOCATE PREPARE stmt;
        SET FOREIGN_KEY_CHECKS = 1;
        '''
        if not self.cur:
            self.cur = self.db.cursor()
        self.cur.execute(drop_all)

    def connect(self, ip, password='0508', user='root', db='db', port=3306):
        try:
            self.db = mariadb.connect(
                user=user,
                password=password,
                host=ip,
                port=port,
                database=db
            )
            self.cur = self.db.cursor()
        except mariadb.Error as e:
            print(e)
            self.db = None
            self.cur = None

    def exec(self, sql, params=None):
        # was: self.cur.execute(sql) -- the params argument was silently ignored
        if params is None:
            self.cur.execute(sql)
        else:
            self.cur.execute(sql, params)
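
A minimal usage sketch (not part of the commit) of the helper above; host and credentials are placeholder values:

    conn = dbconn()
    conn.connect('127.0.0.1', password='...', user='root', db='db')
    if conn.db is not None:
        conn.exec('CREATE TABLE IF NOT EXISTS t(a INT)')
        conn.exec('INSERT INTO t VALUES (?)', (1,))  # mariadb uses qmark-style placeholders
        conn.clear()  # drops every table in schema `db`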
(file name not captured: regenerated C++ output of AQuery's code generator; the old moving-average query over "sale" becomes a group-by query over "test")
@@ -1,65 +1,57 @@
-#include <unordered_map>
+#include "./server/libaquery.h"
 #include "./server/aggregations.h"
 #include "csv.h"
-#include "./server/libaquery.h"
 #include "./server/hasher.h"
+#include <unordered_map>
 
 extern "C" int __DLLEXPORT__ dllmain(Context* cxt) {
     using namespace std;
     using namespace types;
 
-    auto sale = new TableInfo<int,int>("sale", 2);
-    cxt->tables.insert({"sale", sale});
-    auto& sale_Month = *(ColRef<int> *)(&sale->colrefs[0]);
-    auto& sale_sales = *(ColRef<int> *)(&sale->colrefs[1]);
-    sale_Month.init("Month");
-    sale_sales.init("sales");
-    io::CSVReader<2> csv_reader_6ojNrU("moving_avg.csv");
-    csv_reader_6ojNrU.read_header(io::ignore_extra_column, "Month","sales");
-    int tmp_30abZdE5;
-    int tmp_zx6KcpzH;
-    while(csv_reader_6ojNrU.read_row(tmp_30abZdE5,tmp_zx6KcpzH)) {
-        sale_Month.emplace_back(tmp_30abZdE5);
-        sale_sales.emplace_back(tmp_zx6KcpzH);
-    }
-    auto out_4oKV = new TableInfo<value_type<decays<decltype(sale_Month)>>,value_type<decays<decltype(avgw(3,sale_sales))>>>("out_4oKV", 2);
-    cxt->tables.insert({"out_4oKV", out_4oKV});
-    auto& out_4oKV_Month = *(ColRef<value_type<decays<decltype(sale_Month)>>> *)(&out_4oKV->colrefs[0]);
-    auto& out_4oKV_avgw3salesales = *(ColRef<value_type<decays<decltype(avgw(3,sale_sales))>>> *)(&out_4oKV->colrefs[1]);
-    auto order_3t9jQY = sale->order_by<0>();
-    out_4oKV_Month.init("Month");
-    out_4oKV_Month = sale_Month[*order_3t9jQY];
-    out_4oKV_avgw3salesales.init("avgw3salesales");
-    out_4oKV_avgw3salesales = avgw(3,sale_sales[*order_3t9jQY]);
-    print(*out_4oKV);
-    FILE* fp_d7p2ph = fopen("moving_avg_output.csv", "w");
-    out_4oKV->printall(";", "\n", nullptr, fp_d7p2ph);
-    fclose(fp_d7p2ph);
-    typedef record<decltype(sale_sales[0])> record_typexsfbsFs;
-    unordered_map<record_typexsfbsFs, vector_type<uint32_t>, transTypes<record_typexsfbsFs, hasher>> g5N8IBNq;
-    for (uint32_t i4w = 0; i4w < sale_sales.size; ++i4w){
-        g5N8IBNq[forward_as_tuple(sale_sales[i4w])].emplace_back(i4w);
-    }
-    auto out_7JGJ = new TableInfo<decays<decltype(sale_Month)>,value_type<decays<decltype(minw(2,sale_sales))>>>("out_7JGJ", 2);
-    cxt->tables.insert({"out_7JGJ", out_7JGJ});
-    auto& out_7JGJ_Month = *(ColRef<decays<decltype(sale_Month)>> *)(&out_7JGJ->colrefs[0]);
-    auto& out_7JGJ_minw2salesales = *(ColRef<value_type<decays<decltype(minw(2,sale_sales))>>> *)(&out_7JGJ->colrefs[1]);
-    out_7JGJ_Month.init("Month");
-    out_7JGJ_minw2salesales.init("minw2salesales");
-    for(auto& iVb : g5N8IBNq) {
-        auto &val_6xjJXey = iVb.second;
-        sale->order_by<-1>(&val_6xjJXey);
-    }
-    for(auto& i5G : g5N8IBNq) {
-        auto &key_1e9JJOf = i5G.first;
-        auto &val_6g6wlkk = i5G.second;
-        out_7JGJ_Month.emplace_back(sale_Month[val_6g6wlkk]);
-        out_7JGJ_minw2salesales.emplace_back(minw(2,get<0>(key_1e9JJOf)));
-    }
-    print(*out_7JGJ);
-    FILE* fp_1yhzJM = fopen("flatten.csv", "w");
-    out_7JGJ->printall(",", "\n", nullptr, fp_1yhzJM);
-    fclose(fp_1yhzJM);
+    auto test = new TableInfo<int,int,int,int>("test", 4);
+    cxt->tables.insert({"test", test});
+    auto& test_a = *(ColRef<int> *)(&test->colrefs[0]);
+    auto& test_b = *(ColRef<int> *)(&test->colrefs[1]);
+    auto& test_c = *(ColRef<int> *)(&test->colrefs[2]);
+    auto& test_d = *(ColRef<int> *)(&test->colrefs[3]);
+    test_a.init("a");
+    test_b.init("b");
+    test_c.init("c");
+    test_d.init("d");
+    io::CSVReader<4> csv_reader_307VD4("test.csv");
+    csv_reader_307VD4.read_header(io::ignore_extra_column, "a","b","c","d");
+    int tmp_3LXIYQmp;
+    int tmp_1m5NCKR4;
+    int tmp_10LZcLgy;
+    int tmp_39pPZL8W;
+    while(csv_reader_307VD4.read_row(tmp_3LXIYQmp,tmp_1m5NCKR4,tmp_10LZcLgy,tmp_39pPZL8W)) {
+        test_a.emplace_back(tmp_3LXIYQmp);
+        test_b.emplace_back(tmp_1m5NCKR4);
+        test_c.emplace_back(tmp_10LZcLgy);
+        test_d.emplace_back(tmp_39pPZL8W);
+    }
+    typedef record<decltype(test_a[0]),decltype(test_b[0]),decltype(test_d[0])> record_type3OMslKw;
+    unordered_map<record_type3OMslKw, vector_type<uint32_t>, transTypes<record_type3OMslKw, hasher>> g7LNVAss;
+    for (uint32_t i1T = 0; i1T < test_a.size; ++i1T){
+        g7LNVAss[forward_as_tuple(test_a[i1T],test_b[i1T],test_d[i1T])].emplace_back(i1T);
+    }
+    auto out_HSfK = new TableInfo<decays<decltype(sum(test_c))>,value_type<decays<decltype(test_b)>>,value_type<decays<decltype(test_d)>>>("out_HSfK", 3);
+    cxt->tables.insert({"out_HSfK", out_HSfK});
+    auto& out_HSfK_sumtestc = *(ColRef<decays<decltype(sum(test_c))>> *)(&out_HSfK->colrefs[0]);
+    auto& out_HSfK_b = *(ColRef<value_type<decays<decltype(test_b)>>> *)(&out_HSfK->colrefs[1]);
+    auto& out_HSfK_d = *(ColRef<value_type<decays<decltype(test_d)>>> *)(&out_HSfK->colrefs[2]);
+    out_HSfK_sumtestc.init("sumtestc");
+    out_HSfK_b.init("b");
+    out_HSfK_d.init("d");
+    for(auto& i18 : g7LNVAss) {
+        auto &key_3s5slnK = i18.first;
+        auto &val_2nNLv0D = i18.second;
+        out_HSfK_sumtestc.emplace_back(sum(test_c[val_2nNLv0D]));
+        out_HSfK_b.emplace_back(get<1>(key_3s5slnK));
+        out_HSfK_d.emplace_back(get<2>(key_3s5slnK));
+    }
+    auto d5b7C95U = out_HSfK->order_by_view<-3,1>();
+    print(d5b7C95U);
     return 0;
 }
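
The avgw/minw calls in the removed query are windowed aggregates from ./server/aggregations.h; a rough Python sketch of the assumed semantics (a width-w window sliding over the column, shorter at the front):

    def avgw(w, xs):
        # assumed semantics of the windowed average behind moving_avg.csv -> moving_avg_output.csv
        out = []
        for i in range(len(xs)):
            win = xs[max(0, i - w + 1): i + 1]
            out.append(sum(win) / len(win))
        return out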
(file name not captured; from the imports this is reconstruct/__init__.py)
@@ -0,0 +1,11 @@
from reconstruct.ast import Context, ast_node


def initialize():
    return Context()


def generate(ast, cxt):
    for k in ast.keys():
        if k in ast_node.types.keys():
            ast_node.types[k](None, ast, cxt)


__all__ = ["initialize", "generate"]
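
A minimal sketch (not part of the commit) of driving this entry point; the AST dict mirrors the parser-output shape the node classes in reconstruct.ast expect:

    from reconstruct import initialize, generate

    cxt = initialize()
    generate({'create_table': {'name': 't',
                               'columns': [{'name': 'a', 'type': {'int': {}}}]}}, cxt)
    print(cxt.sql)  # roughly: CREATE TABLE t(a INT);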
(file name not captured; from the imports this is reconstruct/ast.py)
@@ -0,0 +1,303 @@
from engine.utils import enlist, base62uuid, base62alp
from reconstruct.storage import Context, TableInfo, ColRef


class ast_node:
    header = []
    types = dict()
    first_order = False

    def __init__(self, parent: "ast_node", node, context: Context = None):
        self.context = parent.context if context is None else context
        self.parent = parent
        self.sql = ''
        self.datasource = None
        self.init(node)
        self.produce(node)
        self.spawn(node)
        self.consume(node)

    def emit(self, code):
        self.context.emit(code)

    def add(self, code):
        self.sql += code + ' '

    name = 'null'

    def init(self, _):
        self.add(self.__class__.name.upper())

    def produce(self, _):
        pass

    def spawn(self, _):
        pass

    def consume(self, _):
        if self.parent is None:
            self.emit(self.sql + ';\n')


from reconstruct.expr import expr

class projection(ast_node):
|
||||||
|
name = 'projection'
|
||||||
|
first_order = 'select'
|
||||||
|
|
||||||
|
def init(self, _):
|
||||||
|
pass
|
||||||
|
def produce(self, node):
|
||||||
|
p = node['select']
|
||||||
|
self.projections = p if type(p) is list else [p]
|
||||||
|
self.add('SELECT')
|
||||||
|
|
||||||
|
def spawn(self, node):
|
||||||
|
self.datasource = None # datasource is Join instead of TableInfo
|
||||||
|
if 'from' in node:
|
||||||
|
from_clause = node['from']
|
||||||
|
self.datasource = join(self, from_clause)
|
||||||
|
if 'assumptions' in from_clause:
|
||||||
|
self.assumptions = enlist(from_clause['assumptions'])
|
||||||
|
|
||||||
|
if self.datasource is not None:
|
||||||
|
self.datasource_changed = True
|
||||||
|
self.prev_datasource = self.context.datasource
|
||||||
|
self.context.datasource = self.datasource
|
||||||
|
|
||||||
|
if 'where' in node:
|
||||||
|
self.where = filter(self, node['where'])
|
||||||
|
else:
|
||||||
|
self.where = None
|
||||||
|
|
||||||
|
if 'groupby' in node:
|
||||||
|
self.group_node = groupby(self, node['groupby'])
|
||||||
|
else:
|
||||||
|
self.group_node = None
|
||||||
|
|
||||||
|
def consume(self, node):
|
||||||
|
# deal with projections
|
||||||
|
self.out_table = TableInfo('out_'+base62uuid(4), [], self.context)
|
||||||
|
cols = []
|
||||||
|
col_exprs = []
|
||||||
|
for i, proj in enumerate(self.projections):
|
||||||
|
compound = False
|
||||||
|
self.datasource.rec = set()
|
||||||
|
name = ''
|
||||||
|
if type(proj) is dict:
|
||||||
|
|
||||||
|
if 'value' in proj:
|
||||||
|
e = proj['value']
|
||||||
|
name = expr(self, e).sql
|
||||||
|
disp_name = ''.join([a if a in base62alp else '' for a in name])
|
||||||
|
compound = True # compound column
|
||||||
|
if 'name' in proj: # renaming column by AS keyword
|
||||||
|
name += ' ' + proj['name']
|
||||||
|
col_exprs.append(name)
|
||||||
|
|
||||||
|
elif type(proj) is str:
|
||||||
|
col = self.datasource.get_col(proj)
|
||||||
|
name = col.name
|
||||||
|
self.datasource.rec = None
|
||||||
|
# TODO: Type deduction in Python
|
||||||
|
cols.append(ColRef('unknown', self.out_table, None, disp_name, i, compound=compound))
|
||||||
|
self.add(', '.join(col_exprs))
|
||||||
|
|
||||||
|
def finialize(astnode:ast_node):
|
||||||
|
if(astnode is not None):
|
||||||
|
self.add(astnode.sql)
|
||||||
|
self.add('FROM')
|
||||||
|
finialize(self.datasource)
|
||||||
|
finialize(self.where)
|
||||||
|
finialize(self.group_node)
|
||||||
|
if 'orderby' in node:
|
||||||
|
self.add(orderby(self, node['orderby']).sql)
|
||||||
|
if 'outfile' in node:
|
||||||
|
self.add(outfile(self, node['outfile']).sql)
|
||||||
|
if self.parent is None:
|
||||||
|
self.emit(self.sql+';\n')
|
||||||
|
else:
|
||||||
|
# TODO: subquery, name create tmp-table from subquery w/ alias as name
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class orderby(ast_node):
    name = 'order by'

    def produce(self, node):
        if node is None:
            self.sql = ''
            return
        elif type(node) is not list:
            node = [node]

        o_list = []

        for o in node:
            o_str = expr(self, o['value']).sql
            if 'sort' in o and f'{o["sort"]}'.lower() == 'desc':
                o_str += ' DESC'
            o_list.append(o_str)
        self.add(', '.join(o_list))


class groupby(orderby):
    name = 'group by'

class join(ast_node):
    name = 'join'

    def init(self, _):
        self.joins: list = []
        self.tables = []
        self.tables_dir = dict()
        # self.tmp_name = 'join_' + base62uuid(4)
        # self.datasource = TableInfo(self.tmp_name, [], self.context)

    def append(self, tbls, __alias=''):
        alias = lambda t: '(' + t + ') ' + __alias if len(__alias) else t
        if type(tbls) is join:
            self.joins.append(alias(tbls.__str__()))
            self.tables += tbls.tables
            self.tables_dir = {**self.tables_dir, **tbls.tables_dir}

        elif type(tbls) is TableInfo:
            self.joins.append(alias(tbls.table_name))
            self.tables.append(tbls)
            self.tables_dir[tbls.table_name] = tbls
            for a in tbls.alias:
                self.tables_dir[a] = tbls

        elif type(tbls) is projection:
            self.joins.append(alias(tbls.finalize()))

    def produce(self, node):
        if type(node) is list:
            for d in node:
                # pass the join node itself so append() can merge its tables;
                # passing its __str__() hit none of the type branches above
                self.append(join(self, d))

        elif type(node) is dict:
            alias = ''
            if 'value' in node:
                table_name = node['value']
                tbl = None
                if 'name' in node:
                    alias = node['name']
                if type(table_name) is dict:
                    if 'select' in table_name:
                        # TODO: subquery, create and register TableInfo in projection
                        tbl = projection(self, table_name).finalize()
                else:
                    tbl = self.context.tables_byname[table_name]
                    if 'name' in node:
                        tbl.add_alias(node['name'])

                self.append(tbl, alias)
            else:
                keys = list(node.keys())  # dict_keys is not subscriptable
                if keys[0].lower().endswith('join'):
                    j = join(self, node[keys[0]])
                    tablename = f' {keys[0]} {j}'
                    if keys[1].lower() == 'on':
                        tablename += f' on {expr(self, node[keys[1]])}'
                    self.joins.append(tablename)
                    self.tables += j.tables
                    self.tables_dir = {**self.tables_dir, **j.tables_dir}

        elif type(node) is str:
            self.append(self.context.tables_byname[node])

    def get_cols(self, colExpr: str) -> ColRef:
        for t in self.tables:
            if colExpr in t.columns_byname:
                return t.columns_byname[colExpr]

    def parse_col_names(self, colExpr: str) -> ColRef:
        parsedColExpr = colExpr.split('.')
        if len(parsedColExpr) <= 1:
            return self.get_cols(colExpr)
        else:
            datasource = self.tables_dir[parsedColExpr[0]]
            if datasource is None:
                raise ValueError(f'Table name/alias not defined: {parsedColExpr[0]}')
            else:
                return datasource.parse_col_names(parsedColExpr[1])

    def consume(self, _):
        self.sql = ', '.join(self.joins)
        return super().consume(_)

    def __str__(self):
        return ', '.join(self.joins)

    def __repr__(self):
        return self.__str__()


class filter(ast_node):
    name = 'where'

    def produce(self, node):
        self.add(expr(self, node).sql)


class create_table(ast_node):
    name = 'create_table'
    first_order = name

    def init(self, node):
        self.sql = 'CREATE TABLE '

    def produce(self, node):
        ct = node[self.name]
        tbl = self.context.add_table(ct['name'], ct['columns'])
        self.sql = f'CREATE TABLE {tbl.table_name}('
        columns = []
        for c in tbl.columns:
            columns.append(f'{c.name} {c.type.upper()}')
        self.sql += ', '.join(columns)
        self.sql += ')'


class insert(ast_node):
    name = 'insert'
    first_order = name

    def produce(self, node):
        values = node['query']['select']
        tbl = node['insert']
        self.sql = f'INSERT INTO {tbl} VALUES('
        # if len(values) != table.n_cols:
        #     raise ValueError("Column Mismatch")
        list_values = []
        for i, s in enumerate(values):
            if 'value' in s:
                list_values.append(f"{s['value']}")
            else:
                # subquery, dispatch to select astnode
                pass
        self.sql += ', '.join(list_values) + ')'


class load(ast_node):
    name = "load"
    first_order = name

    def produce(self, node):
        node = node['load']
        s1 = 'LOAD DATA INFILE '
        s2 = 'INTO TABLE '
        s3 = 'FIELDS TERMINATED BY '
        self.sql = f'{s1}"{node["file"]["literal"]}" {s2}{node["table"]}'
        if 'term' in node:
            self.sql += f' {s3}"{node["term"]["literal"]}"'


class outfile(ast_node):
    name = "_outfile"

    def produce(self, node):
        filename = node['loc']['literal'] if 'loc' in node else node['literal']
        self.sql = f'INTO OUTFILE "{filename}"'
        if 'term' in node:
            self.sql += f' FIELDS TERMINATED BY "{node["term"]["literal"]}"'


def include(objs):
    import inspect
    for _, cls in inspect.getmembers(objs):
        if inspect.isclass(cls) and issubclass(cls, ast_node) and type(cls.first_order) is str:
            ast_node.types[cls.first_order] = cls


import sys
include(sys.modules[__name__])
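
For reference, a sketch (not part of the commit) of what the include(sys.modules[__name__]) call at the bottom registers, and a round trip through two of the registered nodes; the AST shape is illustrative:

    from reconstruct import initialize, generate
    from reconstruct.ast import ast_node

    print(sorted(ast_node.types))
    # ['create_table', 'insert', 'load', 'select'] -- the classes with a str first_order

    cxt = initialize()
    generate({'create_table': {'name': 'sale', 'columns': [
        {'name': 'Month', 'type': {'int': {}}},
        {'name': 'sales', 'type': {'int': {}}}]}}, cxt)
    generate({'select': [{'value': {'avg': 'sales'}}],
              'from': {'value': 'sale'},
              'where': {'gt': ['Month', 2]}}, cxt)
    print(cxt.sql)
    # roughly: CREATE TABLE sale(Month INT, sales INT); SELECT AVG(sales) FROM sale WHERE (Month>2) ;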
(file name not captured; from the imports this is reconstruct/expr.py)
@@ -0,0 +1,128 @@
from reconstruct.ast import ast_node
from reconstruct.storage import ColRef, TableInfo


class expr(ast_node):
    name = 'expr'
    builtin_func_maps = {
        'max': 'MAX',
        'min': 'MIN',
        'avg': 'AVG',
        'sum': 'SUM',
        'count': 'COUNT',
        'mins': ['mins', 'minw'],
        'maxs': ['maxs', 'maxw'],
        'avgs': ['avgs', 'avgw'],
        'sums': ['sums', 'sumw'],
    }

    binary_ops = {
        'sub': '-',
        'add': '+',
        'mul': '*',
        'div': '/',
        'mod': '%',
        'and': ' AND ',
        'or': ' OR ',
        'xor': ' XOR ',
        'gt': '>',
        'lt': '<',
        'le': '<=',
        'ge': '>=',  # was a duplicate 'gt' key, which silently dropped the '>' mapping
    }

    compound_ops = {
    }

    unary_ops = {
        'neg': '-',
        'not': ' NOT ',
    }

    compound_generating_ops = ['avgs', 'mins', 'maxs', 'sums'] + \
        list(binary_ops.keys()) + list(compound_ops.keys()) + list(unary_ops.keys())

    def __init__(self, parent, node):
        self.raw_col = None
        self.inside_agg = False
        if type(parent) is expr:
            self.inside_agg = parent.inside_agg
        ast_node.__init__(self, parent, node, None)

    def init(self, _):
        from engine.projection import projection  # note: the old engine's projection, not reconstruct's
        parent = self.parent
        self.isvector = parent.isvector if type(parent) is expr else False
        self.is_compound = parent.is_compound if type(parent) is expr else False
        if type(parent) in [projection, expr]:
            self.datasource = parent.datasource
        else:
            self.datasource = self.context.datasource
        self.udf_map = parent.context.udf_map
        self.func_maps = {**self.udf_map, **self.builtin_func_maps}

    def produce(self, node):
        if type(node) is dict:
            for key, val in node.items():
                if key in self.func_maps:
                    # TODO: distinguish between UDF agg functions and other UDF functions.
                    self.inside_agg = True
                    if type(val) is list and len(val) > 1:
                        cfunc = self.func_maps[key]
                        cfunc = cfunc[len(val) - 1] if type(cfunc) is list else cfunc
                        self.sql += f"{cfunc}("
                        for i, p in enumerate(val):
                            self.sql += expr(self, p).sql + (',' if i < len(val) - 1 else '')
                    else:
                        funcname = self.func_maps[key]
                        funcname = funcname[0] if type(funcname) is list else funcname
                        self.sql += f"{funcname}("
                        self.sql += expr(self, val).sql
                    self.sql += ')'
                    self.inside_agg = False
                elif key in self.binary_ops:
                    l = expr(self, val[0]).sql
                    r = expr(self, val[1]).sql
                    self.sql += f'({l}{self.binary_ops[key]}{r})'
                elif key in self.compound_ops:
                    x = []
                    if type(val) is list:
                        for v in val:
                            x.append(expr(self, v).sql)
                    self.sql = self.compound_ops[key][1](x)
                elif key in self.unary_ops:
                    self.sql += f'{self.unary_ops[key]}({expr(self, val).sql})'
                else:
                    print(f'Undefined expr: {key}{val}')

                if key in self.compound_generating_ops and not self.is_compound:
                    self.is_compound = True
                    p = self.parent
                    while type(p) is expr and not p.is_compound:
                        p.is_compound = True
                        p = p.parent

        elif type(node) is str:
            p = self.parent
            while type(p) is expr and not p.isvector:
                p.isvector = True
                p = p.parent

            self.raw_col = self.datasource.parse_col_names(node)
            self.raw_col = self.raw_col if type(self.raw_col) is ColRef else None
            if self.raw_col is not None:
                self.sql = self.raw_col.name
            else:
                self.sql = node

        elif type(node) is bool:
            self.sql = '1' if node else '0'
        else:
            self.sql = f'{node}'

    def __str__(self):
        return self.sql

    def __repr__(self):
        return self.__str__()
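
A small table (not part of the commit) of the translation produce() implements, with approximate outputs:

    {'avg': 'sales'}             ->  AVG(sales)
    {'avgs': [3, 'sales']}       ->  avgw(3,sales)    # 2-argument form picks the windowed 'avgw' variant
    {'gt': ['Month', 2]}         ->  (Month>2)
    {'not': {'gt': ['a', 'b']}}  ->   NOT ((a>b))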
(file name not captured; from the imports this is reconstruct/storage.py)
@@ -0,0 +1,89 @@
class ColRef:
    def __init__(self, _ty, cobj, table: 'TableInfo', name, id, compound=False):
        self.type = _ty
        self.cobj = cobj
        self.table = table
        self.name = name
        self.alias = set()
        self.id = id  # position in table
        self.compound = compound  # compound field (list as a field)
        # e.g. order by, group by, filter by expressions

        # a list rather than a tuple, so __setitem__ below can actually mutate it
        self.__arr__ = [_ty, cobj, table, name, id]

    def __getitem__(self, key):
        if type(key) is str:
            return getattr(self, key)
        else:
            return self.__arr__[key]

    def __setitem__(self, key, value):
        self.__arr__[key] = value


class TableInfo:
    def __init__(self, table_name, cols, cxt: 'Context'):
        # statics
        self.table_name = table_name
        self.alias = set([table_name])
        self.columns_byname = dict()  # column name -> ColRef
        self.columns = []
        self.cxt = cxt
        # keep track of temp vars
        self.rec = None
        self.add_cols(cols)
        # runtime
        self.order = []  # assumptions

        cxt.tables_byname[self.table_name] = self  # construct reverse map

    def add_cols(self, cols, new=True):
        for i, c in enumerate(cols):
            self.add_col(c, new, i)

    def add_col(self, c, new=True, i=0):
        _ty = c['type']
        if new:
            _ty = _ty if type(c) is ColRef else list(_ty.keys())[0]
            col_object = ColRef(_ty, c, self, c['name'], len(self.columns))
        else:
            col_object = c
            c.table = self
        self.columns_byname[c['name']] = col_object
        self.columns.append(col_object)

    def add_alias(self, alias):
        if alias in self.cxt.tables_byname.keys():
            print("Error: table alias already exists")
            return
        self.cxt.tables_byname[alias] = self
        self.alias.add(alias)

    def parse_col_names(self, colExpr) -> ColRef:
        parsedColExpr = colExpr.split('.')
        if len(parsedColExpr) <= 1:
            return self.columns_byname[colExpr]
        else:
            datasource = self.cxt.tables_byname[parsedColExpr[0]]
            if datasource is None:
                raise ValueError(f'Table name/alias not defined: {parsedColExpr[0]}')
            else:
                return datasource.parse_col_names(parsedColExpr[1])


class Context:
    def __init__(self):
        self.sql = ''
        self.tables_byname = dict()
        self.col_byname = dict()
        self.tables = []
        self.cols = []
        self.datasource = None
        self.udf_map = dict()

    def emit(self, sql: str):
        self.sql += sql + ' '

    def add_table(self, table_name, cols):
        tbl = TableInfo(table_name, cols, self)
        self.tables.append(tbl)
        return tbl
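
A short sketch (not part of the commit) of how these classes fit together; the table and columns are illustrative:

    cxt = Context()
    sale = cxt.add_table('sale', [{'name': 'Month', 'type': {'int': {}}},
                                  {'name': 'sales', 'type': {'int': {}}}])
    sale.add_alias('s')
    col = sale.parse_col_names('s.sales')  # alias lookup recurses through cxt.tables_byname
    print(col.name, col.type, col.id)      # sales int 1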
(file name not captured: a Visual Studio background-image extension's config, committed alongside the code)
@@ -0,0 +1,27 @@
{
  "BackgroundImageAbsolutePath": "c:\\users\\bill\\appdata\\local\\microsoft\\visualstudio\\17.0_03c65567\\extensions\\atkxhose.05t\\Images\\background.png",
  "BackgroundImagesDirectoryAbsolutePath": "c:\\users\\bill\\appdata\\local\\microsoft\\visualstudio\\17.0_03c65567\\extensions\\atkxhose.05t\\Images",
  "ExpandToIDE": false,
  "Extensions": ".png, .jpg, .gif, .bmp",
  "ImageBackgroundType": 0,
  "ImageFadeAnimationInterval": "PT5S",
  "ImageStretch": 0,
  "IsLimitToMainlyEditorWindow": false,
  "LoopSlideshow": true,
  "MaxHeight": 0,
  "MaxWidth": 0,
  "Opacity": 0.35,
  "PositionHorizon": 1,
  "PositionVertical": 1,
  "ShuffleSlideshow": false,
  "SoftEdgeX": 0,
  "SoftEdgeY": 0,
  "TileMode": 0,
  "UpdateImageInterval": "PT1M",
  "ViewBoxPointX": 0,
  "ViewBoxPointY": 0,
  "ViewPortHeight": 1,
  "ViewPortPointX": 0,
  "ViewPortPointY": 0,
  "ViewPortWidth": 1
}