reconstruct

dev
bill sun 3 years ago
parent 02723b1701
commit 634ed382f5

@@ -0,0 +1,45 @@
import mariadb

class dbconn:
    def __init__(self) -> None:
        self.db = None
        self.cur = None

    def clear(self):
        drop_all = f'''
            SET FOREIGN_KEY_CHECKS = 0;
            SET @tables = NULL;
            SELECT GROUP_CONCAT('`', table_schema, '`.`', table_name, '`') INTO @tables
            FROM information_schema.tables
            WHERE table_schema = '{self.db.database}';
            SET @tables = CONCAT('DROP TABLE ', @tables);
            PREPARE stmt FROM @tables;
            EXECUTE stmt;
            DEALLOCATE PREPARE stmt;
            SET FOREIGN_KEY_CHECKS = 1;
        '''
        if self.db:
            if not self.cur:
                self.cur = self.db.cursor()
            self.cur.execute(drop_all)

    def connect(self, ip, password = '0508', user = 'root', db = 'db', port = 3306):
        try:
            self.db = mariadb.connect(
                user = user,
                password = password,
                host = ip,
                port = port,
                database = db
            )
            self.cur = self.db.cursor()
        except mariadb.Error as e:
            print(e)
            self.db = None
            self.cur = None

    def exec(self, sql, params = None):
        # pass bound parameters through to the cursor when provided
        if params is None:
            self.cur.execute(sql)
        else:
            self.cur.execute(sql, params)
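A minimal usage sketch of the new wrapper (assuming the file lands as dbconn.py, which prompt.py's `import dbconn` suggests; the host and credentials below are placeholders):

    from dbconn import dbconn

    conn = dbconn()
    conn.connect('127.0.0.1')          # defaults: root/'0508', db 'db', port 3306
    if conn.cur is not None:           # connect() resets db/cur to None on failure
        conn.exec('SELECT 1')
        print(conn.cur.fetchall())
        conn.clear()                   # drops every table in the connected schema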

@@ -160,12 +160,14 @@ class TableInfo:
        return self.get_col(col_name).cname
    def add_alias(self, alias):
        # TODO: Exception when alias is already defined.
        # TODO: Scoping of aliases should be constrained to the query.
        if alias in self.cxt.tables_byname.keys():
            print("Error: table alias already exists")
            return
        self.cxt.tables_byname[alias] = self
        self.alias.add(alias)
    def parse_tablenames(self, colExpr, materialize = True, raw = False):
    def parse_col_names(self, colExpr, materialize = True, raw = False):
        # get_col = self.get_col if materialize else self.get_col_d
        parsedColExpr = colExpr.split('.')
@@ -177,7 +179,7 @@
        if datasource is None:
            raise ValueError(f'Table name/alias not defined: {parsedColExpr[0]}')
        else:
            ret = datasource.parse_tablenames(parsedColExpr[1], raw)
            ret = datasource.parse_col_names(parsedColExpr[1], raw)
            from engine.expr import index_expr
            string = ret.reference() + index_expr
            if self.groupinfo is not None and ret and ret in self.groupinfo.raw_groups:
@@ -205,6 +207,23 @@ class Context:
    LOG_INFO = 'INFO'
    LOG_ERROR = 'ERROR'
    LOG_SILENT = 'SILENT'

    def new(self):
        self.tmp_names = set()
        self.udf_map = dict()
        self.headers = set(['"./server/libaquery.h"'])
        self.finalized = False
        # read header
        self.ccode = ''
        self.ccodelet = ''
        with open('header.cxx', 'r') as infile:
            self.ccode = infile.read()
        # datasource will be available after the `from` clause is parsed
        # and will be deactivated when the `from` goes out of scope
        self.datasource = None
        self.ds_stack = []
        self.scans = []
        self.removing_scan = False

    def __init__(self):
        self.tables:list[TableInfo] = []
        self.tables_byname = dict()

@@ -115,7 +115,7 @@ class expr(ast_node):
                p.isvector = True
                p = p.parent
            self._expr, self.raw_col = self.datasource.parse_tablenames(node, self.materialize_cols, True)
            self._expr, self.raw_col = self.datasource.parse_col_names(node, self.materialize_cols, True)
            self.raw_col = self.raw_col if type(self.raw_col) is ColRef else None
            if self.__abs and self.raw_col:
                self._expr = self.raw_col.reference() + ("" if self.inside_agg else index_expr)

@@ -45,12 +45,14 @@ class projection(ast_node):
                print(f'from func over table {node}')
            elif type(value) is str:
                self.datasource = self.context.tables_byname[value]
                if 'name' in from_clause:   # register an AS alias carried by the from clause
                    self.datasource.add_alias(from_clause['name'])
            if 'assumptions' in from_clause:
                self.assumptions = enlist(from_clause['assumptions'])
        elif type(from_clause) is str:
            self.datasource = self.context.tables_byname[from_clause]
        if self.datasource is None:
            raise ValueError('spawn error: from clause')

@@ -26,6 +26,7 @@ class Int(Types):
    name = "Int"
    cname = "int"
    ctype_name = "types::AINT"

class Float(Types):
    name = "Float"
    cname = "float"

@@ -1,65 +1,57 @@
#include <unordered_map>
#include "./server/libaquery.h"
#include "./server/aggregations.h"
#include "csv.h"
#include "./server/libaquery.h"
#include "./server/hasher.h"
#include <unordered_map>
extern "C" int __DLLEXPORT__ dllmain(Context* cxt) {
using namespace std;
using namespace types;
auto sale = new TableInfo<int,int>("sale", 2);
cxt->tables.insert({"sale", sale});
auto& sale_Month = *(ColRef<int> *)(&sale->colrefs[0]);
auto& sale_sales = *(ColRef<int> *)(&sale->colrefs[1]);
sale_Month.init("Month");
sale_sales.init("sales");
io::CSVReader<2> csv_reader_6ojNrU("moving_avg.csv");
csv_reader_6ojNrU.read_header(io::ignore_extra_column, "Month","sales");
int tmp_30abZdE5;
int tmp_zx6KcpzH;
while(csv_reader_6ojNrU.read_row(tmp_30abZdE5,tmp_zx6KcpzH)) {
auto test = new TableInfo<int,int,int,int>("test", 4);
cxt->tables.insert({"test", test});
auto& test_a = *(ColRef<int> *)(&test->colrefs[0]);
auto& test_b = *(ColRef<int> *)(&test->colrefs[1]);
auto& test_c = *(ColRef<int> *)(&test->colrefs[2]);
auto& test_d = *(ColRef<int> *)(&test->colrefs[3]);
test_a.init("a");
test_b.init("b");
test_c.init("c");
test_d.init("d");
io::CSVReader<4> csv_reader_307VD4("test.csv");
csv_reader_307VD4.read_header(io::ignore_extra_column, "a","b","c","d");
int tmp_3LXIYQmp;
int tmp_1m5NCKR4;
int tmp_10LZcLgy;
int tmp_39pPZL8W;
while(csv_reader_307VD4.read_row(tmp_3LXIYQmp,tmp_1m5NCKR4,tmp_10LZcLgy,tmp_39pPZL8W)) {
sale_Month.emplace_back(tmp_30abZdE5);
sale_sales.emplace_back(tmp_zx6KcpzH);
}
auto out_4oKV = new TableInfo<value_type<decays<decltype(sale_Month)>>,value_type<decays<decltype(avgw(3,sale_sales))>>>("out_4oKV", 2);
cxt->tables.insert({"out_4oKV", out_4oKV});
auto& out_4oKV_Month = *(ColRef<value_type<decays<decltype(sale_Month)>>> *)(&out_4oKV->colrefs[0]);
auto& out_4oKV_avgw3salesales = *(ColRef<value_type<decays<decltype(avgw(3,sale_sales))>>> *)(&out_4oKV->colrefs[1]);
auto order_3t9jQY = sale->order_by<0>();
out_4oKV_Month.init("Month");
out_4oKV_Month = sale_Month[*order_3t9jQY];
out_4oKV_avgw3salesales.init("avgw3salesales");
out_4oKV_avgw3salesales = avgw(3,sale_sales[*order_3t9jQY]);
print(*out_4oKV);
FILE* fp_d7p2ph = fopen("moving_avg_output.csv", "w");
out_4oKV->printall(";", "\n", nullptr, fp_d7p2ph);
fclose(fp_d7p2ph);
typedef record<decltype(sale_sales[0])> record_typexsfbsFs;
unordered_map<record_typexsfbsFs, vector_type<uint32_t>, transTypes<record_typexsfbsFs, hasher>> g5N8IBNq;
for (uint32_t i4w = 0; i4w < sale_sales.size; ++i4w){
g5N8IBNq[forward_as_tuple(sale_sales[i4w])].emplace_back(i4w);
test_a.emplace_back(tmp_3LXIYQmp);
test_b.emplace_back(tmp_1m5NCKR4);
test_c.emplace_back(tmp_10LZcLgy);
test_d.emplace_back(tmp_39pPZL8W);
}
auto out_7JGJ = new TableInfo<decays<decltype(sale_Month)>,value_type<decays<decltype(minw(2,sale_sales))>>>("out_7JGJ", 2);
cxt->tables.insert({"out_7JGJ", out_7JGJ});
auto& out_7JGJ_Month = *(ColRef<decays<decltype(sale_Month)>> *)(&out_7JGJ->colrefs[0]);
auto& out_7JGJ_minw2salesales = *(ColRef<value_type<decays<decltype(minw(2,sale_sales))>>> *)(&out_7JGJ->colrefs[1]);
out_7JGJ_Month.init("Month");
out_7JGJ_minw2salesales.init("minw2salesales");
for(auto& iVb : g5N8IBNq) {
auto &val_6xjJXey = iVb.second;
sale->order_by<-1>(&val_6xjJXey);
typedef record<decltype(test_a[0]),decltype(test_b[0]),decltype(test_d[0])> record_type3OMslKw;
unordered_map<record_type3OMslKw, vector_type<uint32_t>, transTypes<record_type3OMslKw, hasher>> g7LNVAss;
for (uint32_t i1T = 0; i1T < test_a.size; ++i1T){
g7LNVAss[forward_as_tuple(test_a[i1T],test_b[i1T],test_d[i1T])].emplace_back(i1T);
}
for(auto& i5G : g5N8IBNq) {
auto &key_1e9JJOf = i5G.first;
auto &val_6g6wlkk = i5G.second;
out_7JGJ_Month.emplace_back(sale_Month[val_6g6wlkk]);
out_7JGJ_minw2salesales.emplace_back(minw(2,get<0>(key_1e9JJOf)));
auto out_HSfK = new TableInfo<decays<decltype(sum(test_c))>,value_type<decays<decltype(test_b)>>,value_type<decays<decltype(test_d)>>>("out_HSfK", 3);
cxt->tables.insert({"out_HSfK", out_HSfK});
auto& out_HSfK_sumtestc = *(ColRef<decays<decltype(sum(test_c))>> *)(&out_HSfK->colrefs[0]);
auto& out_HSfK_b = *(ColRef<value_type<decays<decltype(test_b)>>> *)(&out_HSfK->colrefs[1]);
auto& out_HSfK_d = *(ColRef<value_type<decays<decltype(test_d)>>> *)(&out_HSfK->colrefs[2]);
out_HSfK_sumtestc.init("sumtestc");
out_HSfK_b.init("b");
out_HSfK_d.init("d");
for(auto& i18 : g7LNVAss) {
auto &key_3s5slnK = i18.first;
auto &val_2nNLv0D = i18.second;
out_HSfK_sumtestc.emplace_back(sum(test_c[val_2nNLv0D]));
out_HSfK_b.emplace_back(get<1>(key_3s5slnK));
out_HSfK_d.emplace_back(get<2>(key_3s5slnK));
}
print(*out_7JGJ);
FILE* fp_1yhzJM = fopen("flatten.csv", "w");
out_7JGJ->printall(",", "\n", nullptr, fp_1yhzJM);
fclose(fp_1yhzJM);
auto d5b7C95U = out_HSfK->order_by_view<-3,1>();
print(d5b7C95U);
return 0;
}
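The generated flatten query above groups rows by hashing a key tuple (`record_type3OMslKw`) to a vector of row indices (`g7LNVAss`), then aggregates each bucket. The same technique in a short Python sketch (the row values are made up for illustration):

    from collections import defaultdict

    # group rows of test(a, b, c, d) by (a, b, d), then emit sum(c) per bucket,
    # mirroring the record_type / unordered_map codegen above
    rows = [(1, 2, 10, 4), (1, 2, 20, 4), (3, 2, 30, 4)]  # hypothetical rows
    groups = defaultdict(list)
    for i, (a, b, c, d) in enumerate(rows):
        groups[(a, b, d)].append(i)                 # key tuple -> row indices
    for (a, b, d), idx in groups.items():
        print(sum(rows[i][2] for i in idx), b, d)   # sum(c), b, d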

@@ -1,10 +1,11 @@
from concurrent.futures import thread
import re
import time
import dbconn
from mo_parsing import ParseException
import aquery_parser as parser
import engine
import reconstruct as xengine
import subprocess
import mmap
import sys
@@ -15,8 +16,10 @@ try:
    os.remove('server.bin')
except Exception as e:
    print(type(e), e)
nullstream = open(os.devnull, 'w')
subprocess.call(['make', 'server.bin'])
subprocess.call(['make', 'server.bin'], stdout=nullstream)
cleanup = True
def rm():
@@ -40,7 +43,8 @@ def rm():
            os.remove(f)
    mm.close()
    cleanup = False
    nullstream.close()
atexit.register(rm)
def init():
@@ -77,7 +81,6 @@ q = 'SELECT p.Name, v.Name FROM Production.Product p JOIN Purchasing.ProductVend
res = parser.parse(q)
print(res)
# else:f
# if subprocess.call(['make', 'snippet']) == 0:
@@ -91,16 +94,25 @@ print(res)
# handle.close()
# os.remove(shm)
# exit()
keep = False
keep = True
cxt = engine.initialize()
cxt.Info(res)
while test_parser:
    try:
        if server.poll() is not None:
            init()
        print("> ", end="")
        ready = 1
        while ready == 1:
            mm.seek(0,os.SEEK_SET)
            ready = mm.read(2)[1]
            time.sleep(.00001)
        q = input().lower()
        if q == 'exec':
            if not keep or cxt is None:
                cxt = engine.initialize()
            else:
                cxt.new()
            stmts_stmts = stmts['stmts']
            if type(stmts_stmts) is list:
                for s in stmts_stmts:
@@ -110,10 +122,21 @@ while test_parser:
            cxt.Info(cxt.ccode)
            with open('out.cpp', 'wb') as outfile:
                outfile.write((cxt.finalize()).encode('utf-8'))
            if subprocess.call(['make', 'snippet']) == 0:
            if subprocess.call(['make', 'snippet'], stdout = nullstream) == 0:
                mm.seek(0,os.SEEK_SET)
                mm.write(b'\x01\x01')
            continue
        if q == 'xexec':
            cxt = xengine.initialize()
            stmts_stmts = stmts['stmts']
            if type(stmts_stmts) is list:
                for s in stmts_stmts:
                    xengine.generate(s, cxt)
            else:
                xengine.generate(stmts_stmts, cxt)
            print(cxt.sql)
            continue
        elif q.startswith('log'):
            qs = re.split(' |\t', q)
            if len(qs) > 1:
@@ -159,8 +182,17 @@ while test_parser:
            continue
        stmts = parser.parse(q)
        cxt.Info(stmts)
    except (ValueError, FileNotFoundError, ParseException) as e:
    except ParseException as e:
        print(e)
        continue
    except (ValueError, FileNotFoundError) as e:
        # rm()
        # init()
        print(e)
    except (KeyboardInterrupt):
        break
    except (Exception) as e:
        rm()
        cxt.Error(type(e), e)
        raise e
rm()
rm()

@@ -0,0 +1,11 @@
from reconstruct.ast import Context, ast_node

def initialize():
    return Context()

def generate(ast, cxt):
    for k in ast.keys():
        if k in ast_node.types.keys():
            ast_node.types[k](None, ast, cxt)

__all__ = ["initialize", "generate"]
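This mirrors how the new `xexec` branch in prompt.py drives the module; a standalone sketch (the query text is illustrative):

    import aquery_parser as parser
    import reconstruct as xengine

    stmts = parser.parse('select a, sum(c) from test group by a')
    cxt = xengine.initialize()
    stmt_list = stmts['stmts'] if type(stmts['stmts']) is list else [stmts['stmts']]
    for s in stmt_list:
        xengine.generate(s, cxt)   # dispatches on the statement's outermost key
    print(cxt.sql)                 # reconstructed SQL accumulates on the context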

@@ -0,0 +1,303 @@
from engine.utils import enlist, base62uuid, base62alp
from reconstruct.storage import Context, TableInfo, ColRef

class ast_node:
    header = []
    types = dict()
    first_order = False

    def __init__(self, parent:"ast_node", node, context:Context = None):
        self.context = parent.context if context is None else context
        self.parent = parent
        self.sql = ''
        self.datasource = None
        self.init(node)
        self.produce(node)
        self.spawn(node)
        self.consume(node)

    def emit(self, code):
        self.context.emit(code)
    def add(self, code):
        self.sql += code + ' '

    name = 'null'

    def init(self, _):
        self.add(self.__class__.name.upper())
    def produce(self, _):
        pass
    def spawn(self, _):
        pass
    def consume(self, _):
        if self.parent is None:
            self.emit(self.sql + ';\n')

from reconstruct.expr import expr

class projection(ast_node):
    name = 'projection'
    first_order = 'select'

    def init(self, _):
        pass
    def produce(self, node):
        p = node['select']
        self.projections = p if type(p) is list else [p]
        self.add('SELECT')
    def spawn(self, node):
        self.datasource = None  # datasource is join instead of TableInfo
        if 'from' in node:
            from_clause = node['from']
            self.datasource = join(self, from_clause)
            if 'assumptions' in from_clause:
                self.assumptions = enlist(from_clause['assumptions'])
        if self.datasource is not None:
            self.datasource_changed = True
            self.prev_datasource = self.context.datasource
            self.context.datasource = self.datasource
        if 'where' in node:
            self.where = filter(self, node['where'])
        else:
            self.where = None
        if 'groupby' in node:
            self.group_node = groupby(self, node['groupby'])
        else:
            self.group_node = None
    def consume(self, node):
        # deal with projections
        self.out_table = TableInfo('out_' + base62uuid(4), [], self.context)
        cols = []
        col_exprs = []
        for i, proj in enumerate(self.projections):
            compound = False
            self.datasource.rec = set()
            name = ''
            if type(proj) is dict:
                if 'value' in proj:
                    e = proj['value']
                    name = expr(self, e).sql
                    disp_name = ''.join([a if a in base62alp else '' for a in name])
                    compound = True  # compound column
                if 'name' in proj:  # renaming column by AS keyword
                    name += ' ' + proj['name']
                col_exprs.append(name)
            elif type(proj) is str:
                col = self.datasource.get_cols(proj)  # datasource is a join; look the name up across its tables
                name = col.name
                disp_name = name
                col_exprs.append(name)  # plain columns belong in the SELECT list too
            self.datasource.rec = None
            # TODO: Type deduction in Python
            cols.append(ColRef('unknown', self.out_table, None, disp_name, i, compound=compound))
        self.add(', '.join(col_exprs))

        def finalize(astnode:ast_node):
            if astnode is not None:
                self.add(astnode.sql)
        self.add('FROM')
        finalize(self.datasource)
        finalize(self.where)
        finalize(self.group_node)
        if 'orderby' in node:
            self.add(orderby(self, node['orderby']).sql)
        if 'outfile' in node:
            self.add(outfile(self, node['outfile']).sql)
        if self.parent is None:
            self.emit(self.sql + ';\n')
        else:
            # TODO: subquery; create tmp-table from subquery w/ alias as name
            pass

class orderby(ast_node):
    name = 'order by'
    def produce(self, node):
        if node is None:
            self.sql = ''
            return
        elif type(node) is not list:
            node = [node]
        o_list = []
        for o in node:
            o_str = expr(self, o['value']).sql
            if 'sort' in o and f'{o["sort"]}'.lower() == 'desc':
                o_str += ' DESC'
            o_list.append(o_str)
        self.add(', '.join(o_list))

class groupby(orderby):
    name = 'group by'

class join(ast_node):
    name = 'join'
    def init(self, _):
        self.joins:list = []
        self.tables = []
        self.tables_dir = dict()
        # self.tmp_name = 'join_' + base62uuid(4)
        # self.datasource = TableInfo(self.tmp_name, [], self.context)
    def append(self, tbls, __alias = ''):
        alias = lambda t: '(' + t + ') ' + __alias if len(__alias) else t
        if type(tbls) is join:
            self.joins.append(alias(tbls.__str__()))
            self.tables += tbls.tables
            self.tables_dir = {**self.tables_dir, **tbls.tables_dir}
        elif type(tbls) is TableInfo:
            self.joins.append(alias(tbls.table_name))
            self.tables.append(tbls)
            self.tables_dir[tbls.table_name] = tbls
            for a in tbls.alias:
                self.tables_dir[a] = tbls
        elif type(tbls) is projection:
            self.joins.append(alias(tbls.finalize()))
    def produce(self, node):
        if type(node) is list:
            for d in node:
                self.append(join(self, d))  # pass the join object so its tables propagate
        elif type(node) is dict:
            alias = ''
            if 'value' in node:
                table_name = node['value']
                tbl = None
                if 'name' in node:
                    alias = node['name']
                if type(table_name) is dict:
                    if 'select' in table_name:
                        # TODO: subquery, create and register TableInfo in projection
                        tbl = projection(self, table_name).finalize()
                else:
                    tbl = self.context.tables_byname[table_name]
                    if 'name' in node:
                        tbl.add_alias(node['name'])
                self.append(tbl, alias)
            else:
                keys = list(node.keys())  # dict_keys is not indexable
                if keys[0].lower().endswith('join'):
                    j = join(self, node[keys[0]])
                    tablename = f' {keys[0]} {j}'
                    if keys[1].lower() == 'on':
                        tablename += f' on {expr(self, node[keys[1]])}'
                    self.joins.append(tablename)
                    self.tables += j.tables
                    self.tables_dir = {**self.tables_dir, **j.tables_dir}
        elif type(node) is str:
            self.append(self.context.tables_byname[node])
    def get_cols(self, colExpr: str) -> ColRef:
        for t in self.tables:
            if colExpr in t.columns_byname:
                return t.columns_byname[colExpr]
    def parse_col_names(self, colExpr:str) -> ColRef:
        parsedColExpr = colExpr.split('.')
        if len(parsedColExpr) <= 1:
            return self.get_cols(colExpr)
        else:
            datasource = self.tables_dir.get(parsedColExpr[0])
            if datasource is None:
                raise ValueError(f'Table name/alias not defined: {parsedColExpr[0]}')
            else:
                return datasource.parse_col_names(parsedColExpr[1])
    def consume(self, _):
        self.sql = ', '.join(self.joins)
        return super().consume(_)
    def __str__(self):
        return ', '.join(self.joins)
    def __repr__(self):
        return self.__str__()

class filter(ast_node):
    name = 'where'
    def produce(self, node):
        self.add(expr(self, node).sql)

class create_table(ast_node):
    name = 'create_table'
    first_order = name
    def init(self, node):
        self.sql = 'CREATE TABLE '
    def produce(self, node):
        ct = node[self.name]
        tbl = self.context.add_table(ct['name'], ct['columns'])
        self.sql = f'CREATE TABLE {tbl.table_name}('
        columns = []
        for c in tbl.columns:
            columns.append(f'{c.name} {c.type.upper()}')
        self.sql += ', '.join(columns)
        self.sql += ')'

class insert(ast_node):
    name = 'insert'
    first_order = name
    def produce(self, node):
        values = node['query']['select']
        tbl = node['insert']
        self.sql = f'INSERT INTO {tbl} VALUES('
        # if len(values) != table.n_cols:
        #     raise ValueError("Column Mismatch")
        list_values = []
        for i, s in enumerate(values):
            if 'value' in s:
                list_values.append(f"{s['value']}")
            else:
                # subquery, dispatch to select astnode
                pass
        self.sql += ', '.join(list_values) + ')'

class load(ast_node):
    name = "load"
    first_order = name
    def produce(self, node):
        node = node['load']
        s1 = 'LOAD DATA INFILE '
        s2 = 'INTO TABLE '
        s3 = 'FIELDS TERMINATED BY '
        self.sql = f'{s1} "{node["file"]["literal"]}" {s2} {node["table"]}'
        if 'term' in node:
            self.sql += f' {s3} "{node["term"]["literal"]}"'

class outfile(ast_node):
    name = "_outfile"
    def produce(self, node):
        filename = node['loc']['literal'] if 'loc' in node else node['literal']
        self.sql = f'INTO OUTFILE "{filename}"'
        if 'term' in node:
            self.sql += f' FIELDS TERMINATED BY "{node["term"]["literal"]}"'

def include(objs):
    import inspect
    for _, cls in inspect.getmembers(objs):
        if inspect.isclass(cls) and issubclass(cls, ast_node) and type(cls.first_order) is str:
            ast_node.types[cls.first_order] = cls

import sys
include(sys.modules[__name__])
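`include(sys.modules[__name__])` registers every class whose `first_order` is a string into `ast_node.types`, which is exactly the table `reconstruct.generate` dispatches on; a small sketch:

    from reconstruct.ast import ast_node

    stmt = {'select': '*', 'from': 'sale'}     # illustrative parser output
    for k in stmt:
        if k in ast_node.types:
            print(ast_node.types[k].__name__)  # -> 'projection' for 'select'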

@@ -0,0 +1,128 @@
from reconstruct.ast import ast_node
from reconstruct.storage import ColRef, TableInfo

class expr(ast_node):
    name = 'expr'
    builtin_func_maps = {
        'max': 'MAX',
        'min': 'MIN',
        'avg': 'AVG',
        'sum': 'SUM',
        'count' : 'COUNT',
        'mins': ['mins', 'minw'],
        'maxs': ['maxs', 'maxw'],
        'avgs': ['avgs', 'avgw'],
        'sums': ['sums', 'sumw'],
    }

    binary_ops = {
        'sub':'-',
        'add':'+',
        'mul':'*',
        'div':'/',
        'mod':'%',
        'and':' AND ',
        'or':' OR ',
        'xor' : ' XOR ',
        'gt':'>',
        'lt':'<',
        'le':'<=',
        'ge':'>='
    }

    compound_ops = {
    }

    unary_ops = {
        'neg' : '-',
        'not' : ' NOT '
    }

    compound_generating_ops = ['avgs', 'mins', 'maxs', 'sums'] + \
        list(binary_ops.keys()) + list(compound_ops.keys()) + list(unary_ops.keys())

    def __init__(self, parent, node):
        self.raw_col = None
        self.inside_agg = False
        if type(parent) is expr:
            self.inside_agg = parent.inside_agg
        ast_node.__init__(self, parent, node, None)

    def init(self, _):
        from reconstruct.ast import projection  # deferred import avoids a circular dependency
        parent = self.parent
        self.isvector = parent.isvector if type(parent) is expr else False
        self.is_compound = parent.is_compound if type(parent) is expr else False
        if type(parent) in [projection, expr]:
            self.datasource = parent.datasource
        else:
            self.datasource = self.context.datasource
        self.udf_map = parent.context.udf_map
        self.func_maps = {**self.udf_map, **self.builtin_func_maps}

    def produce(self, node):
        if type(node) is dict:
            for key, val in node.items():
                if key in self.func_maps:
                    # TODO: distinguish between UDF agg functions and other UDF functions.
                    self.inside_agg = True
                    if type(val) is list and len(val) > 1:
                        cfunc = self.func_maps[key]
                        cfunc = cfunc[len(val) - 1] if type(cfunc) is list else cfunc
                        self.sql += f"{cfunc}("
                        for i, p in enumerate(val):
                            self.sql += expr(self, p).sql + (',' if i < len(val) - 1 else '')
                    else:
                        funcname = self.func_maps[key]
                        funcname = funcname[0] if type(funcname) is list else funcname
                        self.sql += f"{funcname}("
                        self.sql += expr(self, val).sql
                    self.sql += ')'  # close the call for both arities
                    self.inside_agg = False
                elif key in self.binary_ops:
                    l = expr(self, val[0]).sql
                    r = expr(self, val[1]).sql
                    self.sql += f'({l}{self.binary_ops[key]}{r})'
                elif key in self.compound_ops:
                    x = []
                    if type(val) is list:
                        for v in val:
                            x.append(expr(self, v).sql)
                    self.sql = self.compound_ops[key][1](x)
                elif key in self.unary_ops:
                    self.sql += f'{self.unary_ops[key]}({expr(self, val).sql})'
                else:
                    print(f'Undefined expr: {key} {val}')

                if key in self.compound_generating_ops and not self.is_compound:
                    self.is_compound = True
                    p = self.parent
                    while type(p) is expr and not p.is_compound:
                        p.is_compound = True
                        p = p.parent
        elif type(node) is str:
            p = self.parent
            while type(p) is expr and not p.isvector:
                p.isvector = True
                p = p.parent
            self.raw_col = self.datasource.parse_col_names(node)
            self.raw_col = self.raw_col if type(self.raw_col) is ColRef else None
            if self.raw_col is not None:
                self.sql = self.raw_col.name
            else:
                self.sql = node
        elif type(node) is bool:
            self.sql = '1' if node else '0'
        else:
            self.sql = f'{node}'

    def __str__(self):
        return self.sql
    def __repr__(self):
        return self.__str__()
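The operator tables above fully determine the rendering in `produce`: `{'gt': ['a', 1]}` comes out as `(a>1)`, and a two-argument `avgs` call picks the windowed form. A quick check of the tables themselves:

    from reconstruct.expr import expr

    print(expr.binary_ops['gt'])           # '>'
    print(expr.unary_ops['not'])           # ' NOT '
    print(expr.builtin_func_maps['avgs'])  # ['avgs', 'avgw']; index len(args)-1
                                           # selects 'avgw' for avgs(3, sales)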

@@ -0,0 +1,89 @@
class ColRef:
    def __init__(self, _ty, cobj, table:'TableInfo', name, id, compound = False):
        self.type = _ty
        self.cobj = cobj
        self.table = table
        self.name = name
        self.alias = set()
        self.id = id  # position in table
        self.compound = compound  # compound field (list as a field)
        # e.g. order by, group by, filter by expressions
        self.__arr__ = [_ty, cobj, table, name, id]  # a list, so __setitem__ can assign

    def __getitem__(self, key):
        if type(key) is str:
            return getattr(self, key)
        else:
            return self.__arr__[key]

    def __setitem__(self, key, value):
        self.__arr__[key] = value

class TableInfo:
    def __init__(self, table_name, cols, cxt:'Context'):
        # statics
        self.table_name = table_name
        self.alias = set([table_name])
        self.columns_byname = dict()  # column_name, type
        self.columns = []
        self.cxt = cxt
        # keep track of temp vars
        self.rec = None
        self.add_cols(cols)
        # runtime
        self.order = []  # assumptions
        cxt.tables_byname[self.table_name] = self  # construct reverse map

    def add_cols(self, cols, new = True):
        for i, c in enumerate(cols):
            self.add_col(c, new, i)
    def add_col(self, c, new = True, i = 0):
        _ty = c['type']
        if new:
            _ty = _ty if type(c) is ColRef else list(_ty.keys())[0]
            col_object = ColRef(_ty, c, self, c['name'], len(self.columns))
        else:
            col_object = c
            c.table = self
        self.columns_byname[c['name']] = col_object
        self.columns.append(col_object)
    def add_alias(self, alias):
        if alias in self.cxt.tables_byname.keys():
            print("Error: table alias already exists")
            return
        self.cxt.tables_byname[alias] = self
        self.alias.add(alias)
    def parse_col_names(self, colExpr) -> ColRef:
        parsedColExpr = colExpr.split('.')
        if len(parsedColExpr) <= 1:
            return self.columns_byname[colExpr]
        else:
            datasource = self.cxt.tables_byname.get(parsedColExpr[0])
            if datasource is None:
                raise ValueError(f'Table name/alias not defined: {parsedColExpr[0]}')
            else:
                return datasource.parse_col_names(parsedColExpr[1])

class Context:
    def __init__(self):
        self.sql = ''
        self.tables_byname = dict()
        self.col_byname = dict()
        self.tables = []
        self.cols = []
        self.datasource = None
        self.udf_map = dict()

    def emit(self, sql:str):
        self.sql += sql + ' '
    def add_table(self, table_name, cols):
        tbl = TableInfo(table_name, cols, self)
        self.tables.append(tbl)
        return tbl
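A short sketch of the storage layer on its own; the column-spec shape follows what `add_col` unpacks (a `name` plus a one-key `type` dict):

    from reconstruct.storage import Context

    cxt = Context()
    tbl = cxt.add_table('sale', [
        {'name': 'Month', 'type': {'int': {}}},  # first key of the type dict is the type name
        {'name': 'sales', 'type': {'int': {}}},
    ])
    tbl.add_alias('s')                    # registers the alias in cxt.tables_byname
    col = tbl.parse_col_names('s.sales')  # dotted names resolve the table/alias first
    print(col.name, col.type)             # -> sales int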

@@ -0,0 +1,27 @@
{
  "BackgroundImageAbsolutePath": "c:\\users\\bill\\appdata\\local\\microsoft\\visualstudio\\17.0_03c65567\\extensions\\atkxhose.05t\\Images\\background.png",
  "BackgroundImagesDirectoryAbsolutePath": "c:\\users\\bill\\appdata\\local\\microsoft\\visualstudio\\17.0_03c65567\\extensions\\atkxhose.05t\\Images",
  "ExpandToIDE": false,
  "Extensions": ".png, .jpg, .gif, .bmp",
  "ImageBackgroundType": 0,
  "ImageFadeAnimationInterval": "PT5S",
  "ImageStretch": 0,
  "IsLimitToMainlyEditorWindow": false,
  "LoopSlideshow": true,
  "MaxHeight": 0,
  "MaxWidth": 0,
  "Opacity": 0.35,
  "PositionHorizon": 1,
  "PositionVertical": 1,
  "ShuffleSlideshow": false,
  "SoftEdgeX": 0,
  "SoftEdgeY": 0,
  "TileMode": 0,
  "UpdateImageInterval": "PT1M",
  "ViewBoxPointX": 0,
  "ViewBoxPointY": 0,
  "ViewPortHeight": 1,
  "ViewPortPointX": 0,
  "ViewPortPointY": 0,
  "ViewPortWidth": 1
}

@@ -59,11 +59,11 @@ int main(int argc, char** argv) {
    cxt->log("running: %s\n", running? "true":"false");
    cxt->log("ready: %s\n", ready? "true":"false");
    void* handle = dlopen("./dll.so", RTLD_LAZY);
    cxt->log("handle: %x\n", handle);
    cxt->log("handle: %lx\n", handle);
    if (handle) {
        cxt->log("inner\n");
        code_snippet c = reinterpret_cast<code_snippet>(dlsym(handle, "dllmain"));
        cxt->log("routine: %x\n", c);
        cxt->log("routine: %lx\n", c);
        if (c) {
            cxt->log("inner\n");
            cxt->err("return: %d\n", c(cxt));
@@ -79,7 +79,6 @@ int main(int argc, char** argv) {
#include "utils.h"

int _main()
{
    //vector_type<int> t;
    //t = 1;
    //t.emplace_back(2);
@@ -89,11 +88,11 @@ int _main()
    cxt->log_level = LOG_INFO;
    puts(cpp_17 ? "true":"false");
    void* handle = dlopen("dll.so", RTLD_LAZY);
    printf("handle: %x\n", handle);
    cxt->log("handle: %llx\n", handle);
    if (handle) {
        cxt->log("inner\n");
        code_snippet c = reinterpret_cast<code_snippet>(dlsym(handle, "dllmain"));
        printf("routine: %x\n", c);
        cxt->log("routine: %llx\n", c);
        if (c) {
            cxt->log("inner\n");
            cxt->log("return: %d\n", c(cxt));

@@ -44,7 +44,7 @@
  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration">
    <ConfigurationType>Application</ConfigurationType>
    <UseDebugLibraries>true</UseDebugLibraries>
    <PlatformToolset>v141</PlatformToolset>
    <PlatformToolset>v143</PlatformToolset>
    <CharacterSet>Unicode</CharacterSet>
    <EnableASAN>false</EnableASAN>
  </PropertyGroup>

@@ -274,5 +274,5 @@ public:
    }
};
#pragma pack(pop)
#endif
