bill sun 3 years ago
parent d4d11286d5
commit 15b124e2e6

@@ -92,7 +92,7 @@ class TableInfo:
type_tags = type_tags[:-1]
type_tags += '>'
self.cxt.emit(f'auto& {base_name} = *(TableInfo{type_tags} *)(cxt->tables[{self.table_name}]);')
self.cxt.emit(f'auto& {base_name} = *(TableInfo{type_tags} *)(cxt->tables["{self.table_name}"]);')
return self.cxt_name
def refer_all(self):
self.reference()
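Note on this one-character fix: cxt->tables is a string-keyed map (the generated out.cpp further down does cxt->tables.insert({"test", test})), so the emitted lookup must be a quoted string literal; without the quotes the generated C++ would name an undeclared identifier. A minimal, self-contained sketch of the pattern the generated code relies on (Table and the map here are stand-ins, not the engine's types):

    #include <cstdio>
    #include <string>
    #include <unordered_map>

    struct Table { int rows; };   // stand-in for the engine's TableInfo

    int main() {
        std::unordered_map<std::string, void*> tables;   // like cxt->tables
        Table* t = new Table{4};
        tables.insert({"test", t});
        // generated code must read tables["test"], not tables[test]
        auto& ref = *static_cast<Table*>(tables["test"]);
        std::printf("%d\n", ref.rows);
        delete t;
        return 0;
    }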

@@ -1,13 +1,15 @@
# code-gen for data decl languages
from engine.orderby import orderby
from engine.ast import ColRef, TableInfo, ast_node, Context, include
from engine.scan import scan
from engine.utils import base62uuid
class create_table(ast_node):
name = 'create_table'
def __init__(self, parent: "ast_node", node, context: Context = None, cexprs = None):
def __init__(self, parent: "ast_node", node, context: Context = None, cexprs = None, lineage = False):
self.cexprs = cexprs
self.lineage = lineage
super().__init__(parent, node, context)
def produce(self, node):
if type(node) is not TableInfo:
@@ -29,12 +31,28 @@ class create_table(ast_node):
self.emit(f"{c.cxt_name}.init();")
# create an output table
else:
# 1 to 1 lineage.
if len(self.context.scans) == 0:
if self.lineage:
order = 'order_' + base62uuid(6)
self.emit(f'auto {order} = {self.parent.datasource.cxt_name}->order_by<{orderby(self.parent, self.parent.assumptions).result()}>();')
self.lineage = '*' + order
else:
self.lineage = None
for i, c in enumerate(tbl.columns):
self.emit(f"{c.cxt_name}.init();")
self.emit(f"{c.cxt_name} = {self.cexprs[i]()};")
self.emit(f"{c.cxt_name} = {self.cexprs[i](self.lineage)};")
self.lineage = None
self.parent.assumptions = None
else:
scanner:scan = self.context.scans[-1]
if self.lineage:
lineage_var = 'lineage_' + base62uuid(6)
counter_var = 'counter_' + base62uuid(6)
scanner.add(f'auto {lineage_var} = {self.datasource.cxt_name}->bind({tbl.cxt_name});', "init")
scanner.add(f'auto {counter_var} = 0;', "init")
scanner.add(f"{lineage_var}.emplace_back({counter_var}++);", "front")
self.lineage = f"{lineage_var}.rid"
for i, c in enumerate(tbl.columns):
scanner.add(f"{c.cxt_name}.init();", "init")
scanner.add(f"{c.cxt_name} = {self.cexprs[i](scanner.it_ver)};")

@@ -1,8 +1,9 @@
from engine.ast import TableInfo, ast_node
from engine.ast import ColRef, TableInfo, ast_node
from engine.orderby import assumption
from engine.scan import scan
from engine.utils import base62uuid
from engine.expr import expr
import engine.types
class groupby(ast_node):
name = '_groupby'
def init(self, _):
@@ -24,7 +25,8 @@ class groupby(ast_node):
for i, g in enumerate(node):
v = g['value']
e = expr(self, v)
self.raw_groups.append(e.raw_col)
if type(e.raw_col) is ColRef:
self.raw_groups.append(e.raw_col)
e = e._expr
# if v is compound expr, create tmp cols
if type(v) is not str:
@@ -49,6 +51,13 @@ class groupby(ast_node):
self.datasource.rec = None
self.scanner.finalize()
def deal_with_assumptions(self, assumption:assumption, out:TableInfo):
gscanner = scan(self, self.group)
val_var = 'val_'+base62uuid(7)
gscanner.add(f'auto &{val_var} = {gscanner.it_ver}.second;')
gscanner.add(f'{out.cxt_name}->order_by<{assumption.result()}>(&{val_var});')
gscanner.finalize()
def finalize(self, cexprs, out:TableInfo):
gscanner = scan(self, self.group)
key_var = 'key_'+base62uuid(7)
@@ -59,3 +68,5 @@ class groupby(ast_node):
gscanner.add(';\n'.join([f'{out.columns[i].reference()}.emplace_back({ce(x=val_var, y=key_var)})' for i, ce in enumerate(cexprs)])+';')
gscanner.finalize()
self.datasource.groupinfo = None
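deal_with_assumptions emits a second scan over the group map and calls order_by on each group's row-id vector, so that windowed aggregates inside a group see rows in the assumed order. A self-contained sketch of that idea (plain std::unordered_map and std::vector in place of the repo's hashed group map and vector_type):

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>
    #include <unordered_map>
    #include <vector>

    // Hash rows into groups, then sort each group's row ids by the
    // "assumed" column (hypothetical data, not the repo's API).
    int main() {
        std::vector<int> key   = {1, 1, 2, 2};      // group-by column
        std::vector<int> month = {4, 2, 9, 7};      // ASSUMING ASC column

        std::unordered_map<int, std::vector<uint32_t>> groups;
        for (uint32_t i = 0; i < key.size(); ++i)
            groups[key[i]].push_back(i);

        for (auto& g : groups) {
            auto& rids = g.second;                  // what the gscanner body touches
            std::sort(rids.begin(), rids.end(),
                      [&](uint32_t a, uint32_t b) { return month[a] < month[b]; });
        }

        for (auto& g : groups) {
            std::printf("group %d:", g.first);
            for (uint32_t r : g.second) std::printf(" %d", month[r]);
            std::printf("\n");
        }
    }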

@@ -35,7 +35,31 @@ class orderby(ast_node):
for n in node:
order = not ('sort' in n and n['sort'] == 'desc')
col_id = self.datasource.columns_byname[n['value']].id
self.col_list.append(col_id if order else -col_id-1)
self.order.append(order_item(n['value'], self, order))
col_id = col_id if order else -col_id-1
if col_id not in self.col_list:
self.col_list.append(col_id)
self.order.append(order_item(n['value'], self, order))
def merge(self, node):
self.produce(node)
def finialize(self, references):
self.order = [ o for o in self.order if o.name in references ]
def result(self, sep:str = ','):
return sep.join([f"{c}" for c in self.col_list])
class assumption(orderby):
name = '_assumption'
def __init__(self, parent: "ast_node", node, context: Context = None, exclude = []):
self.exclude = exclude
super().__init__(parent, node, context)
def produce(self, node):
if type(node) is not list:
node = [node]
[n for n in node if n not in self.exclude]
return super().produce(node)
def empty(self):
return len(self.col_list) == 0
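orderby/assumption encode one ordering key per column: ascending column i is stored as i, descending as -i-1, with duplicates skipped so a merged ORDER BY / ASSUMING list stays minimal (this is the encoding behind order_by_view<-3,1> in the generated out.cpp below). A small self-contained sketch of just that encoding:

    #include <cstdio>
    #include <vector>

    // Ascending column i -> i, descending -> -i-1; duplicates are dropped.
    int encode(int col_id, bool asc) { return asc ? col_id : -col_id - 1; }

    int main() {
        std::vector<int> col_list;
        auto add = [&](int col_id, bool asc) {
            int key = encode(col_id, asc);
            for (int k : col_list) if (k == key) return;   // already present
            col_list.push_back(key);
        };
        add(2, false);   // column 2 DESC  -> -3
        add(0, true);    // column 0 ASC   ->  0
        add(2, false);   // duplicate, ignored
        for (int k : col_list) std::printf("%d ", k);      // prints: -3 0
        std::printf("\n");
    }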

@@ -2,7 +2,7 @@ from engine.ast import ColRef, TableInfo, ast_node, Context, include
from engine.groupby import groupby
from engine.join import join
from engine.expr import expr
from engine.orderby import orderby
from engine.orderby import assumption, orderby
from engine.scan import filter
from engine.utils import base62uuid, enlist, base62alp, has_other
from engine.ddl import create_table, outfile
@@ -14,7 +14,7 @@ class projection(ast_node):
self.disp = disp
self.outname = outname
self.group_node = None
self.assumption = None
self.assumptions = None
self.where = None
ast_node.__init__(self, parent, node, context)
def init(self, _):
@@ -46,7 +46,7 @@ class projection(ast_node):
elif type(value) is str:
self.datasource = self.context.tables_byname[value]
if 'assumptions' in from_clause:
self.assumption = enlist(from_clause['assumptions'])
self.assumptions = enlist(from_clause['assumptions'])
elif type(from_clause) is str:
self.datasource = self.context.tables_byname[from_clause]
@@ -61,7 +61,7 @@ class projection(ast_node):
if 'where' in node:
self.where = filter(self, node['where'], True)
# self.datasource = filter(self, node['where'], True).output
#self.context.datasource = self.datasource
# self.context.datasource = self.datasource
if 'groupby' in node:
self.group_node = groupby(self, node['groupby'])
@@ -73,10 +73,7 @@ class projection(ast_node):
def consume(self, node):
self.inv = True
disp_varname = 'd'+base62uuid(7)
has_groupby = False
if self.group_node is not None:
# There is group by;
has_groupby = True
has_groupby = self.group_node is not None
cexprs = []
flatten = False
cols = []
@@ -85,6 +82,7 @@ class projection(ast_node):
flatten = True
new_names = []
proj_raw_cols = []
for i, proj in enumerate(self.projections):
cname = ''
compound = False
@@ -92,7 +90,10 @@
if type(proj) is dict:
if 'value' in proj:
e = proj['value']
sname = expr(self, e)._expr
sname = expr(self, e)
if type(sname.raw_col) is ColRef:
proj_raw_cols.append(sname.raw_col)
sname = sname._expr
fname = expr.toCExpr(sname) # fastest access method at innermost context
absname = expr(self, e, abs_col=True)._expr # absolute name at function scope
# TODO: Make it single pass here.
@@ -118,26 +119,50 @@
self.out_table.add_cols(cols, False)
lineage = None
if has_groupby:
create_table(self, self.out_table) # creates empty out_table.
if self.assumptions is not None:
self.assumptions = assumption(self, self.assumptions, exclude=self.group_node.raw_groups)
if not self.assumptions.empty():
self.group_node.deal_with_assumptions(self.assumptions, self.out_table)
self.assumptions = None
self.group_node.finalize(cexprs, self.out_table)
else:
create_table(self, self.out_table, cexprs = cexprs) # create and populate out_table.
self.datasource.group_node = None
# if all assumptions in projections, treat as orderby
lineage = self.assumptions is not None and has_other(self.assumptions, proj_raw_cols)
spawn = create_table(self, self.out_table, cexprs = cexprs, lineage = lineage) # create and populate out_table.
if lineage and type(spawn.lineage) is str:
lineage = spawn.lineage
self.assumptions = orderby(self, self.assumptions) # do not exclude proj_raw_cols
else:
lineage = None
if self.where is not None:
self.where.finalize()
has_orderby = 'orderby' in node
if has_orderby:
if type(lineage) is str:
order = 'order_' + base62uuid(6)
self.emit(f'auto {order} = {self.datasource.cxt_name}->order_by<{self.assumptions.result()}>({lineage});')
self.emit(f'{self.out_table.cxt_name}->materialize(*{order});')
self.assumptions = None
if self.assumptions is not None:
orderby_node = orderby(self, self.assumptions)
else:
orderby_node = None
if 'orderby' in node:
self.datasource = self.out_table
self.context.datasource = self.out_table # discard current ds
orderby_node = orderby(self, node['orderby'])
self.emit(f'auto {disp_varname} = {self.out_table.reference()}->order_by_view<{",".join([f"{c}" for c in orderby_node.col_list])}>();')
orderbys = node['orderby']
orderby_node = orderby(self, orderbys) if orderby_node is None else orderby_node.merge(orderbys)
if orderby_node is not None:
self.emit(f'auto {disp_varname} = {self.out_table.reference()}->order_by_view<{orderby_node.result()}>();')
else:
disp_varname = f'*{self.out_table.cxt_name}'
if self.disp:
self.emit(f'print({disp_varname});')
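My reading of the reworked consume logic: when the assumption columns are not all among the projected columns, the out table is created with lineage, the ORDER BY is evaluated on the source through those lineage ids, and the out table is then materialized in that order; otherwise the assumption collapses into a plain orderby on the output. A rough, self-contained sketch of the lineage-then-materialize step (names and data are illustrative, not the repo's API):

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // `lineage` maps each output row to the source row it came from; the order
    // is computed on the source column through those ids, then the output
    // column is gathered into that order.
    int main() {
        std::vector<int>      src_ts  = {9, 3, 7, 1};   // source column used by ORDER BY
        std::vector<uint32_t> lineage = {0, 2, 3};      // source row per output row
        std::vector<int>      out_col = {90, 70, 10};   // already-computed output column

        std::vector<uint32_t> order(lineage.size());
        for (uint32_t i = 0; i < order.size(); ++i) order[i] = i;
        std::sort(order.begin(), order.end(),
                  [&](uint32_t a, uint32_t b) { return src_ts[lineage[a]] < src_ts[lineage[b]]; });

        std::vector<int> reordered(out_col.size());
        for (uint32_t i = 0; i < order.size(); ++i)     // materialize in the new order
            reordered[i] = out_col[order[i]];
        for (int v : reordered) std::printf("%d ", v);  // prints: 10 70 90
        std::printf("\n");
    }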

@@ -5,14 +5,16 @@ from engine.expr import expr
class scan(ast_node):
name = 'scan'
def __init__(self, parent: "ast_node", node, size = None, context: Context = None):
def __init__(self, parent: "ast_node", node, size = None, context: Context = None, const = False):
self.type = type
self.size = size
self.const = "const " if const else ""
super().__init__(parent, node, context)
def init(self, _):
self.datasource = self.context.datasource
self.initializers = ''
self.start = ''
self.front = ''
self.body = ''
self.end = '}'
self.mode = None
@@ -27,13 +29,13 @@
self.colref = node
if self.size is None:
self.mode = ["col", node.table]
self.start += f'for (auto& {self.it_ver} : {node.reference()}) {{\n'
self.start += f'for ({self.const}auto& {self.it_ver} : {node.reference()}) {{\n'
else:
self.mode = ["idx", node.table]
self.start += f"for (uint32_t {self.it_ver} = 0; {self.it_ver} < {node.reference()}.size; ++{self.it_ver}){{\\n"
elif type(node) is str:
self.mode = ["idx", None]
self.start+= f'for(auto& {self.it_ver} : {node}) {{\n'
self.start+= f'for({self.const}auto& {self.it_ver} : {node}) {{\n'
else:
self.mode = ["idx", node] # Node is the TableInfo
self.start += f"for (uint32_t {self.it_ver} = 0; {self.it_ver} < {self.size}; ++{self.it_ver}){{\n"
@@ -41,13 +43,16 @@
def add(self, stmt, position = "body"):
if position == "body":
self.body += stmt + '\n'
else:
elif position == "init":
self.initializers += stmt + '\n'
else:
self.front += stmt + '\n'
def finalize(self):
for f in self.filters:
self.start += f
self.end += '}'
self.context.remove_scan(self, self.initializers + self.start + self.body + self.end)
self.context.remove_scan(self, self.initializers + self.start + self.front + self.body + self.end)
class filter(ast_node):
name = 'filter'
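The scan node now assembles five pieces: initializers (before the loop), start (the loop header, optionally "const auto&"), the new front slot (runs each iteration before the column bodies), body, and end. Roughly how they compose in the generated C++, annotated with which slot each line would come from (illustrative only; not actual compiler output):

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    int main() {
        std::vector<int> src = {5, 6, 7};
        std::vector<uint32_t> lineage;                 // "init": auto lineage = ...;
        uint32_t counter = 0;                          // "init": auto counter = 0;
        std::vector<int> out;
        for (const auto& x : src) {                    // "start" (const scan)
            lineage.push_back(counter++);              // "front": runs before the body
            out.push_back(x * 2);                      // "body": column expression
        }                                              // "end"
        std::printf("%u rows\n", (unsigned)counter);
    }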

@@ -6,7 +6,7 @@ FIELDS TERMINATED BY "\t"
SELECT Month,avgs(3,sales)
FROM sale
-- ASSUMING ASC Month
ASSUMING ASC Month
INTO OUTFILE "moving_avg_output.csv"
FIELDS TERMINATED BY ";"
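With ASSUMING ASC Month re-enabled, the query means: order sale by Month, then take a width-3 trailing window average of sales (my reading of avgs/avgw; the generated code below does exactly this via sale->order_by<0>() and avgw(3, sale_sales[*order])). A self-contained sketch with made-up data:

    #include <algorithm>
    #include <cstdio>
    #include <numeric>
    #include <vector>

    int main() {
        std::vector<int> month = {3, 1, 2, 4};
        std::vector<double> sales = {30, 10, 20, 40};

        std::vector<size_t> order(month.size());
        std::iota(order.begin(), order.end(), size_t{0});
        std::sort(order.begin(), order.end(),
                  [&](size_t a, size_t b) { return month[a] < month[b]; });

        const size_t w = 3;                            // window width from avgs(3, sales)
        for (size_t i = 0; i < order.size(); ++i) {
            size_t lo = i + 1 >= w ? i + 1 - w : 0;    // trailing window [lo, i]
            double sum = 0;
            for (size_t j = lo; j <= i; ++j) sum += sales[order[j]];
            std::printf("%d %.2f\n", month[order[i]], sum / double(i - lo + 1));
        }
    }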

@@ -1,60 +1,61 @@
#include <unordered_map>
#include "./server/libaquery.h"
#include "./server/hasher.h"
#include "./server/aggregations.h"
#include "csv.h"
#include "./server/libaquery.h"
#include <unordered_map>
extern "C" int __DLLEXPORT__ dllmain(Context* cxt) {
using namespace std;
using namespace types;
auto test = new TableInfo<int,int,int,int>("test", 4);
cxt->tables.insert({"test", test});
auto& test_a = *(ColRef<int> *)(&test->colrefs[0]);
auto& test_b = *(ColRef<int> *)(&test->colrefs[1]);
auto& test_c = *(ColRef<int> *)(&test->colrefs[2]);
auto& test_d = *(ColRef<int> *)(&test->colrefs[3]);
test_a.init();
test_b.init();
test_c.init();
test_d.init();
io::CSVReader<4> csv_reader_4bTMJ9("test.csv");
csv_reader_4bTMJ9.read_header(io::ignore_extra_column, "a","b","c","d");
int tmp_78E1nhZJ;
int tmp_4wnHGd9t;
int tmp_5OL9GlRp;
int tmp_155GVQC6;
while(csv_reader_4bTMJ9.read_row(tmp_78E1nhZJ,tmp_4wnHGd9t,tmp_5OL9GlRp,tmp_155GVQC6)) {
auto sale = new TableInfo<int,int>("sale", 2);
cxt->tables.insert({"sale", sale});
auto& sale_Month = *(ColRef<int> *)(&sale->colrefs[0]);
auto& sale_sales = *(ColRef<int> *)(&sale->colrefs[1]);
sale_Month.init();
sale_sales.init();
io::CSVReader<2> csv_reader_6T89Ll("moving_avg.csv");
csv_reader_6T89Ll.read_header(io::ignore_extra_column, "Month","sales");
int tmp_5vttJ2yV;
int tmp_2ckq15YU;
while(csv_reader_6T89Ll.read_row(tmp_5vttJ2yV,tmp_2ckq15YU)) {
test_a.emplace_back(tmp_78E1nhZJ);
test_b.emplace_back(tmp_4wnHGd9t);
test_c.emplace_back(tmp_5OL9GlRp);
test_d.emplace_back(tmp_155GVQC6);
sale_Month.emplace_back(tmp_5vttJ2yV);
sale_sales.emplace_back(tmp_2ckq15YU);
}
typedef record<decltype(test_a[0]),decltype(test_b[0]),decltype(test_d[0])> record_type6jn8Y49;
unordered_map<record_type6jn8Y49, vector_type<uint32_t>, transTypes<record_type6jn8Y49, hasher>> g5gn6KEb;
for (uint32_t i3V = 0; i3V < test_a.size; ++i3V){
g5gn6KEb[forward_as_tuple(test_a[i3V],test_b[i3V],test_d[i3V])].emplace_back(i3V);
auto out_2UiD = new TableInfo<value_type<decays<decltype(sale_Month)>>,value_type<decays<decltype(avgw(3,sale_sales))>>>("out_2UiD", 2);
cxt->tables.insert({"out_2UiD", out_2UiD});
auto& out_2UiD_Month = *(ColRef<value_type<decays<decltype(sale_Month)>>> *)(&out_2UiD->colrefs[0]);
auto& out_2UiD_avgw3salesales = *(ColRef<value_type<decays<decltype(avgw(3,sale_sales))>>> *)(&out_2UiD->colrefs[1]);
auto order_1NNZ9F = sale->order_by<0>();
out_2UiD_Month.init();
out_2UiD_Month = sale_Month[*order_1NNZ9F];
out_2UiD_avgw3salesales.init();
out_2UiD_avgw3salesales = avgw(3,sale_sales[*order_1NNZ9F]);
print(*out_2UiD);
FILE* fp_6xIJn4 = fopen("moving_avg_output.csv", "w");
out_2UiD->printall(";", "\n", nullptr, fp_6xIJn4);
fclose(fp_6xIJn4);
typedef record<decltype(sale_sales[0])> record_type6Lepq5T;
unordered_map<record_type6Lepq5T, vector_type<uint32_t>, transTypes<record_type6Lepq5T, hasher>> g4loWjmn;
for (uint32_t i5g = 0; i5g < sale_sales.size; ++i5g){
g4loWjmn[forward_as_tuple(sale_sales[i5g])].emplace_back(i5g);
}
auto out_4DCN = new TableInfo<decays<decltype(sum(test_c))>,value_type<decays<decltype(test_b)>>,value_type<decays<decltype(test_d)>>>("out_4DCN", 3);
cxt->tables.insert({"out_4DCN", out_4DCN});
auto& out_4DCN_sumtestc = *(ColRef<decays<decltype(sum(test_c))>> *)(&out_4DCN->colrefs[0]);
auto& out_4DCN_b = *(ColRef<value_type<decays<decltype(test_b)>>> *)(&out_4DCN->colrefs[1]);
auto& out_4DCN_d = *(ColRef<value_type<decays<decltype(test_d)>>> *)(&out_4DCN->colrefs[2]);
auto lineage = test->bind(out_4DCN);
out_4DCN_sumtestc.init();
out_4DCN_b.init();
out_4DCN_d.init();
for(auto& i1s : g5gn6KEb) {
auto &key_4Q0aEyH = i1s.first;
auto &val_7BUMR6d = i1s.second;
out_4DCN_sumtestc.emplace_back(sum(test_c[val_7BUMR6d]));
out_4DCN_b.emplace_back(get<1>(key_4Q0aEyH));
out_4DCN_d.emplace_back(get<2>(key_4Q0aEyH));
lineage.emplace_back(val_7BUMR6d[0]);
auto out_2YlO = new TableInfo<value_type<decays<decltype(sale_sales)>>,decays<decltype(minw(2,sale_Month))>>("out_2YlO", 2);
cxt->tables.insert({"out_2YlO", out_2YlO});
auto& out_2YlO_sales = *(ColRef<value_type<decays<decltype(sale_sales)>>> *)(&out_2YlO->colrefs[0]);
auto& out_2YlO_minw2saleMonth = *(ColRef<decays<decltype(minw(2,sale_Month))>> *)(&out_2YlO->colrefs[1]);
out_2YlO_sales.init();
out_2YlO_minw2saleMonth.init();
for(auto& iFU : g4loWjmn) {
auto &key_3AwvKMR = iFU.first;
auto &val_7jtE12E = iFU.second;
out_2YlO_sales.emplace_back(get<0>(key_3AwvKMR));
out_2YlO_minw2saleMonth.emplace_back(minw(2,sale_Month[val_7jtE12E]));
}
print(lineage.rid);
auto d6X0PMzl = out_4DCN->order_by_view<-3,1>();
print(d6X0PMzl);
print(*out_2YlO);
FILE* fp_45ld6S = fopen("flatten.csv", "w");
out_2YlO->printall(",", "\n", nullptr, fp_45ld6S);
fclose(fp_45ld6S);
return 0;
}

@@ -100,6 +100,6 @@ int _main()
}
//static_assert(std::is_same_v<decltype(fill_integer_array<5, 1>()), std::integer_sequence<bool, 1,1,1,1,1>>, "");
return 0;
std::unordered_map<int, int> a;
}

@@ -183,7 +183,7 @@ struct TableInfo {
TableInfo(const char* name, uint32_t n_cols);
template <int prog = 0>
inline void materialize(const vector_type<uint32_t>& idxs, TableInfo<Types...>* tbl = nullptr) { // inplace materialize
if constexpr(prog == 0) tbl = 0 ? this : tbl;
if constexpr(prog == 0) tbl = (tbl == 0 ? this : tbl);
if constexpr (prog == sizeof...(Types)) return;
else {
auto& col = get<prog>(*this);
@@ -191,7 +191,7 @@ struct TableInfo {
for(uint32_t i = 0; i < idxs.size; ++i)
new_col[i] = col[idxs[i]];
get<prog>(*tbl) = new_col;
materialize<prog + 1>();
materialize<prog + 1>(idxs, tbl);
}
}
inline TableInfo<Types...>* materialize_copy(const vector_type<uint32_t>& idxs) {
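The materialize fix forwards (idxs, tbl) into the recursive call; before, every column past the first recursed with default arguments. A self-contained sketch of the same if-constexpr recursion over a column tuple, using std::vector in place of the repo's ColRef/vector_type:

    #include <cstdint>
    #include <cstdio>
    #include <tuple>
    #include <vector>

    // Walk the columns at compile time, gathering each through `idxs`; the
    // recursive call must forward both arguments or the deeper columns never
    // see them (the bug fixed in this hunk).
    template <int prog = 0, class... Ts>
    void materialize(std::tuple<std::vector<Ts>...>& tbl, const std::vector<uint32_t>& idxs) {
        if constexpr (prog == sizeof...(Ts)) {
            return;
        } else {
            auto& col = std::get<prog>(tbl);
            std::vector<std::tuple_element_t<prog, std::tuple<Ts...>>> new_col(idxs.size());
            for (uint32_t i = 0; i < idxs.size(); ++i)
                new_col[i] = col[idxs[i]];
            col = std::move(new_col);
            materialize<prog + 1>(tbl, idxs);          // forward both arguments
        }
    }

    int main() {
        std::tuple<std::vector<int>, std::vector<double>> t{{1, 2, 3}, {1.5, 2.5, 3.5}};
        materialize(t, {2, 0, 1});
        std::printf("%d %g\n", std::get<0>(t)[0], std::get<1>(t)[0]);   // prints: 3 3.5
    }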
@@ -200,10 +200,12 @@ struct TableInfo {
return tbl;
}
template<int ...cols>
inline vector_type<uint32_t>* order_by() {
vector_type<uint32_t>* ord = new vector_type<uint32_t>(colrefs[0].size);
for (uint32_t i = 0; i < colrefs[0].size; ++i)
(*ord)[i] = i;
inline vector_type<uint32_t>* order_by(vector_type<uint32_t>* ord = nullptr) {
if (!ord) {
ord = new vector_type<uint32_t>(colrefs[0].size);
for (uint32_t i = 0; i < colrefs[0].size; ++i)
(*ord)[i] = i;
}
std::sort(ord->begin(), ord->end(), [this](const uint32_t& lhs, const uint32_t& rhs) {
return
std::forward_as_tuple((cols >= 0 ? get<cols, (cols >= 0)>(*this)[lhs] : -get<cols, (cols >= 0)>(*this)[lhs]) ...)
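order_by now accepts an existing id vector (for example a lineage vector) and only builds the identity permutation when none is supplied; descending keys are still handled by negating the column value inside the tuple comparison. A self-contained sketch of that shape with two hard-coded keys (the real version takes the columns as non-type template parameters):

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>
    #include <numeric>
    #include <tuple>
    #include <vector>

    // Sort row ids by (a ascending, b descending); reuse the caller's ids if given.
    std::vector<uint32_t>* order_by(const std::vector<int>& a, const std::vector<int>& b,
                                    std::vector<uint32_t>* ord = nullptr) {
        if (!ord) {
            ord = new std::vector<uint32_t>(a.size());
            std::iota(ord->begin(), ord->end(), 0u);
        }
        std::sort(ord->begin(), ord->end(), [&](uint32_t l, uint32_t r) {
            return std::make_tuple(a[l], -b[l]) < std::make_tuple(a[r], -b[r]);
        });
        return ord;
    }

    int main() {
        std::vector<int> a = {1, 1, 0};
        std::vector<int> b = {5, 9, 2};
        auto* ord = order_by(a, b);                    // fresh identity permutation
        for (uint32_t i : *ord) std::printf("%u ", i); // prints: 2 1 0
        std::printf("\n");
        delete ord;
    }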
@@ -345,60 +347,60 @@ inline void TableInfo<Types...>::print(const char* __restrict sep, const char* _
std::cout << end;
}
}
template <class T1, class T2, template<typename ...> class VT>
VT<typename types::Coercion<T1, T2>::type> operator -(const VT<T1>& lhs, const VT<T2>& rhs) {
auto ret = VT<typename types::Coercion<T1, T2>::type>(lhs.size, "");
template <class T1, class T2, template<typename ...> class VT, template<typename ...> class VT2>
decayed_t<VT, typename types::Coercion<T1, T2>::type> operator -(const VT<T1>& lhs, const VT2<T2>& rhs) {
auto ret = decayed_t<VT, typename types::Coercion<T1, T2>::type>(lhs.size, "");
for (int i = 0; i < lhs.size; ++i)
ret.container[i] = lhs.container[i] - rhs.container[i];
ret[i] = lhs[i] - rhs[i];
return ret;
}
template <class T1, class T2, template<typename ...> class VT>
VT<typename types::Coercion<T1, T2>::type> operator -(const VT<T1>& lhs, const T2& rhs) {
auto ret = VT<typename types::Coercion<T1, T2>::type>(lhs.size, "");
decayed_t<VT, typename types::Coercion<T1, T2>::type> operator -(const VT<T1>& lhs, const T2& rhs) {
auto ret = decayed_t<VT, typename types::Coercion<T1, T2>::type>(lhs.size, "");
for (int i = 0; i < lhs.size; ++i)
ret.container[i] = lhs.container[i] - rhs;
ret[i] = lhs[i] - rhs;
return ret;
}
template <class T1, class T2, template<typename ...> class VT>
VT<typename types::Coercion<T1, T2>::type> operator +(const VT<T1>& lhs, const VT<T2>& rhs) {
auto ret = VT<typename types::Coercion<T1, T2>::type>(lhs.size, "");
template <class T1, class T2, template<typename ...> class VT, template<typename ...> class VT2>
decayed_t<VT, typename types::Coercion<T1, T2>::type> operator +(const VT<T1>& lhs, const VT2<T2>& rhs) {
auto ret = decayed_t<VT, typename types::Coercion<T1, T2>::type>(lhs.size, "");
for (int i = 0; i < lhs.size; ++i)
ret.container[i] = lhs.container[i] + rhs.container[i];
ret[i] = lhs[i] + rhs[i];
return ret;
}
template <class T1, class T2, template<typename ...> class VT>
VT<typename types::Coercion<T1, T2>::type> operator +(const VT<T1>& lhs, const T2& rhs) {
auto ret = VT<typename types::Coercion<T1, T2>::type>(lhs.size, "");
decayed_t<VT, typename types::Coercion<T1, T2>::type> operator +(const VT<T1>& lhs, const T2& rhs) {
auto ret = decayed_t<VT, typename types::Coercion<T1, T2>::type>(lhs.size, "");
for (int i = 0; i < lhs.size; ++i)
ret.container[i] = lhs.container[i] + rhs;
ret[i] = lhs[i] + rhs;
return ret;
}
template <class T1, class T2, template<typename ...> class VT>
VT<typename types::Coercion<T1, T2>::type> operator *(const VT<T1>& lhs, const VT<T2>& rhs) {
auto ret = VT<typename types::Coercion<T1, T2>::type>(lhs.size, "");
template <class T1, class T2, template<typename ...> class VT, template<typename ...> class VT2>
decayed_t<VT, typename types::Coercion<T1, T2>::type> operator *(const VT<T1>& lhs, const VT2<T2>& rhs) {
auto ret = decayed_t<VT, typename types::Coercion<T1, T2>::type>(lhs.size, "");
for (int i = 0; i < lhs.size; ++i)
ret.container[i] = lhs.container[i] * rhs.container[i];
ret[i] = lhs[i] * rhs[i];
return ret;
}
template <class T1, class T2, template<typename ...> class VT>
VT<typename types::Coercion<T1, T2>::type> operator *(const VT<T1>& lhs, const T2& rhs) {
auto ret = VT<typename types::Coercion<T1, T2>::type>(lhs.size, "");
decayed_t<VT, typename types::Coercion<T1, T2>::type> operator *(const VT<T1>& lhs, const T2& rhs) {
auto ret = decayed_t<VT, typename types::Coercion<T1, T2>::type>(lhs.size, "");
for (int i = 0; i < lhs.size; ++i)
ret.container[i] = lhs.container[i] * rhs;
ret[i] = lhs[i] * rhs;
return ret;
}
template <class T1, class T2, template<typename ...> class VT>
VT<typename types::Coercion<T1, T2>::type> operator /(const VT<T1>& lhs, const VT<T2>& rhs) {
auto ret = VT<typename types::Coercion<T1, T2>::type>(lhs.size, "");
template <class T1, class T2, template<typename ...> class VT, template<typename ...> class VT2>
decayed_t<VT, typename types::Coercion<T1, T2>::type> operator /(const VT<T1>& lhs, const VT2<T2>& rhs) {
auto ret = decayed_t<VT, typename types::Coercion<T1, T2>::type>(lhs.size, "");
for (int i = 0; i < lhs.size; ++i)
ret.container[i] = lhs.container[i] / rhs.container[i];
ret[i] = lhs[i] / rhs[i];
return ret;
}
template <class T1, class T2, template<typename ...> class VT>
VT<typename types::Coercion<T1, T2>::type> operator /(const VT<T1>& lhs, const T2& rhs) {
auto ret = VT<typename types::Coercion<T1, T2>::type>(lhs.size, "");
decayed_t<VT, typename types::Coercion<T1, T2>::type> operator /(const VT<T1>& lhs, const T2& rhs) {
auto ret = decayed_t<VT, typename types::Coercion<T1, T2>::type>(lhs.size, "");
for (int i = 0; i < lhs.size; ++i)
ret.container[i] = lhs.container[i] / rhs;
ret[i] = lhs[i] / rhs;
return ret;
}
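The operator overloads now take two independent wrapper templates (VT, VT2) and return decayed_t<VT, Coercion<T1, T2>>, so differently-wrapped columns (say a ColRef and a vector_type, or a subscripted view) can be mixed and the result is a plain owning vector of the coerced element type. A self-contained sketch of the same shape, with std::vector standing in for decayed_t and std::common_type for types::Coercion:

    #include <cstddef>
    #include <cstdio>
    #include <type_traits>
    #include <vector>

    // Two distinct single-parameter wrappers, standing in for ColRef / vector_type.
    template <class T> struct VecA { std::vector<T> data; size_t size() const { return data.size(); } };
    template <class T> struct VecB { std::vector<T> data; size_t size() const { return data.size(); } };

    // Element-wise subtraction across different wrappers, coerced result type.
    template <class T1, class T2, template <class...> class VT, template <class...> class VT2>
    std::vector<std::common_type_t<T1, T2>> operator-(const VT<T1>& lhs, const VT2<T2>& rhs) {
        std::vector<std::common_type_t<T1, T2>> ret(lhs.size());
        for (size_t i = 0; i < lhs.size(); ++i)
            ret[i] = lhs.data[i] - rhs.data[i];
        return ret;
    }

    int main() {
        VecA<int>    a{{5, 6, 7}};
        VecB<double> b{{0.5, 0.25, 0.125}};
        auto r = a - b;                                // mixed wrappers, coerced to double
        for (double v : r) std::printf("%g ", v);      // prints: 4.5 5.75 6.875
        std::printf("\n");
    }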

@@ -81,6 +81,18 @@ public:
_move(std::move(vt));
return *this;
}
template <template <class> class VT>
vector_type<_Ty> operator =(const VT<_Ty>& vt) {
if (capacity > 0) free(container);
container = static_cast<_Ty*>(malloc(size * sizeof(_Ty)));
size = vt.size;
capacity = size;
for(uint32_t i = 0; i < size; ++i)
container[i] = vt[i];
return *this;
}
void emplace_back(_Ty _val) {
if (size >= capacity) { // geometric growth
capacity += 1 + (capacity >> 1);
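The new vector_type assignment copies element-wise from any other single-parameter wrapper into the malloc-backed buffer. A minimal sketch of that pattern (simplified: the source size is read before allocating, and there is no growth policy or error handling; these are not the repo's types):

    #include <cstdint>
    #include <cstdio>
    #include <cstdlib>
    #include <vector>

    template <class T>
    struct other_vec {                                 // a second single-parameter wrapper
        std::vector<T> data;
        uint32_t size() const { return (uint32_t)data.size(); }
        const T& operator[](uint32_t i) const { return data[i]; }
    };

    template <class T>
    struct raw_vec {                                   // malloc-backed, like vector_type
        T* container = nullptr;
        uint32_t size = 0, capacity = 0;

        template <template <class> class VT>
        raw_vec& operator=(const VT<T>& vt) {
            if (capacity > 0) std::free(container);
            size = vt.size();                          // take the size first...
            capacity = size;
            container = static_cast<T*>(std::malloc(size * sizeof(T)));  // ...then allocate
            for (uint32_t i = 0; i < size; ++i)
                container[i] = vt[i];
            return *this;
        }
        ~raw_vec() { if (capacity > 0) std::free(container); }
    };

    int main() {
        other_vec<int> src{{1, 2, 3}};
        raw_vec<int> dst;
        dst = src;                                     // element-wise copy across templates
        std::printf("%d %d %d\n", dst.container[0], dst.container[1], dst.container[2]);
    }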

@@ -17,16 +17,16 @@ INSERT INTO stocks VALUES(14,5)
INSERT INTO stocks VALUES(15,2)
INSERT INTO stocks VALUES(16,5)
/*<k> "q1" </k>*/
SELECT max(price-min(timestamp)) FROM stocks
/*<k> "q1" </k>
*/
/*<k> "q2" </k>*/
SELECT max(price-mins(price)) FROM stocks
/*<k> "q3"</k>*/
SELECT price, timestamp FROM stocks where price - timestamp > 1 and not (price*timestamp<100)
/*
<k> "q3"</k>
/*<k> "q4"</k>*/
SELECT max(price-mins(price))
FROM stocks
ASSUMING ASC timestamp
*/
ASSUMING ASC price
