From 15b124e2e6631ae310f0f7f2c0f9602fd6a6f8e2 Mon Sep 17 00:00:00 2001
From: bill sun
Date: Fri, 29 Apr 2022 04:53:23 +0800
Subject: [PATCH] update

---
 engine/ast.py          |  2 +-
 engine/ddl.py          | 26 ++++++++++--
 engine/groupby.py      | 21 +++++++---
 engine/orderby.py      | 30 ++++++++++++--
 engine/projection.py   | 61 +++++++++++++++++++--------
 engine/scan.py         | 15 ++++---
 moving_avg.a           |  2 +-
 out.cpp                | 93 +++++++++++++++++++++---------------------
 server/server.cpp      |  2 +-
 server/table.h         | 70 ++++++++++++++++---------------
 server/vector_type.hpp | 12 ++++++
 stock.a                | 12 +++---
 12 files changed, 222 insertions(+), 124 deletions(-)

diff --git a/engine/ast.py b/engine/ast.py
index 02d3464..651fa26 100644
--- a/engine/ast.py
+++ b/engine/ast.py
@@ -92,7 +92,7 @@ class TableInfo:
             type_tags = type_tags[:-1]
         type_tags += '>'
-        self.cxt.emit(f'auto& {base_name} = *(TableInfo{type_tags} *)(cxt->tables[{self.table_name}]);')
+        self.cxt.emit(f'auto& {base_name} = *(TableInfo{type_tags} *)(cxt->tables["{self.table_name}"]);')
         return self.cxt_name
     def refer_all(self):
         self.reference()
diff --git a/engine/ddl.py b/engine/ddl.py
index 4a1e344..60be2be 100644
--- a/engine/ddl.py
+++ b/engine/ddl.py
@@ -1,15 +1,17 @@
 # code-gen for data decl languages
+from engine.orderby import orderby
 from engine.ast import ColRef, TableInfo, ast_node, Context, include
 from engine.scan import scan
 from engine.utils import base62uuid

 class create_table(ast_node):
     name = 'create_table'
-    def __init__(self, parent: "ast_node", node, context: Context = None, cexprs = None):
+    def __init__(self, parent: "ast_node", node, context: Context = None, cexprs = None, lineage = False):
         self.cexprs = cexprs
+        self.lineage = lineage
         super().__init__(parent, node, context)
-    def produce(self, node):
+    def produce(self, node):
         if type(node) is not TableInfo:
             ct = node[self.name]
             tbl = self.context.add_table(ct['name'], ct['columns'])
@@ -29,16 +31,32 @@ class create_table(ast_node):
                 self.emit(f"{c.cxt_name}.init();")
         # create an output table
         else:
+            # 1 to 1 lineage.
             if len(self.context.scans) == 0:
+                if self.lineage:
+                    order = 'order_' + base62uuid(6)
+                    self.emit(f'auto {order} = {self.parent.datasource.cxt_name}->order_by<{orderby(self.parent, self.parent.assumptions).result()}>();')
+                    self.lineage = '*' + order
+                else:
+                    self.lineage = None
                 for i, c in enumerate(tbl.columns):
                     self.emit(f"{c.cxt_name}.init();")
-                    self.emit(f"{c.cxt_name} = {self.cexprs[i]()};")
+                    self.emit(f"{c.cxt_name} = {self.cexprs[i](self.lineage)};")
+                self.lineage = None
+                self.parent.assumptions = None
             else:
                 scanner:scan = self.context.scans[-1]
+                if self.lineage:
+                    lineage_var = 'lineage_' + base62uuid(6)
+                    counter_var = 'counter_' + base62uuid(6)
+                    scanner.add(f'auto {lineage_var} = {self.datasource.cxt_name}->bind({tbl.cxt_name});', "init")
+                    scanner.add(f'auto {counter_var} = 0;', "init")
+                    scanner.add(f"{lineage_var}.emplace_back({counter_var}++);", "front")
+                    self.lineage = f"{lineage_var}.rid"
                 for i, c in enumerate(tbl.columns):
                     scanner.add(f"{c.cxt_name}.init();", "init")
                     scanner.add(f"{c.cxt_name} = {self.cexprs[i](scanner.it_ver)};")
-
+
 class insert(ast_node):
     name = 'insert'
     def produce(self, node):
diff --git a/engine/groupby.py b/engine/groupby.py
index 8d2999a..3a23c34 100644
--- a/engine/groupby.py
+++ b/engine/groupby.py
@@ -1,8 +1,9 @@
-from engine.ast import TableInfo, ast_node
+from engine.ast import ColRef, TableInfo, ast_node
+from engine.orderby import assumption
 from engine.scan import scan
 from engine.utils import base62uuid
 from engine.expr import expr
-import engine.types
+
 class groupby(ast_node):
     name = '_groupby'
     def init(self, _):
@@ -24,7 +25,8 @@ class groupby(ast_node):
         for i, g in enumerate(node):
             v = g['value']
             e = expr(self, v)
-            self.raw_groups.append(e.raw_col)
+            if type(e.raw_col) is ColRef:
+                self.raw_groups.append(e.raw_col)
             e = e._expr
             # if v is compound expr, create tmp cols
             if type(v) is not str:
@@ -48,7 +50,14 @@ class groupby(ast_node):
         self.referenced = self.datasource.rec
         self.datasource.rec = None
         self.scanner.finalize()
-
+
+    def deal_with_assumptions(self, assumption:assumption, out:TableInfo):
+        gscanner = scan(self, self.group)
+        val_var = 'val_'+base62uuid(7)
+        gscanner.add(f'auto &{val_var} = {gscanner.it_ver}.second;')
+        gscanner.add(f'{out.cxt_name}->order_by<{assumption.result()}>(&{val_var});')
+        gscanner.finalize()
+
     def finalize(self, cexprs, out:TableInfo):
         gscanner = scan(self, self.group)
         key_var = 'key_'+base62uuid(7)
@@ -58,4 +67,6 @@ class groupby(ast_node):
         gscanner.add(f'auto &{val_var} = {gscanner.it_ver}.second;')
         gscanner.add(';\n'.join([f'{out.columns[i].reference()}.emplace_back({ce(x=val_var, y=key_var)})' for i, ce in enumerate(cexprs)])+';')

-        gscanner.finalize()
\ No newline at end of file
+        gscanner.finalize()
+
+        self.datasource.groupinfo = None
\ No newline at end of file
diff --git a/engine/orderby.py b/engine/orderby.py
index 7993fcd..99e4ab5 100644
--- a/engine/orderby.py
+++ b/engine/orderby.py
@@ -35,7 +35,31 @@ class orderby(ast_node):
         for n in node:
             order = not ('sort' in n and n['sort'] == 'desc')
             col_id = self.datasource.columns_byname[n['value']].id
-            self.col_list.append(col_id if order else -col_id-1)
-            self.order.append(order_item(n['value'], self, order))
+            col_id = col_id if order else -col_id-1
+            if col_id not in self.col_list:
+                self.col_list.append(col_id)
+                self.order.append(order_item(n['value'], self, order))
+
+    def merge(self, node):
+        self.produce(node)
+
     def finialize(self, references):
-        self.order = [ o for o in self.order if o.name in references ]
\ No newline at end of file
+        self.order = [ o for o in self.order if o.name in references ]
+
+    def result(self, sep:str = ','):
+        return sep.join([f"{c}" for c in self.col_list])
+
+class assumption(orderby):
+    name = '_assumption'
+    def __init__(self, parent: "ast_node", node, context: Context = None, exclude = []):
+        self.exclude = exclude
+        super().__init__(parent, node, context)
+
+    def produce(self, node):
+        if type(node) is not list:
+            node = [node]
+        [n for n in node if n not in self.exclude]
+        return super().produce(node)
+
+    def empty(self):
+        return len(self.col_list) == 0
\ No newline at end of file
diff --git a/engine/projection.py b/engine/projection.py
index 0b6c02f..11c613a 100644
--- a/engine/projection.py
+++ b/engine/projection.py
@@ -2,7 +2,7 @@ from engine.ast import ColRef, TableInfo, ast_node, Context, include
 from engine.groupby import groupby
 from engine.join import join
 from engine.expr import expr
-from engine.orderby import orderby
+from engine.orderby import assumption, orderby
 from engine.scan import filter
 from engine.utils import base62uuid, enlist, base62alp, has_other
 from engine.ddl import create_table, outfile
@@ -14,7 +14,7 @@ class projection(ast_node):
         self.disp = disp
         self.outname = outname
         self.group_node = None
-        self.assumption = None
+        self.assumptions = None
         self.where = None
         ast_node.__init__(self, parent, node, context)
     def init(self, _):
@@ -46,7 +46,7 @@ class projection(ast_node):
                 elif type(value) is str:
                     self.datasource = self.context.tables_byname[value]
             if 'assumptions' in from_clause:
-                self.assumption = enlist(from_clause['assumptions'])
+                self.assumptions = enlist(from_clause['assumptions'])

         elif type(from_clause) is str:
             self.datasource = self.context.tables_byname[from_clause]
@@ -61,7 +61,7 @@ class projection(ast_node):
         if 'where' in node:
             self.where = filter(self, node['where'], True)
             # self.datasource = filter(self, node['where'], True).output
-            #self.context.datasource = self.datasource
+            # self.context.datasource = self.datasource

         if 'groupby' in node:
             self.group_node = groupby(self, node['groupby'])
@@ -73,10 +73,7 @@ class projection(ast_node):
     def consume(self, node):
         self.inv = True
         disp_varname = 'd'+base62uuid(7)
-        has_groupby = False
-        if self.group_node is not None:
-            # There is group by;
-            has_groupby = True
+        has_groupby = self.group_node is not None
         cexprs = []
         flatten = False
         cols = []
@@ -85,6 +82,7 @@ class projection(ast_node):
             flatten = True

         new_names = []
+        proj_raw_cols = []
         for i, proj in enumerate(self.projections):
             cname = ''
             compound = False
@@ -92,7 +90,10 @@ class projection(ast_node):
             if type(proj) is dict:
                 if 'value' in proj:
                     e = proj['value']
-                    sname = expr(self, e)._expr
+                    sname = expr(self, e)
+                    if type(sname.raw_col) is ColRef:
+                        proj_raw_cols.append(sname.raw_col)
+                    sname = sname._expr
                     fname = expr.toCExpr(sname) # fastest access method at innermost context
                     absname = expr(self, e, abs_col=True)._expr # absolute name at function scope
             # TODO: Make it single pass here.
@@ -118,26 +119,50 @@ class projection(ast_node):

         self.out_table.add_cols(cols, False)

+        lineage = None
+
         if has_groupby:
             create_table(self, self.out_table) # creates empty out_table.
+            if self.assumptions is not None:
+                self.assumptions = assumption(self, self.assumptions, exclude=self.group_node.raw_groups)
+                if not self.assumptions.empty():
+                    self.group_node.deal_with_assumptions(self.assumptions, self.out_table)
+                self.assumptions = None
             self.group_node.finalize(cexprs, self.out_table)
         else:
-            create_table(self, self.out_table, cexprs = cexprs) # create and populate out_table.
-
-        self.datasource.group_node = None
-
+            # if all assumptions in projections, treat as orderby
+            lineage = self.assumptions is not None and has_other(self.assumptions, proj_raw_cols)
+            spawn = create_table(self, self.out_table, cexprs = cexprs, lineage = lineage) # create and populate out_table.
+            if lineage and type(spawn.lineage) is str:
+                lineage = spawn.lineage
+                self.assumptions = orderby(self, self.assumptions) # do not exclude proj_raw_cols
+            else:
+                lineage = None
         if self.where is not None:
             self.where.finalize()
-        has_orderby = 'orderby' in node
-        if has_orderby:
+
+        if type(lineage) is str:
+            order = 'order_' + base62uuid(6)
+            self.emit(f'auto {order} = {self.datasource.cxt_name}->order_by<{self.assumptions.result()}>({lineage});')
+            self.emit(f'{self.out_table.cxt_name}->materialize(*{order});')
+            self.assumptions = None
+
+        if self.assumptions is not None:
+            orderby_node = orderby(self, self.assumptions)
+        else:
+            orderby_node = None
+
+        if 'orderby' in node:
             self.datasource = self.out_table
             self.context.datasource = self.out_table # discard current ds
-            orderby_node = orderby(self, node['orderby'])
-            self.emit(f'auto {disp_varname} = {self.out_table.reference()}->order_by_view<{",".join([f"{c}" for c in orderby_node.col_list])}>();')
+            orderbys = node['orderby']
+            orderby_node = orderby(self, orderbys) if orderby_node is None else orderby_node.merge(orderbys)
+
+        if orderby_node is not None:
+            self.emit(f'auto {disp_varname} = {self.out_table.reference()}->order_by_view<{orderby_node.result()}>();')
         else:
             disp_varname = f'*{self.out_table.cxt_name}'
+
         if self.disp:
             self.emit(f'print({disp_varname});')
diff --git a/engine/scan.py b/engine/scan.py
index fdbca2b..812165e 100644
--- a/engine/scan.py
+++ b/engine/scan.py
@@ -5,14 +5,16 @@ from engine.expr import expr

 class scan(ast_node):
     name = 'scan'
-    def __init__(self, parent: "ast_node", node, size = None, context: Context = None):
+    def __init__(self, parent: "ast_node", node, size = None, context: Context = None, const = False):
         self.type = type
         self.size = size
+        self.const = "const " if const else ""
         super().__init__(parent, node, context)
     def init(self, _):
         self.datasource = self.context.datasource
         self.initializers = ''
         self.start = ''
+        self.front = ''
         self.body = ''
         self.end = '}'
         self.mode = None
@@ -27,13 +29,13 @@ class scan(ast_node):
             self.colref = node
             if self.size is None:
                 self.mode = ["col", node.table]
-                self.start += f'for (auto& {self.it_ver} : {node.reference()}) {{\n'
+                self.start += f'for ({self.const}auto& {self.it_ver} : {node.reference()}) {{\n'
             else:
                 self.mode = ["idx", node.table]
                 self.start += f"for (uint32_t {self.it_ver} = 0; {self.it_ver} < {node.reference()}.size; ++{self.it_ver}){{\\n"
         elif type(node) is str:
             self.mode = ["idx", None]
-            self.start+= f'for(auto& {self.it_ver} : {node}) {{\n'
+            self.start+= f'for({self.const}auto& {self.it_ver} : {node}) {{\n'
         else:
             self.mode = ["idx", node] # Node is the TableInfo
             self.start += f"for (uint32_t {self.it_ver} = 0; {self.it_ver} < {self.size}; ++{self.it_ver}){{\n"
@@ -41,13 +43,16 @@ class scan(ast_node):
     def add(self, stmt, position = "body"):
         if position == "body":
             self.body += stmt + '\n'
-        else:
+        elif position == "init":
             self.initializers += stmt + '\n'
+        else:
+            self.front += stmt + '\n'
+
     def finalize(self):
         for f in self.filters:
             self.start += f
         self.end += '}'
-        self.context.remove_scan(self, self.initializers + self.start + self.body + self.end)
+        self.context.remove_scan(self, self.initializers + self.start + self.front + self.body + self.end)

 class filter(ast_node):
     name = 'filter'
diff --git a/moving_avg.a b/moving_avg.a
index 3fdd697..f9b21e5 100644
--- a/moving_avg.a
+++ b/moving_avg.a
@@ -6,7 +6,7 @@ FIELDS TERMINATED BY "\t"

 SELECT Month,avgs(3,sales)
 FROM sale
--- ASSUMING ASC Month
+ASSUMING ASC Month
 INTO OUTFILE "moving_avg_output.csv"
 FIELDS TERMINATED BY ";"
diff --git a/out.cpp b/out.cpp
index 9a2e44f..03d68f4 100644
--- a/out.cpp
+++ b/out.cpp
@@ -1,60 +1,61 @@
-#include
-#include "./server/libaquery.h"
 #include "./server/hasher.h"
 #include "./server/aggregations.h"
 #include "csv.h"
+#include "./server/libaquery.h"
+#include

 extern "C" int __DLLEXPORT__ dllmain(Context* cxt) {
 using namespace std;
 using namespace types;
- auto test = new TableInfo("test", 4);
-cxt->tables.insert({"test", test});
-auto& test_a = *(ColRef *)(&test->colrefs[0]);
-auto& test_b = *(ColRef *)(&test->colrefs[1]);
-auto& test_c = *(ColRef *)(&test->colrefs[2]);
-auto& test_d = *(ColRef *)(&test->colrefs[3]);
-test_a.init();
-test_b.init();
-test_c.init();
-test_d.init();
-io::CSVReader<4> csv_reader_4bTMJ9("test.csv");
-csv_reader_4bTMJ9.read_header(io::ignore_extra_column, "a","b","c","d");
-int tmp_78E1nhZJ;
-int tmp_4wnHGd9t;
-int tmp_5OL9GlRp;
-int tmp_155GVQC6;
-while(csv_reader_4bTMJ9.read_row(tmp_78E1nhZJ,tmp_4wnHGd9t,tmp_5OL9GlRp,tmp_155GVQC6)) {
+ auto sale = new TableInfo("sale", 2);
+cxt->tables.insert({"sale", sale});
+auto& sale_Month = *(ColRef *)(&sale->colrefs[0]);
+auto& sale_sales = *(ColRef *)(&sale->colrefs[1]);
+sale_Month.init();
+sale_sales.init();
+io::CSVReader<2> csv_reader_6T89Ll("moving_avg.csv");
+csv_reader_6T89Ll.read_header(io::ignore_extra_column, "Month","sales");
+int tmp_5vttJ2yV;
+int tmp_2ckq15YU;
+while(csv_reader_6T89Ll.read_row(tmp_5vttJ2yV,tmp_2ckq15YU)) {

-test_a.emplace_back(tmp_78E1nhZJ);
-test_b.emplace_back(tmp_4wnHGd9t);
-test_c.emplace_back(tmp_5OL9GlRp);
-test_d.emplace_back(tmp_155GVQC6);
+sale_Month.emplace_back(tmp_5vttJ2yV);
+sale_sales.emplace_back(tmp_2ckq15YU);
 }
-typedef record record_type6jn8Y49;
-unordered_map, transTypes> g5gn6KEb;
-for (uint32_t i3V = 0; i3V < test_a.size; ++i3V){
-g5gn6KEb[forward_as_tuple(test_a[i3V],test_b[i3V],test_d[i3V])].emplace_back(i3V);
+auto out_2UiD = new TableInfo>,value_type>>("out_2UiD", 2);
+cxt->tables.insert({"out_2UiD", out_2UiD});
+auto& out_2UiD_Month = *(ColRef>> *)(&out_2UiD->colrefs[0]);
+auto& out_2UiD_avgw3salesales = *(ColRef>> *)(&out_2UiD->colrefs[1]);
+auto order_1NNZ9F = sale->order_by<0>();
+out_2UiD_Month.init();
+out_2UiD_Month = sale_Month[*order_1NNZ9F];
+out_2UiD_avgw3salesales.init();
+out_2UiD_avgw3salesales = avgw(3,sale_sales[*order_1NNZ9F]);
+print(*out_2UiD);
+FILE* fp_6xIJn4 = fopen("moving_avg_output.csv", "w");
+out_2UiD->printall(";", "\n", nullptr, fp_6xIJn4);
+fclose(fp_6xIJn4);
+typedef record record_type6Lepq5T;
+unordered_map, transTypes> g4loWjmn;
+for (uint32_t i5g = 0; i5g < sale_sales.size; ++i5g){
+g4loWjmn[forward_as_tuple(sale_sales[i5g])].emplace_back(i5g);
 }
-auto out_4DCN = new TableInfo,value_type>,value_type>>("out_4DCN", 3);
-cxt->tables.insert({"out_4DCN", out_4DCN});
-auto& out_4DCN_sumtestc = *(ColRef> *)(&out_4DCN->colrefs[0]);
-auto& out_4DCN_b = *(ColRef>> *)(&out_4DCN->colrefs[1]);
-auto& out_4DCN_d = *(ColRef>> *)(&out_4DCN->colrefs[2]);
-auto lineage = test->bind(out_4DCN);
-out_4DCN_sumtestc.init();
-out_4DCN_b.init();
-out_4DCN_d.init();
-for(auto& i1s : g5gn6KEb) {
-auto &key_4Q0aEyH = i1s.first;
-auto &val_7BUMR6d = i1s.second;
-out_4DCN_sumtestc.emplace_back(sum(test_c[val_7BUMR6d]));
-out_4DCN_b.emplace_back(get<1>(key_4Q0aEyH));
-out_4DCN_d.emplace_back(get<2>(key_4Q0aEyH));
-lineage.emplace_back(val_7BUMR6d[0]);
+auto out_2YlO = new TableInfo>,decays>("out_2YlO", 2);
+cxt->tables.insert({"out_2YlO", out_2YlO});
+auto& out_2YlO_sales = *(ColRef>> *)(&out_2YlO->colrefs[0]);
+auto& out_2YlO_minw2saleMonth = *(ColRef> *)(&out_2YlO->colrefs[1]);
+out_2YlO_sales.init();
+out_2YlO_minw2saleMonth.init();
+for(auto& iFU : g4loWjmn) {
+auto &key_3AwvKMR = iFU.first;
+auto &val_7jtE12E = iFU.second;
+out_2YlO_sales.emplace_back(get<0>(key_3AwvKMR));
+out_2YlO_minw2saleMonth.emplace_back(minw(2,sale_Month[val_7jtE12E]));
 }
-print(lineage.rid);
-auto d6X0PMzl = out_4DCN->order_by_view<-3,1>();
-print(d6X0PMzl);
+print(*out_2YlO);
+FILE* fp_45ld6S = fopen("flatten.csv", "w");
+out_2YlO->printall(",", "\n", nullptr, fp_45ld6S);
+fclose(fp_45ld6S);
 return 0;
 }
\ No newline at end of file
diff --git a/server/server.cpp b/server/server.cpp
index 934cfe1..8a32f10 100644
--- a/server/server.cpp
+++ b/server/server.cpp
@@ -100,6 +100,6 @@ int _main()
     }
     //static_assert(std::is_same_v()), std::integer_sequence>, "");
     return 0;
-
+    std::unordered_map a;
 }
diff --git a/server/table.h b/server/table.h
index 655d11e..0179ab8 100644
--- a/server/table.h
+++ b/server/table.h
@@ -183,7 +183,7 @@ struct TableInfo {
 	TableInfo(const char* name, uint32_t n_cols);
 	template inline void materialize(const vector_type& idxs, TableInfo* tbl = nullptr) { // inplace materialize
-		if constexpr(prog == 0) tbl = 0 ? this : tbl;
+		if constexpr(prog == 0) tbl = (tbl == 0 ? this : tbl);
 		if constexpr (prog == sizeof...(Types)) return;
 		else {
 			auto& col = get(*this);
@@ -191,7 +191,7 @@ struct TableInfo {
 			for(uint32_t i = 0; i < idxs.size; ++i)
 				new_col[i] = col[idxs[i]];
 			get(*tbl) = new_col;
-			materialize();
+			materialize(idxs, tbl);
 		}
 	}
 	inline TableInfo* materialize_copy(const vector_type& idxs) {
@@ -200,10 +200,12 @@ struct TableInfo {
 		return tbl;
 	}
 	template
-	inline vector_type* order_by() {
-		vector_type* ord = new vector_type(colrefs[0].size);
-		for (uint32_t i = 0; i < colrefs[0].size; ++i)
-			(*ord)[i] = i;
+	inline vector_type* order_by(vector_type* ord = nullptr) {
+		if (!ord) {
+			ord = new vector_type(colrefs[0].size);
+			for (uint32_t i = 0; i < colrefs[0].size; ++i)
+				(*ord)[i] = i;
+		}
 		std::sort(ord->begin(), ord->end(), [this](const uint32_t& lhs, const uint32_t& rhs) {
 			return std::forward_as_tuple((cols >= 0 ? get= 0)>(*this)[lhs] : -get= 0)>(*this)[lhs]) ...)
@@ -345,60 +347,60 @@ inline void TableInfo::print(const char* __restrict sep, const char* _
 		std::cout << end;
 	}
 }
-template class VT>
-VT::type> operator -(const VT& lhs, const VT& rhs) {
-	auto ret = VT::type>(lhs.size, "");
+template class VT, template class VT2>
+decayed_t::type> operator -(const VT& lhs, const VT2& rhs) {
+	auto ret = decayed_t::type>(lhs.size, "");
 	for (int i = 0; i < lhs.size; ++i)
-		ret.container[i] = lhs.container[i] - rhs.container[i];
+		ret[i] = lhs[i] - rhs[i];
 	return ret;
 }
 template class VT>
-VT::type> operator -(const VT& lhs, const T2& rhs) {
-	auto ret = VT::type>(lhs.size, "");
+decayed_t::type> operator -(const VT& lhs, const T2& rhs) {
+	auto ret = decayed_t::type>(lhs.size, "");
 	for (int i = 0; i < lhs.size; ++i)
-		ret.container[i] = lhs.container[i] - rhs;
+		ret[i] = lhs[i] - rhs;
 	return ret;
 }
-template class VT>
-VT::type> operator +(const VT& lhs, const VT& rhs) {
-	auto ret = VT::type>(lhs.size, "");
+template class VT, template class VT2>
+decayed_t::type> operator +(const VT& lhs, const VT2& rhs) {
+	auto ret = decayed_t::type>(lhs.size, "");
 	for (int i = 0; i < lhs.size; ++i)
-		ret.container[i] = lhs.container[i] + rhs.container[i];
+		ret[i] = lhs[i] + rhs[i];
 	return ret;
 }
 template class VT>
-VT::type> operator +(const VT& lhs, const T2& rhs) {
-	auto ret = VT::type>(lhs.size, "");
+decayed_t::type> operator +(const VT& lhs, const T2& rhs) {
+	auto ret = decayed_t::type>(lhs.size, "");
 	for (int i = 0; i < lhs.size; ++i)
-		ret.container[i] = lhs.container[i] + rhs;
+		ret[i] = lhs[i] + rhs;
 	return ret;
 }
-template class VT>
-VT::type> operator *(const VT& lhs, const VT& rhs) {
-	auto ret = VT::type>(lhs.size, "");
+template class VT, template class VT2>
+decayed_t::type> operator *(const VT& lhs, const VT2& rhs) {
+	auto ret = decayed_t::type>(lhs.size, "");
 	for (int i = 0; i < lhs.size; ++i)
-		ret.container[i] = lhs.container[i] * rhs.container[i];
+		ret[i] = lhs[i] * rhs[i];
 	return ret;
 }
 template class VT>
-VT::type> operator *(const VT& lhs, const T2& rhs) {
-	auto ret = VT::type>(lhs.size, "");
+decayed_t::type> operator *(const VT& lhs, const T2& rhs) {
+	auto ret = decayed_t::type>(lhs.size, "");
 	for (int i = 0; i < lhs.size; ++i)
-		ret.container[i] = lhs.container[i] * rhs;
+		ret[i] = lhs[i] * rhs;
 	return ret;
 }
-template class VT>
-VT::type> operator /(const VT& lhs, const VT& rhs) {
-	auto ret = VT::type>(lhs.size, "");
+template class VT, template class VT2>
+decayed_t::type> operator /(const VT& lhs, const VT2& rhs) {
+	auto ret = decayed_t::type>(lhs.size, "");
 	for (int i = 0; i < lhs.size; ++i)
-		ret.container[i] = lhs.container[i] / rhs.container[i];
+		ret[i] = lhs[i] / rhs[i];
 	return ret;
 }
 template class VT>
-VT::type> operator /(const VT& lhs, const T2& rhs) {
-	auto ret = VT::type>(lhs.size, "");
+decayed_t::type> operator /(const VT& lhs, const T2& rhs) {
+	auto ret = decayed_t::type>(lhs.size, "");
 	for (int i = 0; i < lhs.size; ++i)
-		ret.container[i] = lhs.container[i] / rhs;
+		ret[i] = lhs[i] / rhs;
 	return ret;
 }
diff --git a/server/vector_type.hpp b/server/vector_type.hpp
index f4c8214..da25092 100644
--- a/server/vector_type.hpp
+++ b/server/vector_type.hpp
@@ -81,6 +81,18 @@ public:
 		_move(std::move(vt));
 		return *this;
 	}
+	template