commit 70d7167c1e (parent a81fd176e9)

@@ -0,0 +1,145 @@
from engine.ast import ColRef, TableInfo, ast_node, Context, include
from engine.groupby import groupby
from engine.join import join
from engine.expr import expr
from engine.orderby import orderby
from engine.scan import filter
from engine.utils import base62uuid, enlist, base62alp
from engine.ddl import create_table, outfile
import copy

class projection(ast_node):
    name='select'
    def __init__(self, parent:ast_node, node, context:Context = None, outname = None, disp = True):
        self.disp = disp
        self.outname = outname
        self.group_node = None
        self.assumption = None
        self.where = None
        ast_node.__init__(self, parent, node, context)
    def init(self, _):
        if self.outname is None:
            self.outname = self.context.gen_tmptable()

    def produce(self, node):
        p = node['select']
        self.projections = p if type(p) is list else [p]
        print(node)

    def spawn(self, node):
        self.datasource = None
        if 'from' in node:
            from_clause = node['from']
            if type(from_clause) is list:
                # from joins
                join(self, from_clause)
            elif type(from_clause) is dict:
                if 'value' in from_clause:
                    value = from_clause['value']
                    if type(value) is dict:
                        if 'select' in value:
                            # from subquery
                            projection(self, from_clause, disp = False)
                        else:
                            # TODO: from func over table
                            print(f'from func over table{node}')
                    elif type(value) is str:
                        self.datasource = self.context.tables_byname[value]
                if 'assumptions' in from_clause:
                    self.assumption = orderby(self, enlist(from_clause['assumptions']))

            elif type(from_clause) is str:
                self.datasource = self.context.tables_byname[from_clause]

            if self.datasource is None:
                raise ValueError('spawn error: from clause')

        if self.datasource is not None:
            self.datasource_changed = True
            self.prev_datasource = self.context.datasource
            self.context.datasource = self.datasource
        if 'where' in node:
            self.where = filter(self, node['where'], True)
            # self.datasource = filter(self, node['where'], True).output
            #self.context.datasource = self.datasource

        if 'groupby' in node:
            self.group_node = groupby(self, node['groupby'])
            self.datasource = copy.copy(self.datasource) # shallow copy
            self.datasource.groupinfo = self.group_node
        else:
            self.group_node = None

    def consume(self, node):
        self.inv = True
        disp_varname = 'd'+base62uuid(7)
        has_groupby = False
        if self.group_node is not None:
            # There is group by;
            has_groupby = True
        cexprs = []
        flatten = False
        cols = []
        self.out_table = TableInfo('out_'+base62uuid(4), [], self.context)
        if 'outfile' in node:
            flatten = True

        new_names = []
        for i, proj in enumerate(self.projections):
            cname = ''
            compound = False
            self.datasource.rec = set()
            if type(proj) is dict:
                if 'value' in proj:
                    e = proj['value']
                    sname = expr(self, e)._expr
                    fname = expr.toCExpr(sname) # fastest access method at innermost context
                    absname = expr(self, e, abs_col=True)._expr # absolute name at function scope
                    compound = True
                    cexprs.append(fname)
                    cname = e if type(e) is str else ''.join([a if a in base62alp else '' for a in expr.toCExpr(absname)()])
                if 'name' in proj: # renaming column by AS keyword
                    cname = proj['name']
                    new_names.append(cname)
            elif type(proj) is str:
                col = self.datasource.get_col_d(proj)
                if type(col) is ColRef:
                    col.reference()
            compound = compound and has_groupby and self.datasource.rec not in self.group_node.referenced
            self.datasource.rec = None
            cols.append(ColRef(cname, expr.toCExpr(f'decays<decltype({absname})>')(0), self.out_table, 0, None, cname, i, compound=compound))
        self.out_table.add_cols(cols, False)

        if has_groupby:
            create_table(self, self.out_table)
            self.group_node.finalize(cexprs, self.out_table)
        else:
            create_table(self, self.out_table, cexpr = cexprs)
        self.datasource.group_node = None

        if self.where is not None:
            self.where.finalize()

        has_orderby = 'orderby' in node
        if has_orderby:
            self.datasource = self.out_table
            self.context.datasource = self.out_table # discard current ds
            orderby_node = orderby(self, node['orderby'])
            self.emit(f'auto {disp_varname} ={self.out_table.reference()}->order_by_view<{",".join([f"{c}" for c in orderby_node.col_list])}>();')
        else:
            disp_varname = f'*{self.out_table.cxt_name}'
        if self.disp:
            self.emit(f'print({disp_varname});')


        if flatten:
            if len(self.projections) > 1 and not self.inv:
                self.emit(f"{disp_varname}:+{disp_varname}")
            outfile(self, node['outfile'])

        if self.datasource_changed:
            self.context.datasource = self.prev_datasource


import sys
include(sys.modules[__name__])
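Reading aid (not part of the commit): produce, spawn and consume above walk a parsed-query dict coming from aquery_parser. The sketch below is a hypothetical illustration of the keys the projection node actually reads ('select', 'from', 'where', 'groupby', 'orderby', 'outfile', plus 'value'/'name' inside each projection), using the sale(Month, sales) table that appears in the generated C++ further down; the exact sub-structure of the 'where' and window-function entries is determined by the parser and is only assumed here.

# Hypothetical parsed node for a query roughly like:
#   SELECT Month, avgs(3, sales) AS avg3 FROM sale WHERE sales > 100 ORDER BY Month
# Only the keys that projection.produce/spawn/consume inspect are shown.
example_node = {
    'select': [
        {'value': 'Month'},                                 # plain column -> get_col_d path
        {'value': {'avgs': [3, 'sales']}, 'name': 'avg3'},  # compound expr, renamed via AS
    ],
    'from': 'sale',                   # str -> self.context.tables_byname lookup
    'where': {'gt': ['sales', 100]},  # handed to engine.scan.filter (structure assumed)
    'orderby': {'value': 'Month'},    # handed to engine.orderby.orderby
}
# Constructing projection(parent_node, example_node, context) would then emit C++
# that creates an out_* TableInfo, as in the generated translation unit below.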
@@ -0,0 +1,90 @@
<Project DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003" ToolsVersion="4.0">
  <PropertyGroup>
    <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
    <SchemaVersion>2.0</SchemaVersion>
    <ProjectGuid>ccc243f5-663e-45b7-a6de-b2468c58b3a7</ProjectGuid>
    <ProjectHome>.</ProjectHome>
    <StartupFile>
    </StartupFile>
    <SearchPath>..\msvs-py</SearchPath>
    <WorkingDirectory>.</WorkingDirectory>
    <OutputPath>.</OutputPath>
    <Name>msvs-py</Name>
    <RootNamespace>msvs-py</RootNamespace>
  </PropertyGroup>
  <PropertyGroup Condition=" '$(Configuration)' == 'Debug' ">
    <DebugSymbols>true</DebugSymbols>
    <EnableUnmanagedDebugging>false</EnableUnmanagedDebugging>
  </PropertyGroup>
  <PropertyGroup Condition=" '$(Configuration)' == 'Release' ">
    <DebugSymbols>true</DebugSymbols>
    <EnableUnmanagedDebugging>false</EnableUnmanagedDebugging>
  </PropertyGroup>
  <ItemGroup>
    <Folder Include="aquery_parser\" />
    <Folder Include="aquery_parser\__pycache__\" />
    <Folder Include="engine\" />
    <Folder Include="engine\__pycache__\" />
  </ItemGroup>
  <ItemGroup>
    <Compile Include="aquery_parser\keywords.py" />
    <Compile Include="aquery_parser\sql_parser.py" />
    <Compile Include="aquery_parser\types.py" />
    <Compile Include="aquery_parser\utils.py" />
    <Compile Include="aquery_parser\windows.py" />
    <Compile Include="aquery_parser\__init__.py" />
    <Compile Include="engine\ast.py" />
    <Compile Include="engine\ddl.py" />
    <Compile Include="engine\expr.py" />
    <Compile Include="engine\groupby.py" />
    <Compile Include="engine\join.py" />
    <Compile Include="engine\orderby.py" />
    <Compile Include="engine\projection.py" />
    <Compile Include="engine\scan.py" />
    <Compile Include="engine\types.py" />
    <Compile Include="engine\utils.py" />
    <Compile Include="engine\__init__.py" />
    <Compile Include="prompt.py" />
  </ItemGroup>
  <ItemGroup>
    <Content Include="aquery_parser\__pycache__\keywords.cpython-310.pyc" />
    <Content Include="aquery_parser\__pycache__\keywords.cpython-39.pyc" />
    <Content Include="aquery_parser\__pycache__\sql_parser.cpython-310.pyc" />
    <Content Include="aquery_parser\__pycache__\sql_parser.cpython-39.pyc" />
    <Content Include="aquery_parser\__pycache__\types.cpython-310.pyc" />
    <Content Include="aquery_parser\__pycache__\types.cpython-39.pyc" />
    <Content Include="aquery_parser\__pycache__\utils.cpython-310.pyc" />
    <Content Include="aquery_parser\__pycache__\utils.cpython-39.pyc" />
    <Content Include="aquery_parser\__pycache__\windows.cpython-310.pyc" />
    <Content Include="aquery_parser\__pycache__\windows.cpython-39.pyc" />
    <Content Include="aquery_parser\__pycache__\__init__.cpython-310.pyc" />
    <Content Include="aquery_parser\__pycache__\__init__.cpython-39.pyc" />
    <Content Include="engine\__pycache__\ast.cpython-310.pyc" />
    <Content Include="engine\__pycache__\ast.cpython-39.pyc" />
    <Content Include="engine\__pycache__\ddl.cpython-310.pyc" />
    <Content Include="engine\__pycache__\ddl.cpython-39.pyc" />
    <Content Include="engine\__pycache__\expr.cpython-310.pyc" />
    <Content Include="engine\__pycache__\expr.cpython-39.pyc" />
    <Content Include="engine\__pycache__\groupby.cpython-310.pyc" />
    <Content Include="engine\__pycache__\join.cpython-310.pyc" />
    <Content Include="engine\__pycache__\join.cpython-39.pyc" />
    <Content Include="engine\__pycache__\orderby.cpython-310.pyc" />
    <Content Include="engine\__pycache__\projection.cpython-310.pyc" />
    <Content Include="engine\__pycache__\projection.cpython-39.pyc" />
    <Content Include="engine\__pycache__\scan.cpython-310.pyc" />
    <Content Include="engine\__pycache__\types.cpython-310.pyc" />
    <Content Include="engine\__pycache__\utils.cpython-310.pyc" />
    <Content Include="engine\__pycache__\utils.cpython-39.pyc" />
    <Content Include="engine\__pycache__\__init__.cpython-310.pyc" />
    <Content Include="engine\__pycache__\__init__.cpython-39.pyc" />
  </ItemGroup>
  <Import Project="$(MSBuildExtensionsPath32)\Microsoft\VisualStudio\v$(VisualStudioVersion)\Python Tools\Microsoft.PythonTools.targets" />
  <!-- Uncomment the CoreCompile target to enable the Build command in
       Visual Studio and specify your pre- and post-build commands in
       the BeforeBuild and AfterBuild targets below. -->
  <!--<Target Name="CoreCompile" />-->
  <Target Name="BeforeBuild">
  </Target>
  <Target Name="AfterBuild">
  </Target>
</Project>
@@ -0,0 +1,60 @@
#include "./server/libaquery.h"
#include <unordered_map>
#include "./server/hasher.h"
#include "csv.h"
#include "./server/aggregations.h"

    extern "C" int __DLLEXPORT__ dllmain(Context* cxt) {
        using namespace std;
        using namespace types;

    auto sale = new TableInfo<int,int>("sale", 2);
cxt->tables.insert({"sale", sale});
auto& sale_Month = *(ColRef<int> *)(&sale->colrefs[0]);
auto& sale_sales = *(ColRef<int> *)(&sale->colrefs[1]);
sale_Month.init();
sale_sales.init();
io::CSVReader<2> csv_reader_53ychC("moving_avg.csv");
csv_reader_53ychC.read_header(io::ignore_extra_column, "Month","sales");
int tmp_7ttMnHd3;
int tmp_5nHjeAtP;
while(csv_reader_53ychC.read_row(tmp_7ttMnHd3,tmp_5nHjeAtP)) {

sale_Month.emplace_back(tmp_7ttMnHd3);
sale_sales.emplace_back(tmp_5nHjeAtP);
}
auto out_3Xio = new TableInfo<decays<decltype(sale_Month[0])>,decays<decltype(avgw(3,sale_sales))>>("out_3Xio", 2);
cxt->tables.insert({"out_3Xio", out_3Xio});
auto& out_3Xio_Month = *(ColRef<decays<decltype(sale_Month[0])>> *)(&out_3Xio->colrefs[0]);
auto& out_3Xio_avgsw3salesales = *(ColRef<decays<decltype(avgw(3,sale_sales))>> *)(&out_3Xio->colrefs[1]);
out_3Xio_Month.init();
out_3Xio_Month = sale_Month;
out_3Xio_avgsw3salesales.init();
out_3Xio_avgsw3salesales = avgw(3,sale_sales);
// print(*out_3Xio);
FILE* fp_4nKGhD = fopen("moving_avg_output.csv", "w");
out_3Xio->printall(",", "\n", nullptr, fp_4nKGhD);
fclose(fp_4nKGhD);
typedef record<decltype(sale_sales[0])> record_type1H2vDGL;
unordered_map<record_type1H2vDGL, vector_type<uint32_t>, transTypes<record_type1H2vDGL, hasher>> g6Mjxfk5;
for (uint32_t i7u = 0; i7u < sale_sales.size; ++i7u){
g6Mjxfk5[forward_as_tuple(sale_sales[i7u])].emplace_back(i7u);
}
auto out_2IU2 = new TableInfo<decays<decltype(sale_sales[0])>,decays<decltype(minw(2,sale_Month))>>("out_2IU2", 2);
cxt->tables.insert({"out_2IU2", out_2IU2});
auto& out_2IU2_sales = *(ColRef<decays<decltype(sale_sales[0])>> *)(&out_2IU2->colrefs[0]);
auto& out_2IU2_minsw2saleMonth = *(ColRef<decays<decltype(minw(2,sale_Month))>> *)(&out_2IU2->colrefs[1]);
out_2IU2_sales.init();
out_2IU2_minsw2saleMonth.init();
for(auto& i5J : g6Mjxfk5) {
auto &key_4jl5toH = i5J.first;
auto &val_VJGwVwH = i5J.second;
out_2IU2_sales.emplace_back(get<0>(key_4jl5toH));
out_2IU2_minsw2saleMonth.emplace_back(minw(2,sale_Month[val_VJGwVwH]));
}
// print(*out_2IU2);
FILE* fp_18R4fY = fopen("flatten.csv", "w");
out_2IU2->printall(",","\n", nullptr, fp_18R4fY);
fclose(fp_18R4fY);
return 0;
}
@@ -1 +1,11 @@
#include "vector_type.hpp"
#include <iostream>
template<typename _Ty>
inline void vector_type<_Ty>::out(uint32_t n, const char* sep) const
{
	n = n > size ? size : n;
	std::cout << '(';
		for (uint32_t i = 0; i < n; ++i)
			std::cout << this->operator[](i) << sep;
	std::cout << ')';
}