├── .gitignore ├── LICENSE ├── NOTICE ├── README.md ├── build.sbt ├── dbtoaster.js ├── benchalljs.sh ├── benchjs.sh ├── compileJS.sh ├── copyalljs.sh ├── copyjs.sh ├── js.sh └── runJS.sh ├── ddbtoaster ├── .gitignore ├── .travis.yml ├── core │ ├── Tree.scala │ ├── build.sbt │ ├── codegen │ │ ├── AkkaGen.scala │ │ ├── CodeGen.scala │ │ ├── CodeGenOptions.scala │ │ ├── CppGen.scala │ │ └── ScalaGen.scala │ ├── frontend │ │ ├── Parsers.scala │ │ ├── Partitioning.scala │ │ └── TypeCheck.scala │ └── util │ │ ├── ManifestHelper.scala │ │ └── TypeHelper.scala ├── docs │ ├── benchmarks │ │ ├── cpp-std-tpch-o3.csv │ │ ├── finance-huge.txt │ │ ├── finance_queries.xlsx │ │ ├── parse.php │ │ ├── scala-std-o2.csv │ │ ├── scala-std-o3.csv │ │ ├── tpch-big.txt │ │ ├── tpch-big_del.txt │ │ └── tpch_queries.xlsx │ ├── correctness │ │ ├── lms.txt │ │ └── untested.csv │ ├── drafts │ │ ├── Makefile │ │ ├── lifts.md │ │ ├── m3p.tex │ │ └── m4.tex │ ├── graphs │ │ └── alpha5_release │ │ │ ├── Makefile │ │ │ ├── engine-comparison.csv │ │ │ └── engine-comparison.ps │ ├── help │ │ └── win_dev_install.md │ ├── inc │ │ ├── bibliography.bib │ │ └── style.sty │ ├── presentations │ │ ├── DDBT │ │ │ ├── wip1.key │ │ │ ├── wip2.key │ │ │ └── wip3.key │ │ └── LMSinDDBToaster │ │ │ ├── CPS1.png │ │ │ ├── CPS2.png │ │ │ ├── DBToasterHighLevel.graffle │ │ │ ├── DBToasterHighLevelUsingDDBToaster.graffle │ │ │ ├── DBToasterHighLevelUsingToasterBooster.graffle │ │ │ ├── LMSinDDBToaster.tex │ │ │ ├── bib.tex │ │ │ └── finance-large.png │ └── tck │ │ ├── BugAkka.scala │ │ ├── ContVsFuture.scala │ │ ├── Library.scala │ │ ├── SleepyMaster.scala │ │ ├── VLDBJ_2013 experiments.webloc │ │ ├── akka1 │ │ ├── AXFinderAkka1.scala │ │ ├── AXFinderAkka2.scala │ │ ├── AkkaSystem.scala │ │ ├── MapsAkka.scala │ │ ├── TPCH18Akka.scala │ │ └── TPCH18Akka2.scala │ │ ├── batch │ │ ├── build.sbt │ │ └── src │ │ │ └── Batch.scala │ │ ├── calc.zip │ │ ├── cluster_old │ │ ├── AkkaCluster.txt │ │ └── cluster.sh │ │ ├── cuckoo.cpp │ │ ├── kryo │ │ ├── README.md │ │ ├── akka.conf │ │ ├── akka │ │ │ ├── AkkaSerializers.scala │ │ │ ├── KryoSerializer.scala │ │ │ └── KryoSerializerExtension.scala │ │ ├── reference.conf │ │ └── scala │ │ │ ├── KryoClassResolver.scala │ │ │ ├── ScalaCollectionsSerializer.scala │ │ │ ├── ScalaCommonSerializers.scala │ │ │ └── ScalaProductSerializers.scala │ │ ├── log4j.properties │ │ ├── perf │ │ ├── Maps2.scala.txt │ │ ├── cuckoo.cpp │ │ ├── hash_c11.cpp │ │ ├── hash_java_old.java │ │ ├── hashmap_boost.cpp │ │ ├── hashmap_c.c │ │ ├── hashmap_jvm.java │ │ ├── hashmap_list_cpp.cpp │ │ ├── hashmap_m3.java │ │ └── simple_boost.cpp │ │ ├── spark │ │ ├── AXSpark.scala │ │ ├── MapsSpark.scala │ │ ├── TPCH18Spark.scala │ │ └── TPCH18SparkSimple.scala │ │ ├── test_nio │ │ ├── build.sbt │ │ ├── perf.csv │ │ └── src │ │ │ ├── AkkaTest.scala │ │ │ ├── Messages.java │ │ │ └── NioNodes.scala │ │ └── test_serial │ │ ├── build.sbt │ │ ├── run.sh │ │ ├── src │ │ ├── Serial.java │ │ ├── Serial.scala │ │ ├── include │ │ │ ├── jni.h │ │ │ └── jni_md.h │ │ └── serial.c │ │ └── toy │ │ ├── Reflect.java │ │ ├── Toy.scala │ │ ├── Vec3.c │ │ ├── Vec3.h │ │ ├── Vec3.scala │ │ └── make ├── experiments │ ├── README │ ├── datasets │ │ └── README │ ├── run_tpcds.sh │ ├── run_tpcds_batch.sh │ ├── run_tpch.sh │ ├── run_tpch_batch.sh │ ├── run_tpch_memory.sh │ └── src │ │ ├── lastfm │ │ ├── lastfm.hpp │ │ └── lastfm_template.hpp │ │ ├── lib │ │ ├── csvreader.hpp │ │ ├── functions.hpp │ │ ├── hash.hpp │ │ ├── hashmap.hpp │ │ ├── macro.hpp │ │ ├── mmap.hpp │ │ 
├── pool.hpp │ │ ├── serialization.hpp │ │ ├── stopwatch.hpp │ │ ├── string.hpp │ │ └── types.hpp │ │ ├── main.cpp │ │ ├── tpcds │ │ ├── codegen │ │ ├── codegen_batch │ │ ├── codegen_revision_r3408 │ │ │ ├── Tpcds19VCpp.hpp │ │ │ ├── Tpcds27VCpp.hpp │ │ │ ├── Tpcds34VCpp.hpp │ │ │ ├── Tpcds3VCpp.hpp │ │ │ ├── Tpcds42VCpp.hpp │ │ │ ├── Tpcds43VCpp.hpp │ │ │ ├── Tpcds46VCpp.hpp │ │ │ ├── Tpcds52VCpp.hpp │ │ │ ├── Tpcds55VCpp.hpp │ │ │ ├── Tpcds68VCpp.hpp │ │ │ ├── Tpcds73VCpp.hpp │ │ │ ├── Tpcds79VCpp.hpp │ │ │ └── Tpcds7VCpp.hpp │ │ ├── codegen_revision_r3408_batch │ │ │ ├── Tpcds19VCpp.hpp │ │ │ ├── Tpcds27VCpp.hpp │ │ │ ├── Tpcds34VCpp.hpp │ │ │ ├── Tpcds3VCpp.hpp │ │ │ ├── Tpcds42VCpp.hpp │ │ │ ├── Tpcds43VCpp.hpp │ │ │ ├── Tpcds46VCpp.hpp │ │ │ ├── Tpcds52VCpp.hpp │ │ │ ├── Tpcds55VCpp.hpp │ │ │ ├── Tpcds68VCpp.hpp │ │ │ ├── Tpcds73VCpp.hpp │ │ │ ├── Tpcds79VCpp.hpp │ │ │ └── Tpcds7VCpp.hpp │ │ ├── query19.hpp │ │ ├── query27.hpp │ │ ├── query3.hpp │ │ ├── query34.hpp │ │ ├── query42.hpp │ │ ├── query43.hpp │ │ ├── query46.hpp │ │ ├── query52.hpp │ │ ├── query55.hpp │ │ ├── query68.hpp │ │ ├── query7.hpp │ │ ├── query73.hpp │ │ ├── query79.hpp │ │ ├── tpcds.hpp │ │ └── tpcds_template.hpp │ │ └── tpch │ │ ├── codegen │ │ ├── codegen_batch │ │ ├── codegen_initial_submission_r3391 │ │ ├── Tpch10VCpp.hpp │ │ ├── Tpch11VCpp.hpp │ │ ├── Tpch12VCpp.hpp │ │ ├── Tpch13VCpp.hpp │ │ ├── Tpch14VCpp.hpp │ │ ├── Tpch15VCpp.hpp │ │ ├── Tpch16VCpp.hpp │ │ ├── Tpch17VCpp.hpp │ │ ├── Tpch18VCpp.hpp │ │ ├── Tpch19VCpp.hpp │ │ ├── Tpch1VCpp.hpp │ │ ├── Tpch20VCpp.hpp │ │ ├── Tpch21VCpp.hpp │ │ ├── Tpch22VCpp.hpp │ │ ├── Tpch2VCpp.hpp │ │ ├── Tpch3VCpp.hpp │ │ ├── Tpch4VCpp.hpp │ │ ├── Tpch5VCpp.hpp │ │ ├── Tpch6VCpp.hpp │ │ ├── Tpch7VCpp.hpp │ │ ├── Tpch8VCpp.hpp │ │ └── Tpch9VCpp.hpp │ │ ├── codegen_initial_submission_r3391_batch │ │ ├── Tpch10VCpp.hpp │ │ ├── Tpch11VCpp.hpp │ │ ├── Tpch12VCpp.hpp │ │ ├── Tpch13VCpp.hpp │ │ ├── Tpch14VCpp.hpp │ │ ├── Tpch15VCpp.hpp │ │ ├── Tpch16VCpp.hpp │ │ ├── Tpch17VCpp.hpp │ │ ├── Tpch18VCpp.hpp │ │ ├── Tpch19VCpp.hpp │ │ ├── Tpch1VCpp.hpp │ │ ├── Tpch20VCpp.hpp │ │ ├── Tpch21VCpp.hpp │ │ ├── Tpch22VCpp.hpp │ │ ├── Tpch2VCpp.hpp │ │ ├── Tpch3VCpp.hpp │ │ ├── Tpch4VCpp.hpp │ │ ├── Tpch5VCpp.hpp │ │ ├── Tpch6VCpp.hpp │ │ ├── Tpch7VCpp.hpp │ │ ├── Tpch8VCpp.hpp │ │ └── Tpch9VCpp.hpp │ │ ├── codegen_revision_r3408 │ │ ├── Tpch10VCpp.hpp │ │ ├── Tpch11VCpp.hpp │ │ ├── Tpch12VCpp.hpp │ │ ├── Tpch13VCpp.hpp │ │ ├── Tpch14VCpp.hpp │ │ ├── Tpch15VCpp.hpp │ │ ├── Tpch16VCpp.hpp │ │ ├── Tpch17VCpp.hpp │ │ ├── Tpch18VCpp.hpp │ │ ├── Tpch19VCpp.hpp │ │ ├── Tpch1VCpp.hpp │ │ ├── Tpch20VCpp.hpp │ │ ├── Tpch21VCpp.hpp │ │ ├── Tpch22VCpp.hpp │ │ ├── Tpch2VCpp.hpp │ │ ├── Tpch3VCpp.hpp │ │ ├── Tpch4VCpp.hpp │ │ ├── Tpch5VCpp.hpp │ │ ├── Tpch6VCpp.hpp │ │ ├── Tpch7VCpp.hpp │ │ ├── Tpch8VCpp.hpp │ │ └── Tpch9VCpp.hpp │ │ ├── codegen_revision_r3408_batch │ │ ├── Tpch10VCpp.hpp │ │ ├── Tpch11VCpp.hpp │ │ ├── Tpch12VCpp.hpp │ │ ├── Tpch13VCpp.hpp │ │ ├── Tpch14VCpp.hpp │ │ ├── Tpch15VCpp.hpp │ │ ├── Tpch16VCpp.hpp │ │ ├── Tpch17VCpp.hpp │ │ ├── Tpch18VCpp.hpp │ │ ├── Tpch19VCpp.hpp │ │ ├── Tpch1VCpp.hpp │ │ ├── Tpch20VCpp.hpp │ │ ├── Tpch21VCpp.hpp │ │ ├── Tpch22VCpp.hpp │ │ ├── Tpch2VCpp.hpp │ │ ├── Tpch3VCpp.hpp │ │ ├── Tpch4VCpp.hpp │ │ ├── Tpch5VCpp.hpp │ │ ├── Tpch6VCpp.hpp │ │ ├── Tpch7VCpp.hpp │ │ ├── Tpch8VCpp.hpp │ │ └── Tpch9VCpp.hpp │ │ ├── query1.hpp │ │ ├── query10.hpp │ │ ├── query11.hpp │ │ ├── query12.hpp │ │ ├── query13.hpp │ │ ├── query14.hpp │ │ ├── query15.hpp │ │ ├── 
query16.hpp │ │ ├── query17.hpp │ │ ├── query18.hpp │ │ ├── query19.hpp │ │ ├── query2.hpp │ │ ├── query20.hpp │ │ ├── query21.hpp │ │ ├── query22.hpp │ │ ├── query3.hpp │ │ ├── query4.hpp │ │ ├── query5.hpp │ │ ├── query6.hpp │ │ ├── query7.hpp │ │ ├── query8.hpp │ │ ├── query9.hpp │ │ ├── tpch.hpp │ │ ├── tpch_template.hpp │ │ └── tpch_template_memory.hpp ├── lms │ ├── DefaultLMSGen.scala │ ├── DistributedM3Gen.scala │ ├── ExpGen.scala │ ├── LMSGen.scala │ ├── M3Ops.scala │ ├── M3StoreOps.scala │ ├── SparkGen.scala │ ├── build.sbt │ ├── dbtoptimizer │ │ ├── CodeGen.scala │ │ ├── GraphGen.scala │ │ ├── Packages.scala │ │ └── lifters │ │ │ ├── ImplicitConversionLifters.scala │ │ │ ├── SimpleValLifter.scala │ │ │ └── StdFunctionsLifter.scala │ ├── oltp │ │ └── opt │ │ │ └── lifters │ │ │ ├── SEntryGen.scala │ │ │ └── StoreLifter.scala │ ├── store │ │ ├── Entry.java │ │ ├── Idx.java │ │ ├── JFun.java │ │ ├── SEntry.scala │ │ └── Store.scala │ └── tpcc │ │ └── TpccXactGenerator.scala ├── pardis │ ├── Optimizer.scala │ ├── PardisGen.scala │ ├── build.sbt │ ├── lifter │ │ ├── DeepMMultiRes.scala │ │ ├── DeepMirrorAggregator.scala │ │ ├── DeepMirrorBooleanExtra.scala │ │ ├── DeepMirrorDate.scala │ │ ├── DeepMirrorEntryIdx.scala │ │ ├── DeepMirrorGenericEntry.scala │ │ ├── DeepMirrorIdx.scala │ │ ├── DeepMirrorStore.scala │ │ ├── DeepStringExtra.scala │ │ ├── OnlineOptimizations.scala │ │ ├── SCLMSInterop.scala │ │ ├── StoreDSL.scala │ │ ├── TypeToTypeRep.scala │ │ └── package.scala │ ├── microbenchmarks │ │ ├── MB1.scala │ │ ├── MicroBenchGen.scala │ │ └── MicroBenchRunner.scala │ ├── newqq │ │ └── DBToasterSquidBinding.scala │ ├── prettyprinter │ │ ├── StoreCodeGenerator.scala │ │ ├── StoreCppCodeGenerator.scala │ │ └── StoreScalaCodeGenerator.scala │ ├── store │ │ ├── MMultiRes.scala │ │ ├── MirrorAggregator.scala │ │ ├── MirrorBooleanExtra.scala │ │ ├── MirrorDate.scala │ │ ├── MirrorEntryIdx.scala │ │ ├── MirrorGenericEntry.scala │ │ ├── MirrorIdx.scala │ │ ├── MirrorStore.scala │ │ └── StringExtra.scala │ ├── tpcc │ │ ├── TpccPardisGen.scala │ │ └── TpccXactGenerator_SC.scala │ └── transformer │ │ ├── CTransformer.scala │ │ ├── CodeMotion.scala │ │ ├── ColdMotion.scala │ │ ├── CommonPureExpression.scala │ │ ├── DSKReordering.scala │ │ ├── DeadIdxUpdate.scala │ │ ├── Deforestation.scala │ │ ├── EntryAnalysis.scala │ │ ├── IndexAnalysis.scala │ │ ├── IndexInliner.scala │ │ ├── IndexLookupFusion.scala │ │ ├── InsertNoChecks.scala │ │ ├── MVGet.scala │ │ ├── MultiResSplitter.scala │ │ ├── Profiler.scala │ │ ├── SampleEntryHoister.scala │ │ ├── ScalaConstructsToCTransformer.scala │ │ ├── ScalaStructToMallocTransformer.scala │ │ ├── SliceToSliceNoUpd.scala │ │ ├── StoreDCE.scala │ │ ├── StringFormatEvaluator.scala │ │ ├── StringToCTransformer.scala │ │ ├── StructDynamicAccessTransformer.scala │ │ ├── TmpMapHoister.scala │ │ └── TreeDumper.scala ├── release │ ├── CHANGELOG │ ├── README │ └── bin │ │ └── dbtoaster ├── scripts │ ├── postgres.sh │ ├── pushover.sh │ ├── ramdisk.sh │ ├── regress.sh │ ├── run_spark_strong_experiments.sh │ ├── run_spark_weak_experiments.sh │ ├── scalapatch.sh │ ├── test │ │ ├── db_parser.rb │ │ ├── query_test.rb │ │ ├── testall.sh │ │ └── util.rb │ ├── unit.sh │ └── zeus.rb ├── spark │ ├── build.sbt │ ├── conf │ │ ├── log4j.properties │ │ ├── spark.config │ │ ├── spark.config.10 │ │ ├── spark.config.100 │ │ ├── spark.config.1000 │ │ ├── spark.config.200 │ │ ├── spark.config.25 │ │ ├── spark.config.400 │ │ ├── spark.config.50 │ │ ├── spark.config.600 │ │ ├── 
spark.config.800 │ │ └── spark.config.default │ └── src │ │ └── lib │ │ ├── LogWriter.scala │ │ ├── MapContext.scala │ │ ├── Registrator.scala │ │ ├── SparkConfig.scala │ │ └── store │ │ ├── Buffer.scala │ │ ├── ByteUtils.scala │ │ ├── CharArray.scala │ │ ├── HashIndex.java │ │ ├── Index.java │ │ ├── IndexOperations.java │ │ ├── JFun.java │ │ ├── KryoSerializable.scala │ │ ├── ListIndex.java │ │ ├── LogStore.scala │ │ ├── MapEntry.java │ │ ├── Registrator.scala │ │ ├── SliceIndex.java │ │ ├── Store.scala │ │ └── StoreTest.scala ├── src │ ├── Compiler.scala │ └── UnitTest.scala ├── srccpp │ ├── driver │ │ ├── application.hpp │ │ ├── compatibility.hpp │ │ ├── main.cpp │ │ ├── multiplexer.hpp │ │ ├── ordered_event.hpp │ │ ├── platform.hpp │ │ ├── relation_iterator.hpp │ │ ├── runtime_opts.hpp │ │ ├── stopwatch.hpp │ │ └── utils.hpp │ ├── lib │ │ ├── date_format.hpp │ │ ├── date_type.hpp │ │ ├── event.hpp │ │ ├── hash.hpp │ │ ├── macro.hpp │ │ ├── map_type.hpp │ │ ├── memory_pool.hpp │ │ ├── message.hpp │ │ ├── multi_map.hpp │ │ ├── pardis │ │ │ ├── aggregator.hpp │ │ │ ├── execution_profiler.hpp │ │ │ ├── generic_entry.hpp │ │ │ ├── mmap.hpp │ │ │ └── sc_extra.hpp │ │ ├── relation.hpp │ │ ├── serialization.hpp │ │ ├── singleton.hpp │ │ ├── source.hpp │ │ ├── standard_functions.hpp │ │ ├── types.hpp │ │ └── utils.hpp │ ├── old_driver │ │ ├── event.cpp │ │ ├── event.hpp │ │ ├── filepath.hpp │ │ ├── iprogram.cpp │ │ ├── iprogram.hpp │ │ ├── main.cpp │ │ ├── makefile │ │ ├── optionparser.hpp │ │ ├── program_base.cpp │ │ ├── program_base.hpp │ │ ├── runtime.cpp │ │ ├── runtime.hpp │ │ ├── standard_adaptors.cpp │ │ ├── standard_adaptors.hpp │ │ ├── streams.cpp │ │ └── streams.hpp │ └── old_lib │ │ ├── circular_buffer.hpp │ │ ├── date.hpp │ │ ├── hash.cpp │ │ ├── hash.hpp │ │ ├── hpds │ │ ├── KDouble.cpp │ │ ├── KDouble.hpp │ │ ├── charpool.hpp │ │ ├── pool.hpp │ │ ├── pstring.cpp │ │ ├── pstring.hpp │ │ ├── pstringops.cpp │ │ └── pstringops.hpp │ │ ├── macro.hpp │ │ ├── makefile │ │ ├── misc │ │ ├── benchHashCmp.cpp │ │ ├── hashmap │ │ │ ├── dense_hash_map │ │ │ ├── dense_hash_set │ │ │ ├── internal │ │ │ │ ├── densehashtable.h │ │ │ │ ├── hashtable-common.h │ │ │ │ ├── libc_allocator_with_realloc.h │ │ │ │ ├── sparseconfig.h │ │ │ │ └── sparsehashtable.h │ │ │ ├── sparse_hash_map │ │ │ ├── sparse_hash_set │ │ │ ├── sparsetable │ │ │ └── type_traits.h │ │ ├── mmap.cpp │ │ ├── run-benchHashCmp.sh │ │ ├── run-mmap.sh │ │ ├── statistics_split.cpp │ │ ├── statistics_split.hpp │ │ └── util.hpp │ │ ├── mmap │ │ ├── mmap.hpp │ │ └── pool.hpp │ │ ├── sc │ │ ├── Aggregator.hpp │ │ ├── ExecutionProfiler.h │ │ ├── GenericEntry.hpp │ │ ├── MB1.h │ │ ├── Predicate.h │ │ ├── ScExtra.h │ │ ├── SpinLock.h │ │ ├── TPCC.h │ │ ├── Transaction.h │ │ ├── TransactionManager.h │ │ ├── Version.h │ │ ├── cmmap.hpp │ │ ├── mmap.hpp │ │ ├── mmap2.hpp │ │ └── types.h │ │ ├── serialization.hpp │ │ ├── smhasher │ │ ├── MurmurHash2.cpp │ │ ├── MurmurHash2.hpp │ │ ├── MurmurHash3.cpp │ │ ├── MurmurHash3.hpp │ │ ├── PMurHash.cpp │ │ └── PMurHash.hpp │ │ ├── source.hpp │ │ ├── standard_functions.cpp │ │ ├── standard_functions.hpp │ │ ├── statistics.hpp │ │ └── types.hpp └── test │ └── cpp │ ├── config.h │ ├── htest.cpp │ ├── run-htest.sh │ ├── run-sparse-htest.sh │ ├── run-time_hash_map.sh │ ├── sparse_htest.cpp │ └── time_hash_map.cc ├── project ├── build.properties └── plugins.sbt ├── runtime ├── build.sbt ├── conf │ ├── tpcc.properties │ └── tpcc.properties.example ├── microbench │ ├── MicroBench.cpp │ ├── MicroBench.scala │ └── 
README.txt └── tpcc │ ├── OltpBenchmark.scala │ ├── README.txt │ ├── database │ ├── add_fkey_idx.sql │ ├── create_tables.sql │ └── load_data.sql │ ├── itx │ ├── ITpccInMemTx.scala │ └── ITpccTx.scala │ ├── lib │ ├── BinaryHeap.scala │ ├── SuperHashMap.scala │ ├── SuperHashMapPooled.scala │ ├── SuperHashSet.scala │ ├── SuperIndex │ └── SuperIndex.scala │ ├── lmsgen │ ├── LMSTx.scala │ └── TpccBench.scala │ ├── loadtest │ ├── AbortedTransactionException.scala │ ├── Counter.scala │ ├── DatabaseConnector.scala │ ├── Delivery.scala │ ├── Driver.scala │ ├── Load.scala │ ├── NamedThreadFactory.scala │ ├── NewOrder.scala │ ├── OrderStat.scala │ ├── Payment.scala │ ├── RtHist.scala │ ├── Slev.scala │ ├── Tpcc.scala │ ├── TpccConstants.scala │ ├── TpccLoad.scala │ ├── TpccLoadConfig.scala │ ├── TpccStatements.scala │ ├── TpccThread.scala │ ├── TpccUnitTest.scala │ ├── Util.scala │ └── load │ │ ├── FileLoader.scala │ │ ├── JdbcPreparedStatementLoader.scala │ │ ├── JdbcStatementLoader.scala │ │ ├── Record.scala │ │ └── RecordLoader.scala │ ├── mtx │ └── MixedTx.scala │ ├── pardisgen │ ├── README.txt │ ├── SCTx.scala │ ├── SCTxGenEntry.txt │ ├── SCTxSplEntry-SE.txt │ ├── SCTxSplEntry.txt │ ├── TpccGenSC.cpp │ ├── TpccGenSC.scala │ └── include │ │ ├── hpds │ │ ├── KDouble.cpp │ │ ├── KDouble.hpp │ │ ├── charpool.hpp │ │ ├── pool.hpp │ │ ├── pstring.cpp │ │ ├── pstring.hpp │ │ └── pstringops.hpp │ │ ├── macro.hpp │ │ ├── sc │ │ ├── Aggregator.hpp │ │ ├── ExecutionProfiler.h │ │ ├── GenericEntry.hpp │ │ ├── MB1.h │ │ ├── Predicate.h │ │ ├── ScExtra.h │ │ ├── SpinLock.h │ │ ├── TPCC.h │ │ ├── Transaction.h │ │ ├── TransactionManager.h │ │ ├── Version.h │ │ ├── cmmap.hpp │ │ ├── mmap.hpp │ │ ├── mmap2.hpp │ │ └── types.h │ │ ├── serialization.hpp │ │ ├── smhasher │ │ ├── MurmurHash2.cpp │ │ ├── MurmurHash2.hpp │ │ ├── MurmurHash3.cpp │ │ ├── MurmurHash3.hpp │ │ ├── PMurHash.cpp │ │ └── PMurHash.hpp │ │ └── types.hpp │ ├── results │ ├── Tpcc-HStoreCppHandWritten-w01.txt │ ├── Tpcc-HStoreCppHandWritten-w05.txt │ ├── Tpcc-HStoreCppHandWritten-w10.txt │ ├── TpccInMem-LMS-w01.txt │ ├── TpccInMem-LMS-w05.txt │ ├── TpccInMem-LMS-w10.txt │ ├── TpccInMem-LMSArr-w01-withoutLoopInversion.txt │ ├── TpccInMem-LMSArr-w01.txt │ ├── TpccInMem-LMSArr-w05.txt │ ├── TpccInMem-LMSArr-w10.txt │ ├── TpccInMem-tx1-w01.txt │ ├── TpccInMem-tx1-w05.txt │ ├── TpccInMem-tx1-w10.txt │ ├── TpccInMem-tx1.txt │ ├── TpccInMem-tx2.txt │ ├── TpccInMem-tx3-1-after-NewOrder-findItem.txt │ ├── TpccInMem-tx3-2-after-optimizingUpdates.txt │ ├── TpccInMem-tx3-3-after-optimizingMoreInsideDelivery.txt │ ├── TpccInMem-tx3-4-after-optimizingMore.txt │ ├── TpccInMem-tx4.txt │ ├── TpccInMem-tx5-w01.txt │ ├── TpccInMem-tx5-w05.txt │ ├── TpccInMem-tx5-w10.txt │ ├── TpccInMem-tx5.txt │ ├── TpccMySQL-1conn-w01.txt │ ├── TpccMySQL-1conn-w05.txt │ ├── TpccMySQL-1conn-w10.txt │ ├── TpccMySQL-30conn-w01.txt │ ├── TpccMySQL-30conn-w05.txt │ ├── TpccMySQL-30conn-w10.txt │ ├── TpccVoltDB-w01-1.txt │ ├── TpccVoltDB-w01-2.txt │ ├── TpccVoltDB-w01-3.txt │ ├── TpccVoltDB-w05-1.txt │ ├── TpccVoltDB-w05-2.txt │ ├── TpccVoltDB-w05-3.txt │ ├── TpccVoltDB-w10-1.txt │ ├── TpccVoltDB-w10-2.txt │ └── TpccVoltDB-w10-3.txt │ ├── tx │ ├── TpccInMem.scala │ └── TpccTable.scala │ ├── tx1 │ ├── Delivery.scala │ ├── NewOrder.scala │ ├── OrderStatus.scala │ ├── Payment.scala │ └── StockLevel.scala │ ├── tx2 │ ├── Delivery.scala │ ├── NewOrder.scala │ ├── OrderStatus.scala │ ├── Payment.scala │ └── StockLevel.scala │ ├── tx3 │ ├── Delivery.scala │ ├── NewOrder.scala │ ├── 
OrderStatus.scala │ ├── Payment.scala │ └── StockLevel.scala │ ├── tx4 │ ├── Delivery.scala │ ├── NewOrder.scala │ ├── OrderStatus.scala │ ├── Payment.scala │ └── StockLevel.scala │ └── tx5 │ ├── Delivery.scala │ ├── NewOrder.scala │ ├── OrderStatus.scala │ ├── Payment.scala │ └── StockLevel.scala └── storelib ├── build.sbt └── src ├── Utils.scala └── lib ├── AkkaSystem.scala ├── Decoder.scala ├── ExecutionProfiler.scala ├── Functions.scala ├── Helper.scala ├── IQuery.scala ├── Maps.java ├── Maps.scala ├── Messages.java ├── Messages.scala ├── Stopwatch.scala ├── store ├── Aggregator.scala ├── Entry.java ├── Idx.java ├── JFun.java ├── MultiRes.scala ├── SEntry.scala └── Store.scala └── storeScala ├── Entry.scala ├── Idx.scala ├── MultiRes.scala ├── SEntry.scala └── Store.scala /.gitignore: -------------------------------------------------------------------------------- 1 | *.class 2 | *.log 3 | *.swp 4 | *.swo 5 | *.swn 6 | 7 | # sbt specific 8 | .cache/ 9 | .history/ 10 | .lib/ 11 | dist/* 12 | target/ 13 | lib_managed/ 14 | src_managed/ 15 | project/boot/ 16 | project/plugins/project/ 17 | runtime/project 18 | 19 | # Scala-IDE specific 20 | .scala_dependencies 21 | .worksheet 22 | 23 | .DS_Store 24 | 25 | # IntelliJ specific 26 | .idea/ 27 | -------------------------------------------------------------------------------- /NOTICE: -------------------------------------------------------------------------------- 1 | Copyright 2010-2017 EPFL DATA Lab (http://data.epfl.ch) 2 | -------------------------------------------------------------------------------- /dbtoaster.js/benchalljs.sh: -------------------------------------------------------------------------------- 1 | for i in {1..22}; do ./benchjs.sh $i; done 2 | -------------------------------------------------------------------------------- /dbtoaster.js/benchjs.sh: -------------------------------------------------------------------------------- 1 | ./copyjs.sh $1 2 | echo "Query $1:" 3 | cd ../dbtoaster.js 4 | ./compilejs.sh > /dev/null 2>&1 5 | ./fixjs.sh 6 | echo "Compilation completed." 7 | sbt run >> res.txt 8 | echo "Running completed." 
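# the sbt run output for each query is appended to res.txt; benchalljs.sh drives this script for queries 1..22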
9 | 
-------------------------------------------------------------------------------- /dbtoaster.js/compileJS.sh: --------------------------------------------------------------------------------
1 | SCALA_FILE="$1.scala"
2 | JS_PROJECT="../dbtoaster.js"
3 | cp $SCALA_FILE $JS_PROJECT/Main.scala
4 | cd $JS_PROJECT
5 | sbt "fastOptJS::webpack" > compile.txt 2>&1
6 | JS_FILE=$JS_PROJECT/target/scala-2.12/scalajs-bundler/main/dbtoaster-js-fastopt.js
7 | ls $JS_PROJECT/target/scala-2.12/scalajs-bundler/main/*.js
8 | sed -i -e 's/var req = new $g.XMLHttpRequest();/var XMLHttpRequest = require("xhr2"); var req = new XMLHttpRequest();/g' $JS_FILE
9 | cd -
10 | cp $JS_FILE $1.js
11 | 
12 | 
-------------------------------------------------------------------------------- /dbtoaster.js/copyalljs.sh: --------------------------------------------------------------------------------
1 | cp ddbtoaster/target/tmp/*ScalaJS.scala ../dbtoaster.js/
2 | 
-------------------------------------------------------------------------------- /dbtoaster.js/copyjs.sh: --------------------------------------------------------------------------------
1 | queryName="ddbtoaster/target/tmp/Tpch$1""ScalaJS.scala"
2 | cp $queryName ../dbtoaster.js/Main.scala
3 | 
-------------------------------------------------------------------------------- /dbtoaster.js/js.sh: --------------------------------------------------------------------------------
1 | jsProjectPath="/home/amir/dbtoaster.js"
2 | jsfile=$jsProjectPath/target/scala-2.12/scalajs-bundler/main/dbtoaster-js-fastopt.js
3 | scalaOutputPath="."
4 | queryName=$1
5 | cp $scalaOutputPath/$queryName.scala $jsProjectPath/$queryName.scala
6 | cd $jsProjectPath
7 | sbt clean
8 | sbt fastOptJS::webpack
9 | sed -i 's/var req = new $g.XMLHttpRequest();/var XMLHttpRequest = require(\x27xhr2\x27); var req = new XMLHttpRequest();/g' $jsfile
10 | sbt run
11 | rm $jsProjectPath/$queryName.scala
12 | 
-------------------------------------------------------------------------------- /dbtoaster.js/runJS.sh: --------------------------------------------------------------------------------
1 | JS_PROJECT="../dbtoaster.js"
2 | JS_FILE="ddbtoaster/target/tmp/$1"
3 | cp $JS_FILE $JS_PROJECT/target/scala-2.12/scalajs-bundler/main/dbtoaster-js-fastopt.js
4 | cd $JS_PROJECT
5 | sbt run 2>run_error.txt
-------------------------------------------------------------------------------- /ddbtoaster/.gitignore: --------------------------------------------------------------------------------
1 | project/project
2 | project/target
3 | target
4 | examples/queries
5 | examples/data
6 | conf/ddbt.properties
7 | test/gen
8 | test/queries
9 | bin/*
10 | pkg
11 | benchmarks-*.csv
12 | .idea
13 | *.numbers
14 | *.iml
15 | *.pdf
16 | *Temp*
17 | *.DS_Store
18 | *.aux
19 | *.log
20 | *.nav
21 | *.out
22 | *.snm
23 | *.synctex.gz
24 | *.toc
25 | tmp/*
26 | srccpp/lib/*.o
27 | srccpp/lib/*.a
28 | srccpp/driver/*.o
29 | srccpp/driver/*.a
30 | srccpp/old_lib/*.o
31 | srccpp/old_lib/*.a
32 | srccpp/old_driver/*.o
33 | srccpp/old_driver/*.a
34 | release/examples
35 | release/lib
36 | dbtoaster_release
37 | release/LICENSE
38 | release/README
39 | release/doc
40 | release/bin/dbtoaster_frontend
41 | *.o
42 | dist/*
43 | CHANGELOG
44 | 
-------------------------------------------------------------------------------- /ddbtoaster/.travis.yml: --------------------------------------------------------------------------------
1 | language: scala
2 | scala:
3 | - 2.10.2
4 | jdk:
5 | - oraclejdk7
6 | # - openjdk7
7 | # - openjdk6
8 | #virtualenv:
9 | # system_site_packages: true
10 | #before_install:
11 | # - sudo apt-get install scala
12 | #before_script: ./.travis.sh
13 | script: scripts/unit.sh
14 | # - sbt 'test:run-main ddbt.test.Benchmark'
15 | # - sbt test
16 | 
-------------------------------------------------------------------------------- /ddbtoaster/core/build.sbt: --------------------------------------------------------------------------------
1 | Seq(
2 | // --------- Project information
3 | name := "dbtoaster-core",
4 | 
5 | // --------- Paths
6 | scalaSource in Compile <<= baseDirectory / "."
7 | )
-------------------------------------------------------------------------------- /ddbtoaster/core/codegen/CodeGenOptions.scala: --------------------------------------------------------------------------------
1 | package ddbt.codegen
2 | 
3 | /**
4 | * Set of options passed to each code generator
5 | */
6 | class CodeGenOptions(
7 | val className: String,
8 | val packageName: String,
9 | val dataset: String,
10 | val datasetWithDeletions: Boolean,
11 | val timeoutMilli: Long,
12 | val isReleaseMode: Boolean,
13 | val printTiminingInfo: Boolean,
14 | val printProgress: Long = 0L,
15 | val isBatchingEnabled: Boolean = false,
16 | val useOldRuntimeLibrary: Boolean = false
17 | )
-------------------------------------------------------------------------------- /ddbtoaster/docs/benchmarks/cpp-std-tpch-o3.csv: --------------------------------------------------------------------------------
1 | Dataset,standard
2 | Query,SQLtoCPP,Compile,Median,Min,Max
3 | tpch/ssb4,1.035,32.249, 0.459, 0.445, 0.524
4 | tpch/query1,0.566,17.156, 0.322, 0.315, 0.352
5 | tpch/query10,0.237,13.513, 0.316, 0.305, 0.339
6 | tpch/query11,1.285,12.605, 0.138, 0.133, 0.165
7 | tpch/query11a,0.018, 5.540, 0.019, 0.019, 0.038
8 | tpch/query11c,0.980,11.802, 0.536, 0.531, 0.560
9 | tpch/query12,0.172, 8.725, 0.281, 0.276, 0.300
10 | tpch/query13,0.316, 7.462, 6.104, 6.078, 6.154
11 | tpch/query14,0.289, 5.881, 0.235, 0.233, 0.250
12 | tpch/query15,5.697,27.933, 4.182, 4.159, 4.196
13 | tpch/query16,0.287,28.420, 1.725, 1.709, 1.780
14 | tpch/query17,5.471, 8.419, 0.232, 0.229, 0.251
15 | tpch/query17a,0.484, 6.660, 0.234, 0.230, 0.246
16 | tpch/query18,0.191,10.964, 6.421, 6.259, 6.577
17 | tpch/query18a,4.168,11.048, 0.278, 0.272, 0.300
18 | tpch/query19,0.122, 6.003, 0.517, 0.513, 0.538
19 | tpch/query2,1.047,26.002, 0.033, 0.033, 0.070
20 | tpch/query20,1.228,12.954, 0.389, 0.384, 0.407
21 | tpch/query21,1.383,13.829, 0.405, 0.400, 0.445
22 | tpch/query22,3.834,24.881, 0.112, 0.110, 0.157
23 | tpch/query22a,0.086, 5.886, 0.046, 0.045, 0.063
24 | tpch/query3,0.186,10.269, 0.287, 0.281, 0.314
25 | tpch/query4,0.075, 5.910, 0.264, 0.262, 0.281
26 | tpch/query5,0.741,33.356, 0.327, 0.321, 0.385
27 | tpch/query6,0.023, 4.009, 0.250, 0.243, 0.268
28 | tpch/query7,3.403,46.868,87.709,87.064,88.861
29 | tpch/query8,13.016,170.872, 0.496, 0.484, 0.598
30 | tpch/query9,2.576,78.523, 0.541, 0.530, 0.664
31 | 
-------------------------------------------------------------------------------- /ddbtoaster/docs/benchmarks/finance_queries.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbtoaster/dbtoaster-backend/e47273968b360b1c3685449f23d0510c78f59216/ddbtoaster/docs/benchmarks/finance_queries.xlsx
-------------------------------------------------------------------------------- /ddbtoaster/docs/benchmarks/parse.php:
--------------------------------------------------------------------------------
11 | if (preg_match('/M3 : +'.$n.'/',$l,$m)) $tmp[1]=$m[1]; // lines 1-10 (the PHP header and the definitions of $n, $l, and $res) were lost in extraction
12 | else if (preg_match('/Scala codegen : +'.$n.'/',$l,$m)) $tmp[2]=$m[1];
13 | else if (preg_match('/Scala compile : +'.$n.'/',$l,$m)) $tmp[3]=$m[1];
14 | else if (preg_match('/Scala running : +'.$n.' \[ *'.$n.', *'.$n.'\] \(sec\)/',$l,$m)) {
15 | $tmp[4]=$m[1];
16 | $tmp[5]=$m[2];
17 | $tmp[6]=$m[3];
18 | } else if (preg_match('/\[(Full )?GC '.$n.'K->'.$n.'K\([0-9]+K\), '.$n.'/',$l,$m)) {
19 | // K'.$n.' secs\]
20 | $tmp[7]=max(round($m[2]/(1024*1024),3),@doubleval($tmp[7]));
21 | $tmp[8]=max(round($m[3]/(1024*1024),3),@doubleval($tmp[8]));
22 | $t = doubleval(str_replace(",",".",$m[4]))/10; // because 10 runs
23 | if (isset($tmp[9])) $tmp[9]+=$t; else $tmp[9]=$t;
24 | }
25 | }
26 | $res[]=$tmp;
27 | foreach($res as $r) { for ($i=0;$i<count($r);++$i) echo ($i>0?",":"").$r[$i]; echo "\n"; } // CSV output; the loop body and lines 28-34 were lost in extraction, this is a plausible reconstruction
35 | 
-------------------------------------------------------------------------------- /ddbtoaster/docs/benchmarks/tpch_queries.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbtoaster/dbtoaster-backend/e47273968b360b1c3685449f23d0510c78f59216/ddbtoaster/docs/benchmarks/tpch_queries.xlsx
-------------------------------------------------------------------------------- /ddbtoaster/docs/drafts/Makefile: --------------------------------------------------------------------------------
1 | TEX_CMD=pdflatex -halt-on-error
2 | BIB_CMD=bibtex
3 | 
4 | all: m4.pdf
5 | 
6 | m4.pdf: m4.tex ../inc/style.sty ../inc/bibliography.bib
7 | $(TEX_CMD) m4;
8 | $(BIB_CMD) m4;
9 | $(TEX_CMD) m4;
10 | 
11 | clean:
12 | rm -f m4.pdf *.aux *.log *.out *.bbl *.blg *.toc;
13 | 
-------------------------------------------------------------------------------- /ddbtoaster/docs/drafts/lifts.md: --------------------------------------------------------------------------------
1 | # Problems with Lifts/Exists
2 | 
3 | *Note:* The term "classic semantics" in this document refers to the semantics currently implemented in alpha5, while "new semantics" refers to the semantics defined in the VLDBJ paper.
4 | 
5 | ## Example queries
6 | 
7 | ### Simple aggregate query
8 | 
9 | ```
10 | SELECT A, SUM(B) FROM R GROUP BY A;
11 | ```
12 | 
13 | Here, the problem with the classic semantics is that we are not able to distinguish between groups whose aggregate is 0 and groups that do not exist (e.g., for R = {(1, 0)}, the group A = 1 has SUM(B) = 0 and looks exactly like a group that is absent from the result).
14 | alpha5 currently generates the following calculus expression for this query:
15 | 
16 | ```
17 | __SQL_SUM_AGGREGATE_1:
18 | AggSum([R_A], (R(R_A, R_B) * R_B))
19 | ```
20 | 
21 | We would actually need an `EXISTS(R(R_A, R_B))` in front of the expression for it to be correct.
22 | 
23 | ### Not exists
24 | 
25 | ```
26 | SELECT A FROM R WHERE NOT EXISTS (SELECT R2.B FROM R AS R2 WHERE R2.B < R.A);
27 | ```
28 | 
29 | This is a query that we cannot express using the new semantics.
30 | The problem is that we cannot check whether something has multiplicity 0, because a multiplicity of 0 is "contagious".
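As a minimal illustration of the contagion: every condition in such an expression is just another multiplicative factor, so a factor can force a term to 0, but nothing multiplied with a term that is already 0 can make it non-zero again. "The subquery is empty" is therefore not directly observable inside the product; it first has to be reified into a value, which is what the `__domain_1 ^= ...` binding in the expression below does.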
31 | alpha5 produces the following calculus expression: 32 | 33 | ``` 34 | COUNT: 35 | AggSum([R_A], 36 | (R(R_A, R_B, R_C) * 37 | AggSum([], 38 | ((__domain_1 ^= AggSum([], (R(R2_A, R2_B, R2_C) * {R2_B < R_A}))) * 39 | (__domain_1 ^= 0))))) 40 | ``` 41 | 42 | A similar problem arises with the SQL operator `ALL`: 43 | 44 | ``` 45 | SELECT A FROM R WHERE R.A < ALL (SELECT R2.B FROM R AS R2); 46 | ``` 47 | 48 | This query essentially gets translated to a `NOT EXISTS` query with the inverse of the predicate: 49 | 50 | ``` 51 | COUNT: 52 | AggSum([R_A], 53 | (R(R_A, R_B, R_C) * 54 | AggSum([], 55 | ((__domain_1 ^= AggSum([], (R(R2_A, R2_B, R2_C) * {R_A >= R2_B}))) * 56 | (__domain_1 ^= 0))))) 57 | ``` 58 | -------------------------------------------------------------------------------- /ddbtoaster/docs/graphs/alpha5_release/Makefile: -------------------------------------------------------------------------------- 1 | all: 2 | gnuplot engine-comparison.ps 3 | -------------------------------------------------------------------------------- /ddbtoaster/docs/graphs/alpha5_release/engine-comparison.csv: -------------------------------------------------------------------------------- 1 | Q1,6.41,17.74,29.46,91171.62,94556.42,60534.24,11.16 2 | Q2,0.75,10.74,1.01,105630.03,107604.4,183382.45,60.33 3 | Q3,3.58,18.22,22.55,163188.9,164654.9,811056.68,14.03 4 | Q4,8.42,32.3,252.04,6322.88,6308.95,2737.51,7678.18 5 | Q5,1.1,9.67,19.05,,,,4.14 6 | Q6,28.29,44.24,361.97,260534.77,270158.83,1122627.81,26910.07 7 | Q7,0.17,3.84,14.1, , ,13086.03,10.17 8 | Q8,0.23,34.83,23.88,722.51,720.54,2630.66,0.25 9 | Q9,0.07,6.75,23.74,11896.31,11237.1,2161.68,0.8 10 | Q10,1.34,4.6,38.23,181175.36,182465.68,690424.47,4.92 11 | Q11,0.23,3.74,12.32,774.28,781.93,520.28,0.12 12 | Q11a,1.6,1.57,5.28,313897.84,354411.24,1007199.95,8808.1 13 | Q11c,,,,40.299,35.808,41.006, 14 | Q12,2.06,24.71,74.47,248609.89,267970.74,694702.73,7576.58 15 | Q13,0.12,9.64,10.9,171.16,173.31,27.895,0.1 16 | Q14,0.99,39.6,464.88,250495.64,246278.47,655655.91,1.01 17 | Q15,0.12,2.49,6.14,3.149,3.128,3.146,0.08 18 | Q16,2.01,2.94,8.82,444.5,443.71,85.755,2 19 | Q17,3.4,11.77,19.64,387.37,386.86,68547.76,1373.52 20 | Q17a,1.99,1.51,13.06,252.5,256.67,306.76,2060.98 21 | Q18,2.45,1.2,11.16,223.99,225.18,229.01,2.95 22 | Q18a,2.09,1.31,7.42,333.05,336.56,469.2,7.38 23 | Q19,0.06,23.84,0.57,1047.68,1036.43,79.505,187.78 24 | Q20,13.91,7.16,33.63,2091.8,2099.14,2920.28,502.33 25 | Q21,1.65,8.72,14.72,258.94,258.37,736.34,8.5 26 | Q22,0.39,36.05,58.22,1385.93,1397.35,1277.47,0.47 27 | Q22a,1.08,1.98,41.68,725.33,732.3,991.08,1.31 28 | SSB4,1.13,3.43,16.92,,,,4.15 29 | AXF,3.66,5.62,6.91,,,,5677.57 30 | BSP,2.99,6,5.18,,,,703.86 31 | BSV,2.87,5.23,10.63,,,,702.49 32 | MST,2.07,4.37,3.73,,,,2.06 33 | PSP,2.68,5.93,5.38,,,,2.67 34 | VWAP (*),3.08,7.93,4.81,,,,2.75 35 | MDDB1,1.54,972.22,5.96,,,,230.31 36 | MDDB2,1.14,0.31,2.11,,,,131.68 37 | -------------------------------------------------------------------------------- /ddbtoaster/docs/graphs/alpha5_release/engine-comparison.ps: -------------------------------------------------------------------------------- 1 | set terminal pdf color dashed enhanced size 12,2 2 | set output "engine-comparison.pdf" 3 | 4 | # --- start common commands --- 5 | set border 1+2 6 | set xlabel font ", 10" 7 | set ylabel font ", 10" 8 | set xtics font ", 8" 9 | set ytics font ", 8" 10 | set ytics nomirror 11 | set xtics nomirror 12 | set rmargin at screen 0.975 13 | set lmargin at screen 0.05 14 | # --- end common commands --- 
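# engine-comparison.csv has no header row: column 1 holds the query name and
# columns 2-8 hold the average refresh rates plotted below, in the order
# REP, DBX, SPY, DBToaster Scala, DBToaster Scala + LMS, DBToaster C++, IVM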
15 | 
16 | set datafile separator ","
17 | set datafile missing "0"
18 | 
19 | set key Left reverse horizontal maxcols 2 at screen 0.95, 0.95 opaque
20 | set boxwidth 1 relative
21 | 
22 | set xtics auto
23 | set style data histogram
24 | set style histogram cluster gap 1
25 | 
26 | set ylabel "Average Refresh Rate (1/s)"
27 | set xtics 0.25 nomirror scale 0 rotate by -30
28 | set ytics nomirror
29 | 
30 | set xlabel "Queries"
31 | set bmargin at screen 0.20
32 | set tmargin at screen 0.95
33 | 
34 | set logscale y
35 | 
36 | set style fill solid noborder
37 | plot 'engine-comparison.csv' using 2:xtic(1) title "REP" lc rgb "#a6cee3", \
38 | '' using 3:xtic(1) title "DBX" lc rgb "#1f78b4", \
39 | '' using 4:xtic(1) title "SPY" lc rgb "#b2df8a", \
40 | '' using 5:xtic(1) title "DBToaster Scala" lc rgb "#33a02c", \
41 | '' using 6:xtic(1) title "DBToaster Scala + LMS" lc rgb "#fdbf6f", \
42 | '' using 7:xtic(1) title "DBToaster C++" lc rgb "#ff7f00", \
43 | '' using 8:xtic(1) title "IVM" lc rgb "#fb9a99"
-------------------------------------------------------------------------------- /ddbtoaster/docs/help/win_dev_install.md: --------------------------------------------------------------------------------
1 | # Installation on Windows
2 | 
3 | This document describes the installation of DBToaster and DDBToaster on Windows
4 | for developers. Other versions of the tools and binaries described below might
5 | work as well but are untested.
6 | 
7 | ## Requirements
8 | * Windows 7
9 | * Java SE Development Kit 7 (64-bit)
10 | * sbt 0.13.2
11 | * Scala 2.10.3
12 | * Cygwin (32-bit) with the following packages (and their dependencies):
13 | * gcc-g++ 4.8.2-2
14 | * Ocaml 4.01.0-1 (to compile DBToaster)
15 | * ruby 1.9.3-p484-1 (for the test scripts)
16 | * git 1.7.9-1 (to clone the DDBToaster repository)
17 | * git-svn 1.7.9-1 (to checkout the DBToaster repository)
18 | * libboost and libboost-devel 1.53.0-2
19 | 
20 | ## Configuration
21 | 
22 | ### Bash profile
23 | 
24 | Add the following lines to your `.bash_profile` file (the bracketed placeholders, lost in extraction, stand for local installation paths):
25 | 
26 | ```
27 | export SBT_HOME=<sbt installation directory>
28 | export SCALA_HOME=<Scala installation directory>
29 | export JAVA_HOME=<JDK installation directory>
30 | export PATH=$PATH:${SBT_HOME}/bin:${SCALA_HOME}/bin:${JAVA_HOME}/bin
31 | ```
32 | 
33 | ### DDBToaster configuration
34 | 
35 | Use the following options in the DDBToaster configuration file
36 | `conf/ddbt.properties`:
37 | 
38 | ```
39 | ddbt.base_repo = <path to the DBToaster repository>
40 | ddbt.lib_boost = /usr/lib
41 | ddbt.find_bin = <Cygwin installation directory>\\bin\\find
42 | ```
43 | 
-------------------------------------------------------------------------------- /ddbtoaster/docs/presentations/DDBT/wip1.key: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbtoaster/dbtoaster-backend/e47273968b360b1c3685449f23d0510c78f59216/ddbtoaster/docs/presentations/DDBT/wip1.key
-------------------------------------------------------------------------------- /ddbtoaster/docs/presentations/DDBT/wip2.key: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbtoaster/dbtoaster-backend/e47273968b360b1c3685449f23d0510c78f59216/ddbtoaster/docs/presentations/DDBT/wip2.key
-------------------------------------------------------------------------------- /ddbtoaster/docs/presentations/DDBT/wip3.key: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbtoaster/dbtoaster-backend/e47273968b360b1c3685449f23d0510c78f59216/ddbtoaster/docs/presentations/DDBT/wip3.key
-------------------------------------------------------------------------------- /ddbtoaster/docs/presentations/LMSinDDBToaster/CPS1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbtoaster/dbtoaster-backend/e47273968b360b1c3685449f23d0510c78f59216/ddbtoaster/docs/presentations/LMSinDDBToaster/CPS1.png -------------------------------------------------------------------------------- /ddbtoaster/docs/presentations/LMSinDDBToaster/CPS2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbtoaster/dbtoaster-backend/e47273968b360b1c3685449f23d0510c78f59216/ddbtoaster/docs/presentations/LMSinDDBToaster/CPS2.png -------------------------------------------------------------------------------- /ddbtoaster/docs/presentations/LMSinDDBToaster/bib.tex: -------------------------------------------------------------------------------- 1 | \begin{thebibliography}{99} 2 | 3 | %books 4 | \beamertemplatebookbibitems 5 | \bibitem{AMA2006} [AMA06] José Nelson Amaral, "TopicC: Loop Fusion" slides, Compiler Design and Optimization, Department of Computing Science, University of Alberta, 2006. URL: http://webdocs.cs.ualberta.ca/~amaral/courses/680/. 6 | 7 | 8 | \beamertemplatearticlebibitems 9 | 10 | \bibitem{MEG1997} [MEG97] Nimrod Megiddo and Vivek Sarkar. 1997. Optimal weighted loop fusion for parallel programs. In Proceedings of the ninth annual ACM symposium on Parallel algorithms and architectures (SPAA '97). ACM, New York, NY, USA, 282-291. 11 | 12 | \bibitem{GAO92} [GAO92] G. R. Gao, R. Olsen, V. Sarkar, and R. Thekkath. 1993. Collective loop fusion for array contraction. Springer-Verlag Lecture Notes in Computer Science, 757. Proceedings of the Fifth Workshop on Languages and Compilers for Parallel Computing, Yale University, 281-295. 13 | \bibitem{KEN93} [KEN93] Ken Kennedy and Kathryn S. McKinley. 1994. Maximizing Loop Parallelism and Improving Data Locality via Loop Fusion and Distribution. Springer-Verlag Lecture Notes in Computer Science, 768. Proceedings of the Sixth Workshop on Languages and Compilers for Parallel Computing, Portland, Oregon, 301-320. 
14 | 15 | \end{thebibliography} 16 | -------------------------------------------------------------------------------- /ddbtoaster/docs/presentations/LMSinDDBToaster/finance-large.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbtoaster/dbtoaster-backend/e47273968b360b1c3685449f23d0510c78f59216/ddbtoaster/docs/presentations/LMSinDDBToaster/finance-large.png -------------------------------------------------------------------------------- /ddbtoaster/docs/tck/VLDBJ_2013 experiments.webloc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbtoaster/dbtoaster-backend/e47273968b360b1c3685449f23d0510c78f59216/ddbtoaster/docs/tck/VLDBJ_2013 experiments.webloc -------------------------------------------------------------------------------- /ddbtoaster/docs/tck/batch/build.sbt: -------------------------------------------------------------------------------- 1 | name := "Batching" 2 | 3 | Seq( 4 | scalaSource in Compile <<= baseDirectory / "src", 5 | javaSource in Compile <<= baseDirectory / "src", 6 | sourceDirectory in Compile <<= baseDirectory / "src", 7 | scalaSource in Test <<= baseDirectory / "test", 8 | javaSource in Test <<= baseDirectory / "test", 9 | sourceDirectory in Test <<= baseDirectory / "test", 10 | resourceDirectory in Compile <<= baseDirectory / "conf" 11 | ) 12 | 13 | scalaVersion := "2.10.3" 14 | 15 | scalacOptions ++= Seq("-deprecation", "-unchecked", "-feature", "-optimise", "-Yinline-warnings") 16 | -------------------------------------------------------------------------------- /ddbtoaster/docs/tck/calc.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbtoaster/dbtoaster-backend/e47273968b360b1c3685449f23d0510c78f59216/ddbtoaster/docs/tck/calc.zip -------------------------------------------------------------------------------- /ddbtoaster/docs/tck/kryo/akka/AkkaSerializers.scala: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright 2012 Roman Levenstein 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | ******************************************************************************/ 16 | 17 | package com.romix.akka.serialization.kryo 18 | 19 | import akka.actor.ExtendedActorSystem 20 | import akka.actor.ActorRef 21 | import akka.serialization.Serialization 22 | import com.esotericsoftware.kryo.Kryo 23 | import com.esotericsoftware.kryo.Serializer 24 | import com.esotericsoftware.kryo.io.Input 25 | import com.esotericsoftware.kryo.io.Output 26 | 27 | /*** 28 | * This module provides helper classes for serialization of Akka-specific classes. 
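* The ActorRefSerializer below writes an ActorRef as its serialized actor path and, on read, resolves that path back through the actor system's provider.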
29 | *
30 | * @author Roman Levenstein 
31 | *
32 | */
33 | 
34 | class ActorRefSerializer(val system: ExtendedActorSystem) extends Serializer[ActorRef] {
35 | 
36 | override def read(kryo: Kryo, input: Input, typ: Class[ActorRef]): ActorRef = {
37 | val path = input.readString()
38 | system.provider.resolveActorRef(path)
39 | }
40 | 
41 | override def write(kryo: Kryo, output: Output, obj: ActorRef) = {
42 | output.writeString(Serialization.serializedActorPath(obj))
43 | }
44 | }
45 | 
-------------------------------------------------------------------------------- /ddbtoaster/docs/tck/log4j.properties: --------------------------------------------------------------------------------
1 | log4j.rootLogger=OFF
2 | 
-------------------------------------------------------------------------------- /ddbtoaster/docs/tck/perf/hash_java_old.java: --------------------------------------------------------------------------------
1 | package ddbt.test.cperf;
2 | import java.util.*;
3 | import scala.Tuple2;
4 | import scala.Tuple3;
5 | 
6 | class HashMapOld {
7 | static Random r = new Random();
8 | static final int N = 1000000;
9 | static final double F = 1.5;
10 | 
11 | @SuppressWarnings("unchecked")
12 | public static void main(String[] args) {
13 | HashMap<Tuple3<Long,Long,String>,Tuple2<Double,Double>> map = new HashMap<Tuple3<Long,Long,String>,Tuple2<Double,Double>>(); // type arguments in this file were stripped during extraction and are restored here to match the uses below
14 | 
15 | for (int z=0;z<10;++z) {
16 | long t0 = System.nanoTime();
17 | long i,j;
18 | for (i=0;i<N;++i) {
19 | Tuple3<Long,Long,String> k = new Tuple3<Long,Long,String>(i,i+1,"Hello world");
20 | Tuple2<Double,Double> v = new Tuple2<Double,Double>(i*3.0,i*4.0);
21 | map.put(k,v);
22 | }
23 | for (j=0;j<N;++j) {
24 | i = (long)(r.nextDouble()*N*F); // reconstructed: this line was lost in extraction; a random key in [0, N*F) is consistent with the otherwise unused fields r and F and with the containsKey probe below
25 | Tuple3<Long,Long,String> k = new Tuple3<Long,Long,String>(i,i+1,"Hello world");
26 | Tuple2<Double,Double> v = new Tuple2<Double,Double>(i*1.0,i*1.0);
27 | if (!map.containsKey(k)) map.put(k,v);
28 | if (i%10==0) {
29 | Iterator it = map.entrySet().iterator();
30 | while (it.hasNext()) {
31 | Map.Entry pairs = (Map.Entry)it.next();
32 | Tuple3<Long,Long,String> k2 = (Tuple3<Long,Long,String>)pairs.getKey();
33 | Tuple2<Double,Double> v1 = (Tuple2<Double,Double>)pairs.getValue();
34 | Tuple2<Double,Double> v2 = new Tuple2<Double,Double>(v1._1()*2.0,v1._2()*0.5);
35 | map.put(k2,v2);
36 | }
37 | }
38 | }
39 | 
40 | long t1 = System.nanoTime();
41 | long us = (t1-t0)/1000;
42 | map.clear();
43 | System.out.printf("Time = %.6f\n",us/1000000.0);
44 | }
45 | }
46 | }
47 | 
-------------------------------------------------------------------------------- /ddbtoaster/docs/tck/test_nio/build.sbt: --------------------------------------------------------------------------------
1 | // --------- project information
2 | Seq(
3 | name := "NioTest",
4 | organization := "ch.epfl.data",
5 | version := "0.1"
6 | )
7 | 
8 | // --------- Paths
9 | Seq(
10 | scalaSource in Compile <<= baseDirectory / "src",
11 | javaSource in Compile <<= baseDirectory / "src",
12 | sourceDirectory in Compile <<= baseDirectory / "src",
13 | scalaSource in Test <<= baseDirectory / "test",
14 | javaSource in Test <<= baseDirectory / "test",
15 | sourceDirectory in Test <<= baseDirectory / "test",
16 | resourceDirectory in Compile <<= baseDirectory / "conf"
17 | )
18 | 
19 | // --------- Compilation options
20 | Seq(
21 | scalaVersion := "2.10.3",
22 | scalacOptions ++= Seq("-deprecation","-unchecked","-feature","-optimise","-Yinline-warnings"), // ,"-target:jvm-1.7"
23 | javaOptions ++= Seq("-ea")
24 | )
25 | 
26 | // --------- Dependencies
27 | libraryDependencies <++= scalaVersion(v=>Seq(
28 | "com.typesafe.akka" %% "akka-actor" % "2.2.3",
29 | "com.typesafe.akka" %% "akka-remote" % "2.2.3"
30 | ))
31 | /*
32 | // http://stackoverflow.com/questions/7344477/adding-new-task-dependencies-to-built-in-sbt-tasks
33 | JAVA_INCLUDE="/System/Library/Frameworks/JavaVM.framework/Headers"
34 | // --------- Execution options
35 | javacOptions ++= Seq("-Xlint:unchecked","-Xlint:-options","-source","1.6","-target","1.6") // forces JVM 1.6 compatibility for JDK 1.7 compiler
36 | Seq(
37 | fork := true, // required to enable javaOptions
38 | javaOptions ++= Seq("-Xss128m"), // ,"-Xss512m","-XX:MaxPermSize=2G"
39 | javaOptions ++= Seq("-Xmx14G","-Xms14G","-verbose:gc"),parallelExecution in Test := false, // for large benchmarks
40 | javaOptions <+= (fullClasspath in Runtime) map (cp => "-Dsbt.classpath="+cp.files.absString) // propagate paths
41 | )
42 | TaskKey[Unit]("scripts") <<= (baseDirectory, fullClasspath in Runtime) map { (base, cp) =>
43 | def s(file:String,main:String) {
44 | val content = "#!/bin/sh\njava -classpath \""+cp.files.absString+"\" "+main+" \"$@\"\n"
45 | val out = base/file; IO.write(out,content); out.setExecutable(true)
46 | }
47 | s("toast.sh","ddbt.Compiler")
48 | s("unit.sh","ddbt.UnitTest")
49 | }
50 | */
51 | 
-------------------------------------------------------------------------------- /ddbtoaster/docs/tck/test_nio/perf.csv: --------------------------------------------------------------------------------
1 | NIO:
2 | 1,1,1,74819570,5.001206
3 | 4,1,1,137128,5.001125
4 | 1,4,1,1028102,5.000420
5 | 4,4,1,337888,5.001126
6 | 1,1,4,98798771,5.001163
7 | 4,1,4,274608,5.000154
8 | 1,4,4,4128368,5.000128
9 | 4,4,4,1110318,5.000132
10 | 
11 | AKKA_custom:
12 | 1,1,1,16158932,5.000483
13 | 4,1,1,721461,5.000123
14 | 1,4,1,9622657,5.000277
15 | 4,4,1,1334689,5.000131
16 | 1,1,4,18417886,5.000145
17 | 4,1,4,2876084,5.000151
18 | 1,4,4,28279465,5.000799
19 | 4,4,4,5055321,5.000141
20 | 
21 | AKKA_default:
22 | 1,1,1,16634569,5.000253
23 | 4,1,1,720350,5.000127
24 | 1,4,1,9460552,5.001120
25 | 4,4,1,1345271,5.000121
26 | 1,1,4,22125796,5.001059
27 | 4,1,4,2974464,5.000237
28 | 1,4,4,27419074,5.001130
29 | 4,4,4,5060680,5.000123
30 | 
-------------------------------------------------------------------------------- /ddbtoaster/docs/tck/test_serial/build.sbt: --------------------------------------------------------------------------------
1 | // --------- project information
2 | Seq(
3 | name := "SerializationTest",
4 | organization := "ch.epfl.data",
5 | version := "0.1"
6 | )
7 | 
8 | // --------- Paths
9 | Seq(
10 | scalaSource in Compile <<= baseDirectory / "src",
11 | javaSource in Compile <<= baseDirectory / "src",
12 | sourceDirectory in Compile <<= baseDirectory / "src",
13 | scalaSource in Test <<= baseDirectory / "test",
14 | javaSource in Test <<= baseDirectory / "test",
15 | sourceDirectory in Test <<= baseDirectory / "test",
16 | resourceDirectory in Compile <<= baseDirectory / "conf"
17 | )
18 | 
19 | // --------- Compilation options
20 | Seq(
21 | scalaVersion := "2.10.3",
22 | scalacOptions ++= Seq("-deprecation","-unchecked","-feature","-optimise","-Yinline-warnings") // ,"-target:jvm-1.7"
23 | )
24 | 
25 | val cc = (sourceDirectory,classDirectory in Compile) map { (src,dest) =>
26 | Seq("gcc","-O3","-shared","-I"+src+"/include",src+"/serial.c","-o",dest+"/serial/NodeIOImpl.jnilib").!
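// the JNI library is emitted into the class output directory so that it ends up on the runtime classpath; run.sh performs the same gcc step by hand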
27 | } 28 | 29 | Seq( 30 | compile <<= (compile in Compile) dependsOn cc, 31 | run <<= (run in Compile) dependsOn cc 32 | ) 33 | /* 34 | // http://stackoverflow.com/questions/7344477/adding-new-task-dependencies-to-built-in-sbt-tasks 35 | JAVA_INCLUDE="/System/Library/Frameworks/JavaVM.framework/Headers" 36 | // --------- Execution options 37 | javacOptions ++= Seq("-Xlint:unchecked","-Xlint:-options","-source","1.6","-target","1.6") // forces JVM 1.6 compatibility for JDK 1.7 compiler 38 | Seq( 39 | fork := true, // required to enable javaOptions 40 | javaOptions ++= Seq("-Xss128m"), // ,"-Xss512m","-XX:MaxPermSize=2G" 41 | javaOptions ++= Seq("-Xmx14G","-Xms14G","-verbose:gc"),parallelExecution in Test := false, // for large benchmarks 42 | javaOptions <+= (fullClasspath in Runtime) map (cp => "-Dsbt.classpath="+cp.files.absString) // propagate paths 43 | ) 44 | TaskKey[Unit]("scripts") <<= (baseDirectory, fullClasspath in Runtime) map { (base, cp) => 45 | def s(file:String,main:String) { 46 | val content = "#!/bin/sh\njava -classpath \""+cp.files.absString+"\" "+main+" \"$@\"\n" 47 | val out = base/file; IO.write(out,content); out.setExecutable(true) 48 | } 49 | s("toast.sh","ddbt.Compiler") 50 | s("unit.sh","ddbt.UnitTest") 51 | } 52 | */ 53 | -------------------------------------------------------------------------------- /ddbtoaster/docs/tck/test_serial/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | gcc -shared -Isrc/include src/serial.c -o target/scala-2.10/classes/serial/NodeIOImpl.jnilib 4 | scala -cp target/scala-2.10/classes serial.SerialTest 5 | 6 | -------------------------------------------------------------------------------- /ddbtoaster/docs/tck/test_serial/src/include/jni_md.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 1996, 2000, Oracle and/or its affiliates. All rights reserved. 3 | * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms. 
4 | *
5 | *
6 | *
7 | *
8 | *
9 | *
10 | *
11 | *
12 | *
13 | *
14 | *
15 | *
16 | *
17 | *
18 | *
19 | *
20 | *
21 | *
22 | *
23 | *
24 | */
25 | 
26 | #ifndef _JAVASOFT_JNI_MD_H_
27 | #define _JAVASOFT_JNI_MD_H_
28 | 
29 | #define JNIEXPORT
30 | #define JNIIMPORT
31 | #define JNICALL
32 | 
33 | typedef int jint;
34 | #ifdef _LP64 /* 64-bit Solaris */
35 | typedef long jlong;
36 | #else
37 | typedef long long jlong;
38 | #endif
39 | 
40 | typedef signed char jbyte;
41 | 
42 | #endif /* !_JAVASOFT_JNI_MD_H_ */
43 | 
-------------------------------------------------------------------------------- /ddbtoaster/docs/tck/test_serial/toy/Reflect.java: --------------------------------------------------------------------------------
1 | import java.lang.reflect.Constructor;
2 | 
3 | public class Reflect {
4 | public static void main(String[] args) throws Exception {
5 | doRegular(); doReflection();
6 | doRegular(); doReflection();
7 | doRegular(); doReflection();
8 | doRegular(); doReflection();
9 | doRegular(); doReflection();
10 | }
11 | 
12 | public static void doRegular() throws Exception {
13 | long start = System.nanoTime();
14 | for (int i=0; i<1000000; i++) {
15 | new X().nop();
16 | }
17 | System.out.println("Regular: "+(System.nanoTime() - start));
18 | }
19 | 
20 | public static void doReflection() throws Exception {
21 | long start = System.nanoTime();
22 | Class<X> cl = X.class;
23 | //Constructor co = (Constructor)(cl.getConstructor(Object.class));
24 | for (int i=0; i<1000000; i++) {
25 | cl.newInstance().nop();
26 | }
27 | System.out.println("Reflect: "+(System.nanoTime() - start));
28 | }
29 | 
30 | }
31 | 
32 | class X {
33 | public static int ctr=0;
34 | public X() {}
35 | public X(Object o) { }
36 | public void nop() { ctr+=1; }
37 | }
38 | 
-------------------------------------------------------------------------------- /ddbtoaster/docs/tck/test_serial/toy/Toy.scala: --------------------------------------------------------------------------------
1 | object Toy {
2 | def main(args: Array[String]) {
3 | val a = new Vec3(1,2,3)
4 | val b = new Vec3(3,2,1)
5 | val c = a.dot(b)
6 | System.out.println(c);
7 | }
8 | }
9 | 
-------------------------------------------------------------------------------- /ddbtoaster/docs/tck/test_serial/toy/Vec3.c: --------------------------------------------------------------------------------
1 | #include "Vec3.h"
2 | 
3 | // http://java.sun.com/docs/books/jni/html/jniTOC.html
4 | // http://www.iam.ubc.ca/guides/javatut99/native1.1/implementing/field.html
5 | // http://www.iam.ubc.ca/guides/javatut99/native1.1/implementing/method.html
6 | // http://www.iam.ubc.ca/guides/javatut99/native1.1/implementing/example-1dot1/FieldAccess.c
7 | // http://shootout.alioth.debian.org/u64/program.php?test=pidigits&lang=scala&id=4
8 | // http://stackoverflow.com/questions/3950635/how-to-compile-dynamic-library-for-a-jni-application-on-linux
9 | 
10 | JNIEXPORT jobject JNICALL Java_Vec3_dot(JNIEnv *env, jobject a, jobject b) {
11 | jclass cls = (*env)->GetObjectClass(env, a); // Vec3 class
12 | 
13 | // retrieve values and compute dot product
14 | jfieldID fid;
15 | fid = (*env)->GetFieldID(env, cls, "x", "I");
16 | int x = (*env)->GetIntField(env, a, fid) * (*env)->GetIntField(env, b, fid);
17 | fid = (*env)->GetFieldID(env, cls, "y", "I");
18 | int y = (*env)->GetIntField(env, a, fid) * (*env)->GetIntField(env, b, fid);
19 | fid = (*env)->GetFieldID(env, cls, "z", "I");
20 | int z = (*env)->GetIntField(env, a, fid) * (*env)->GetIntField(env, b, fid);
21 | 
22 | // invoke constructor
23 | cls = (*env)->FindClass(env, "Vec3");
24 | 
25 | jmethodID cid = (*env)->GetMethodID(env, cls, "<init>", "(III)V");
26 | jobject result = (*env)->NewObject(env, cls, cid, x,y,z);
27 | return result;
28 | }
29 | 
-------------------------------------------------------------------------------- /ddbtoaster/docs/tck/test_serial/toy/Vec3.h: --------------------------------------------------------------------------------
1 | /* DO NOT EDIT THIS FILE - it is machine generated */
2 | #include <jni.h>
3 | /* Header for class Vec3 */
4 | 
5 | #ifndef _Included_Vec3
6 | #define _Included_Vec3
7 | #ifdef __cplusplus
8 | extern "C" {
9 | #endif
10 | /*
11 | * Class: Vec3
12 | * Method: dot
13 | * Signature: (LVec3;)LVec3;
14 | */
15 | JNIEXPORT jobject JNICALL Java_Vec3_dot
16 | (JNIEnv *, jobject, jobject);
17 | 
18 | #ifdef __cplusplus
19 | }
20 | #endif
21 | #endif
22 | 
-------------------------------------------------------------------------------- /ddbtoaster/docs/tck/test_serial/toy/Vec3.scala: --------------------------------------------------------------------------------
1 | object Loader {
2 | val libs = new scala.collection.mutable.HashSet[String]()
3 | val paths = (this.getClass.getClassLoader match {
4 | case ctx: java.net.URLClassLoader => ctx.getURLs.map(_.getPath)
5 | case _ => System.getProperty("java.class.path").split(":")
6 | }).reverse.toList
7 | 
8 | def load(lib:String) {
9 | def ld(pl:List[String]):Unit = pl match {
10 | case p::ps => val f = new java.io.File(p+"/lib"+lib+".jnilib")
11 | if (f.exists) System.load(f.getCanonicalPath()) else ld(ps);
12 | case Nil => throw new Exception("JNI Library "+lib+" not found");
13 | }
14 | if (! libs.contains(lib)) { ld(paths); libs+=lib; }
15 | }
16 | }
17 | 
18 | class Vec3(val x:Int, val y:Int, val z:Int) {
19 | Loader.load("Vec3")
20 | 
21 | def +:(v: Vec3) = new Vec3(x+v.x,y+v.y,z+v.z)
22 | def -:(v: Vec3) = new Vec3(x-v.x,y-v.y,z-v.z)
23 | override def toString() = "("+x+","+y+","+z+")"
24 | @native def dot(v: Vec3):Vec3; //= new Vec3(x*v.x,y*v.y,z*v.z)
25 | }
26 | 
-------------------------------------------------------------------------------- /ddbtoaster/docs/tck/test_serial/toy/make: --------------------------------------------------------------------------------
1 | #!/bin/sh
2 | 
3 | SCALA_HOME=/Developer/Scala
4 | JAVA_INCLUDE="$JAVA_HOME/include"
5 | 
6 | # get base dir
7 | if [ "`echo $0 | grep '^/'`" != "" ]; then BASE="$0"; else BASE="$PWD/$0"; fi
8 | BASE="`echo $BASE | sed 's/\/[^/]*$/\//g' | sed 's/\/.\//\//g' | sed 's/\/$//g'`";
9 | 
10 | # cleanup
11 | rm -r "$BASE/bin" 2>/dev/null
12 | mkdir -p "$BASE/bin"
13 | 
14 | # compile scala
15 | scalac -d "$BASE/bin" "$BASE/src/"*.scala
16 | 
17 | # get the class signature
18 | javah -classpath "$SCALA_HOME/lib/scala-library.jar:$BASE/bin" -d "$BASE/src" Vec3
19 | 
20 | # compile C library
21 | if [ "$OSTYPE" = "darwin" ]; then
22 | JAVA_INCLUDE="/System/Library/Frameworks/JavaVM.framework/Headers"
23 | fi
24 | 
25 | gcc -I"$JAVA_INCLUDE" -shared "$BASE/src/Vec3.c" -o "$BASE/bin/libVec3.jnilib"
26 | 
27 | # and execute
28 | 
29 | scala -cp "$BASE/bin" Toy
30 | # java -Djava.library.path=bin -cp /Developer/Scala/lib/scala-library.jar:bin Toy
31 | 
32 | # javap on the .class file -> get text
33 | # javah on the .class file -> get header file
34 | 
-------------------------------------------------------------------------------- /ddbtoaster/experiments/datasets/README: --------------------------------------------------------------------------------
1 | Put your datasets here (e.g., 1GB/lineitem.csv) and update tpch_template.hpp.
tpch_template.hpp. 2 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/run_tpcds.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | for i in 3 7 19 27 34 42 43 46 52 55 68 73 79; 4 | do 5 | echo "" 6 | echo "#############################" 7 | 8 | echo "Compiling TPC-DS query${i}..." 9 | g++ -Wall -Wno-unused-variable -std=c++11 -pedantic -O3 src/main.cpp -I src/lib -I src/tpcds -include src/tpcds/query${i}.hpp -o bin/tpcds_query${i} -DNUMBER_OF_RUNS=3 -include src/tpcds/tpcds.hpp -include src/tpcds/tpcds_template.hpp 10 | 11 | echo "Running TPC-DS query${i}..." 12 | bin/tpcds_query${i} 13 | 14 | echo "#############################" 15 | echo "" 16 | done 17 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/run_tpcds_batch.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | for i in 3 7 19 27 34 42 43 46 52 55 68 73 79; 4 | do 5 | for bs in 1 10 100 1000 10000 100000 6 | do 7 | echo "" 8 | echo "#############################" 9 | 10 | echo "Compiling TPC-DS query${i}..." 11 | g++ -Wall -Wno-unused-variable -std=c++11 -pedantic -O3 src/main.cpp -I src/lib -I src/tpcds -include src/tpcds/query${i}.hpp -o bin/tpcds_query${i} -DBATCH_MODE -DBATCH_SIZE=$bs -DNUMBER_OF_RUNS=3 -include src/tpcds/tpcds.hpp -include src/tpcds/tpcds_template.hpp 12 | 13 | echo "Running TPC-DS query${i} with batch size ${bs}..." 14 | bin/tpcds_query${i} 15 | 16 | echo "#############################" 17 | echo "" 18 | done 19 | done 20 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/run_tpch.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | for i in `seq 1 22`; 4 | do 5 | echo "" 6 | echo "#############################" 7 | 8 | echo "Compiling TPC-H query${i}..." 9 | g++ -Wall -Wno-unused-variable -std=c++11 -pedantic -O3 src/main.cpp -I src/lib -I src/tpch -include src/tpch/query${i}.hpp -o bin/tpch_query${i} -DNUMBER_OF_RUNS=3 -include src/tpch/tpch.hpp -include src/tpch/tpch_template.hpp 10 | 11 | echo "Running TPC-H query${i}..." 12 | bin/tpch_query${i} 13 | 14 | echo "#############################" 15 | echo "" 16 | done 17 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/run_tpch_batch.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | for i in `seq 1 22`; 4 | do 5 | for bs in 1 10 100 1000 10000 100000 6 | do 7 | echo "" 8 | echo "#############################" 9 | 10 | echo "Compiling TPC-H query${i}..." 11 | g++ -Wall -Wno-unused-variable -std=c++11 -pedantic -O3 src/main.cpp -I src/lib -I src/tpch -include src/tpch/query${i}.hpp -o bin/tpch_query${i} -DBATCH_MODE -DBATCH_SIZE=${bs} -DNUMBER_OF_RUNS=3 -include src/tpch/tpch.hpp -include src/tpch/tpch_template.hpp 12 | 13 | echo "Running TPC-H query${i} with batch size ${bs}..."
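# Batching is fixed at compile time: the binary below was built with -DBATCH_SIZE=${bs}
# and -DNUMBER_OF_RUNS=3, so each invocation repeats the measurement (inferred from the flags above).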
14 | bin/tpch_query${i} 15 | 16 | echo "#############################" 17 | echo "" 18 | done 19 | done 20 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/run_tpch_memory.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | for i in `seq 1 22`; 4 | do 5 | echo "" 6 | echo "#############################" 7 | 8 | echo "Compiling TPC-H query${i}..." 9 | g++ -Wall -Wno-unused-variable -std=c++11 -pedantic -O3 src/main.cpp -I src/lib -I src/tpch -include src/tpch/query${i}.hpp -DNUMBER_OF_RUNS=1 -o bin/tpch_query${i} -include src/tpch/tpch_template_memory.hpp 10 | 11 | echo "Running TPC-H query${i}..." 12 | bin/tpch_query${i} 13 | 14 | echo "#############################" 15 | echo "" 16 | done 17 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/lib/functions.hpp: -------------------------------------------------------------------------------- 1 | #ifndef DBTOASTER_FUNCTIONS_HPP 2 | #define DBTOASTER_FUNCTIONS_HPP 3 | 4 | #include "macro.hpp" 5 | #include <cstdio> 6 | #include <iostream> 7 | #include <regex.h> 8 | 9 | using namespace std; 10 | 11 | namespace dbtoaster 12 | { 13 | // Conversion helpers: dates are packed into a single integer as yyyy*10000 + mm*100 + dd 14 | DATE_TYPE str2date(const char* c) 15 | { 16 | unsigned int y, m, d; 17 | if (sscanf(c, "%u-%u-%u", &y, &m, &d) < 3 || m > 12 || d > 31) 18 | { 19 | throw std::invalid_argument(std::string("invalid date string ") + c); 20 | } 21 | return (y % 10000) * 10000 + (m % 100) * 100 + (d % 100); 22 | } 23 | 24 | DATE_TYPE str2date(const STRING_TYPE& s) 25 | { 26 | return str2date(s.c_str()); 27 | } 28 | 29 | DATE_TYPE Udate(const char* c) 30 | { 31 | return str2date(c); 32 | } 33 | 34 | DATE_TYPE Udate(const STRING_TYPE& s) 35 | { 36 | return str2date(s.c_str()); 37 | } 38 | 39 | FORCE_INLINE long Ulistmax(long v1, long v2) { return ((v1 > v2) ? v1 : v2 ); } 40 | 41 | FORCE_INLINE DOUBLE_TYPE Ulistmax(DOUBLE_TYPE v1, long v2) { return ((v1 > v2) ? v1 : v2); } 42 | 43 | FORCE_INLINE DOUBLE_TYPE Udiv(DOUBLE_TYPE x) { return (x != 0.0 ? 1.0 / x : 0.0); } 44 | 45 | FORCE_INLINE long Uyear_part(date d) 46 | { 47 | return (d / 10000) % 10000; 48 | } 49 | 50 | FORCE_INLINE long Umonth_part(date d) 51 | { 52 | return (d / 100) % 100; 53 | } 54 | 55 | FORCE_INLINE long Uday_part(date d) 56 | { 57 | return d % 100; 58 | } 59 | 60 | FORCE_INLINE int Upreg_match(const regex_t& preg, const STRING_TYPE& s) 61 | { 62 | int ret = regexec(&preg, s.c_str(), 0, NULL, 0); 63 | if (ret == 0) return 1; 64 | else if (ret == REG_NOMATCH) return 0; 65 | 66 | std::cerr << "Error evaluating regular expression."
<< std::endl; 67 | exit(-1); 68 | } 69 | 70 | FORCE_INLINE STRING_TYPE Usubstring(const STRING_TYPE &s, uint32_t start, uint32_t len) 71 | { 72 | return s.substr(start, len); 73 | } 74 | } 75 | 76 | #endif /* DBTOASTER_FUNCTIONS_HPP */ 77 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/lib/macro.hpp: -------------------------------------------------------------------------------- 1 | #ifndef DBTOASTER_MACRO_HPP 2 | #define DBTOASTER_MACRO_HPP 3 | 4 | #define STRING(s) #s 5 | 6 | //----------------------------------------------------------------------------- 7 | // Microsoft Visual Studio 8 | 9 | #if defined(_MSC_VER) 10 | 11 | typedef unsigned int uint32_t; 12 | 13 | #define FORCE_INLINE __forceinline 14 | #define NEVER_INLINE __declspec(noinline) 15 | 16 | //----------------------------------------------------------------------------- 17 | // Other compilers 18 | 19 | #else // defined(_MSC_VER) 20 | 21 | #include <stdint.h> 22 | 23 | #define FORCE_INLINE inline __attribute__((always_inline)) 24 | #define NEVER_INLINE __attribute__((noinline)) 25 | 26 | #endif // !defined(_MSC_VER) 27 | 28 | #endif /* DBTOASTER_MACRO_HPP */ 29 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/lib/stopwatch.hpp: -------------------------------------------------------------------------------- 1 | #ifndef STOPWATCH_HPP 2 | #define STOPWATCH_HPP 3 | 4 | #include <sys/time.h> 5 | 6 | class Stopwatch { 7 | private: 8 | long startTime; 9 | long endTime; 10 | 11 | public: 12 | Stopwatch() : startTime(0), endTime(0) { } 13 | 14 | void restart() 15 | { 16 | timeval start_time; 17 | gettimeofday(&start_time, NULL); 18 | startTime = start_time.tv_sec * 1000 + start_time.tv_usec / 1000; 19 | } 20 | 21 | void stop() 22 | { 23 | timeval end_time; 24 | gettimeofday(&end_time, NULL); 25 | endTime = end_time.tv_sec * 1000 + end_time.tv_usec / 1000; 26 | } 27 | 28 | long elapsedTimeInMilliSeconds() 29 | { 30 | return endTime - startTime; 31 | } 32 | }; 33 | 34 | #endif /* STOPWATCH_HPP */ 35 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/lib/types.hpp: -------------------------------------------------------------------------------- 1 | #ifndef DBTOASTER_TYPES_HPP 2 | #define DBTOASTER_TYPES_HPP 3 | 4 | // #include <string> 5 | #include "string.hpp" 6 | 7 | // #define DOUBLE_ZERO_APPROXIMATED 8 | // #define DOUBLE_ZERO_THRESHOLD 1e-8 9 | 10 | namespace dbtoaster 11 | { 12 | typedef long date; 13 | 14 | typedef long DATE_TYPE; 15 | 16 | typedef double DOUBLE_TYPE; 17 | 18 | typedef size_t HASH_RES_t; 19 | 20 | //typedef RefCountedString STRING_TYPE; 21 | 22 | typedef PooledRefCountedString STRING_TYPE; 23 | 24 | //typedef VariableLengthString STRING_TYPE; 25 | 26 | //typedef FixedLengthString<10> STRING_TYPE; 27 | 28 | //typedef std::string STRING_TYPE; 29 | 30 | template <class V> 31 | struct ZeroValue 32 | { 33 | V get() { return V(); } 34 | FORCE_INLINE bool isZero(V a) { return (a == V()); } 35 | }; 36 | 37 | template<> 38 | struct ZeroValue<int> 39 | { 40 | int get() { return 0; } 41 | FORCE_INLINE bool isZero(int a) { return (a == 0); } 42 | }; 43 | 44 | template<> 45 | struct ZeroValue<long> 46 | { 47 | long get() { return 0L; } 48 | FORCE_INLINE bool isZero(long a) { return (a == 0L); } 49 | }; 50 | 51 | template<> 52 | struct ZeroValue<float> 53 | { 54 | float get() { return 0.0f; } 55 | FORCE_INLINE bool isZero(float a) { return (a == 0.0f); } 56 | }; 57 | 58 | template<> 59 | struct
ZeroValue<double> 60 | { 61 | double get() { return 0.0; } 62 | #ifdef DOUBLE_ZERO_APPROXIMATED 63 | FORCE_INLINE bool isZero(double a) { return (a >= -DOUBLE_ZERO_THRESHOLD && a <= DOUBLE_ZERO_THRESHOLD); } 64 | #else 65 | FORCE_INLINE bool isZero(double a) { return (a == 0.0); } 66 | #endif 67 | }; 68 | } 69 | 70 | #endif /* DBTOASTER_TYPES_HPP */ -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpcds/codegen: -------------------------------------------------------------------------------- 1 | codegen_revision_r3408 -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpcds/codegen_batch: -------------------------------------------------------------------------------- 1 | codegen_revision_r3408_batch -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpcds/query19.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCDS_DATEDIM 2 | #define USE_TPCDS_STORESALES 3 | #define USE_TPCDS_ITEM 4 | #define USE_TPCDS_CUSTOMER 5 | #define USE_TPCDS_CUSTOMERADDRESS 6 | #define USE_TPCDS_STORE 7 | 8 | #ifdef BATCH_MODE 9 | #include "codegen_batch/Tpcds19VCpp.hpp" 10 | #else 11 | #include "codegen/Tpcds19VCpp.hpp" 12 | #endif 13 | 14 | namespace dbtoaster 15 | { 16 | class data_t; 17 | 18 | void print_result(data_t& data) 19 | { 20 | std::cout << "TPC-DS QUERY19: " << data.get_EXT_PRICE().count() << std::endl; 21 | } 22 | } 23 | 24 | 25 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpcds/query27.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCDS_DATEDIM 2 | #define USE_TPCDS_CUSTOMERDEMOGRAPHICS 3 | #define USE_TPCDS_STORESALES 4 | #define USE_TPCDS_ITEM 5 | #define USE_TPCDS_STORE 6 | 7 | #ifdef BATCH_MODE 8 | #include "codegen_batch/Tpcds27VCpp.hpp" 9 | #else 10 | #include "codegen/Tpcds27VCpp.hpp" 11 | #endif 12 | 13 | namespace dbtoaster 14 | { 15 | class data_t; 16 | 17 | void print_result(data_t& data) 18 | { 19 | std::cout << "TPC-DS QUERY27: " << data.get_AGG1().count() 20 | << " " << data.get_AGG2().count() 21 | << " " << data.get_AGG3().count() 22 | << " " << data.get_AGG4().count() 23 | << std::endl; 24 | } 25 | } 26 | 27 | 28 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpcds/query3.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCDS_DATEDIM 2 | #define USE_TPCDS_STORESALES 3 | #define USE_TPCDS_ITEM 4 | 5 | #ifdef BATCH_MODE 6 | #include "codegen_batch/Tpcds3VCpp.hpp" 7 | #else 8 | #include "codegen/Tpcds3VCpp.hpp" 9 | #endif 10 | 11 | namespace dbtoaster 12 | { 13 | class data_t; 14 | 15 | void print_result(data_t& data) 16 | { 17 | std::cout << "TPC-DS QUERY3: " << data.get_SUM_AGG().count() << std::endl; 18 | } 19 | } 20 | 21 | 22 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpcds/query34.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCDS_DATEDIM 2 | #define USE_TPCDS_HOUSEHOLDDEMOGRAPHICS 3 | #define USE_TPCDS_STORESALES 4 | #define USE_TPCDS_CUSTOMER 5 | #define USE_TPCDS_STORE 6 | 7 | #ifdef BATCH_MODE 8 | #include "codegen_batch/Tpcds34VCpp.hpp" 9 | #else 10 | #include "codegen/Tpcds34VCpp.hpp" 11 | #endif
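// The USE_TPCDS_* flags above select which base tables the shared loading
// template compiles in, so each query binary parses only the CSV files its
// query reads (inferred from the build scripts; the template is not shown here).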
12 | 13 | namespace dbtoaster 14 | { 15 | class data_t; 16 | 17 | void print_result(data_t& data) 18 | { 19 | std::cout << "TPC-DS QUERY34: " << data.get_COUNT().count() << std::endl; 20 | } 21 | } 22 | 23 | 24 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpcds/query42.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCDS_DATEDIM 2 | #define USE_TPCDS_STORESALES 3 | #define USE_TPCDS_ITEM 4 | 5 | #ifdef BATCH_MODE 6 | #include "codegen_batch/Tpcds42VCpp.hpp" 7 | #else 8 | #include "codegen/Tpcds42VCpp.hpp" 9 | #endif 10 | 11 | namespace dbtoaster 12 | { 13 | class data_t; 14 | 15 | void print_result(data_t& data) 16 | { 17 | std::cout << "TPC-DS QUERY42: " << data.get___SQL_SUM_AGGREGATE_1().count() << std::endl; 18 | } 19 | } 20 | 21 | 22 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpcds/query43.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCDS_DATEDIM 2 | #define USE_TPCDS_STORESALES 3 | #define USE_TPCDS_STORE 4 | 5 | #ifdef BATCH_MODE 6 | #include "codegen_batch/Tpcds43VCpp.hpp" 7 | #else 8 | #include "codegen/Tpcds43VCpp.hpp" 9 | #endif 10 | 11 | namespace dbtoaster 12 | { 13 | class data_t; 14 | 15 | void print_result(data_t& data) 16 | { 17 | std::cout << "TPC-DS QUERY43: " 18 | << " " << data.get_SUN_SALES().count() 19 | << " " << data.get_MON_SALES().count() 20 | << " " << data.get_TUE_SALES().count() 21 | << " " << data.get_WED_SALES().count() 22 | << " " << data.get_THU_SALES().count() 23 | << " " << data.get_FRI_SALES().count() 24 | << " " << data.get_SAT_SALES().count() 25 | << std::endl; 26 | } 27 | } 28 | 29 | 30 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpcds/query46.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCDS_DATEDIM 2 | #define USE_TPCDS_HOUSEHOLDDEMOGRAPHICS 3 | #define USE_TPCDS_STORESALES 4 | #define USE_TPCDS_CUSTOMER 5 | #define USE_TPCDS_CUSTOMERADDRESS 6 | #define USE_TPCDS_STORE 7 | 8 | #ifdef BATCH_MODE 9 | #include "codegen_batch/Tpcds46VCpp.hpp" 10 | #else 11 | #include "codegen/Tpcds46VCpp.hpp" 12 | #endif 13 | 14 | namespace dbtoaster 15 | { 16 | class data_t; 17 | 18 | void print_result(data_t& data) 19 | { 20 | std::cout << "TPC-DS QUERY46: " << data.get_COUNT().count() << std::endl; 21 | } 22 | } 23 | 24 | 25 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpcds/query52.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCDS_DATEDIM 2 | #define USE_TPCDS_STORESALES 3 | #define USE_TPCDS_ITEM 4 | 5 | #ifdef BATCH_MODE 6 | #include "codegen_batch/Tpcds52VCpp.hpp" 7 | #else 8 | #include "codegen/Tpcds52VCpp.hpp" 9 | #endif 10 | 11 | namespace dbtoaster 12 | { 13 | class data_t; 14 | 15 | void print_result(data_t& data) 16 | { 17 | std::cout << "TPC-DS QUERY52: " << data.get_EXT_PRICE().count() << std::endl; 18 | } 19 | } 20 | 21 | 22 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpcds/query55.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCDS_DATEDIM 2 | #define USE_TPCDS_STORESALES 3 | #define USE_TPCDS_ITEM 4 | 5 | #ifdef BATCH_MODE 6 | #include 
"codegen_batch/Tpcds55VCpp.hpp" 7 | #else 8 | #include "codegen/Tpcds55VCpp.hpp" 9 | #endif 10 | 11 | namespace dbtoaster 12 | { 13 | class data_t; 14 | 15 | void print_result(data_t& data) 16 | { 17 | std::cout << "TPC-DS QUERY55: " << data.get_EXT_PRICE().count() << std::endl; 18 | } 19 | } 20 | 21 | 22 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpcds/query68.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCDS_DATEDIM 2 | #define USE_TPCDS_HOUSEHOLDDEMOGRAPHICS 3 | #define USE_TPCDS_STORESALES 4 | #define USE_TPCDS_CUSTOMER 5 | #define USE_TPCDS_CUSTOMERADDRESS 6 | #define USE_TPCDS_STORE 7 | 8 | #ifdef BATCH_MODE 9 | #include "codegen_batch/Tpcds68VCpp.hpp" 10 | #else 11 | #include "codegen/Tpcds68VCpp.hpp" 12 | #endif 13 | 14 | namespace dbtoaster 15 | { 16 | class data_t; 17 | 18 | void print_result(data_t& data) 19 | { 20 | std::cout << "TPC-DS QUERY68: " << data.get_COUNT().count() << std::endl; 21 | } 22 | } 23 | 24 | 25 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpcds/query7.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCDS_DATEDIM 2 | #define USE_TPCDS_CUSTOMERDEMOGRAPHICS 3 | #define USE_TPCDS_PROMOTION 4 | #define USE_TPCDS_STORESALES 5 | #define USE_TPCDS_ITEM 6 | 7 | 8 | #ifdef BATCH_MODE 9 | #include "codegen_batch/Tpcds7VCpp.hpp" 10 | #else 11 | #include "codegen/Tpcds7VCpp.hpp" 12 | #endif 13 | 14 | namespace dbtoaster 15 | { 16 | class data_t; 17 | 18 | void print_result(data_t& data) 19 | { 20 | std::cout << "TPC-DS QUERY7: " << data.get_AGG1().count() 21 | << " " << data.get_AGG1().count() 22 | << " " << data.get_AGG2().count() 23 | << " " << data.get_AGG3().count() 24 | << " " << data.get_AGG4().count() 25 | << std::endl; 26 | } 27 | } 28 | 29 | 30 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpcds/query73.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCDS_DATEDIM 2 | #define USE_TPCDS_HOUSEHOLDDEMOGRAPHICS 3 | #define USE_TPCDS_STORESALES 4 | #define USE_TPCDS_CUSTOMER 5 | #define USE_TPCDS_STORE 6 | 7 | #ifdef BATCH_MODE 8 | #include "codegen_batch/Tpcds73VCpp.hpp" 9 | #else 10 | #include "codegen/Tpcds73VCpp.hpp" 11 | #endif 12 | 13 | namespace dbtoaster 14 | { 15 | class data_t; 16 | 17 | void print_result(data_t& data) 18 | { 19 | std::cout << "TPC-DS QUERY73: " << data.get_COUNT().count() << std::endl; 20 | } 21 | } 22 | 23 | 24 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpcds/query79.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCDS_DATEDIM 2 | #define USE_TPCDS_HOUSEHOLDDEMOGRAPHICS 3 | #define USE_TPCDS_STORESALES 4 | #define USE_TPCDS_CUSTOMER 5 | #define USE_TPCDS_STORE 6 | 7 | #ifdef BATCH_MODE 8 | #include "codegen_batch/Tpcds79VCpp.hpp" 9 | #else 10 | #include "codegen/Tpcds79VCpp.hpp" 11 | #endif 12 | 13 | namespace dbtoaster 14 | { 15 | class data_t; 16 | 17 | void print_result(data_t& data) 18 | { 19 | std::cout << "TPC-DS QUERY79: " << data.get_COUNT().count() << std::endl; 20 | } 21 | } 22 | 23 | 24 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpch/codegen: 
-------------------------------------------------------------------------------- 1 | codegen_revision_r3408 -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpch/codegen_batch: -------------------------------------------------------------------------------- 1 | codegen_revision_r3408_batch -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpch/query1.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCH_LINEITEM 2 | 3 | #ifdef BATCH_MODE 4 | #include "codegen_batch/Tpch1VCpp.hpp" 5 | #else 6 | #include "codegen/Tpch1VCpp.hpp" 7 | #endif 8 | 9 | 10 | namespace dbtoaster 11 | { 12 | class data_t; 13 | 14 | void print_result(data_t& data) 15 | { 16 | std::cout << "AVG_DISC: " << data.get_AVG_DISC().count() << std::endl; 17 | } 18 | } 19 | 20 | 21 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpch/query10.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCH_CUSTOMER 2 | #define USE_TPCH_LINEITEM 3 | #define USE_TPCH_ORDERS 4 | #define USE_TPCH_NATION 5 | 6 | #ifdef BATCH_MODE 7 | #include "codegen_batch/Tpch10VCpp.hpp" 8 | #else 9 | #include "codegen/Tpch10VCpp.hpp" 10 | #endif 11 | 12 | namespace dbtoaster 13 | { 14 | class data_t; 15 | 16 | void print_result(data_t& data) 17 | { 18 | std::cout << "REVENUE: " << data.get_REVENUE().count() << std::endl; 19 | } 20 | } 21 | 22 | 23 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpch/query11.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCH_PARTSUPP 2 | #define USE_TPCH_SUPPLIER 3 | #define USE_TPCH_NATION 4 | 5 | #ifdef BATCH_MODE 6 | #include "codegen_batch/Tpch11VCpp.hpp" 7 | #else 8 | #include "codegen/Tpch11VCpp.hpp" 9 | #endif 10 | 11 | namespace dbtoaster 12 | { 13 | class data_t; 14 | 15 | void print_result(data_t& data) 16 | { 17 | std::cout << "QUERY11: " << data.get_QUERY11().count() << std::endl; 18 | } 19 | } 20 | 21 | 22 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpch/query12.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCH_ORDERS 2 | #define USE_TPCH_LINEITEM 3 | 4 | #ifdef BATCH_MODE 5 | #include "codegen_batch/Tpch12VCpp.hpp" 6 | #else 7 | #include "codegen/Tpch12VCpp.hpp" 8 | #endif 9 | 10 | namespace dbtoaster 11 | { 12 | class data_t; 13 | 14 | void print_result(data_t& data) 15 | { 16 | std::cout << "HIGH_LINE_COUNT: " << data.get_HIGH_LINE_COUNT().count() << std::endl; 17 | } 18 | } 19 | 20 | 21 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpch/query13.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCH_ORDERS 2 | #define USE_TPCH_CUSTOMER 3 | 4 | #ifdef BATCH_MODE 5 | #include "codegen_batch/Tpch13VCpp.hpp" 6 | #else 7 | #include "codegen/Tpch13VCpp.hpp" 8 | #endif 9 | 10 | namespace dbtoaster 11 | { 12 | class data_t; 13 | 14 | void print_result(data_t& data) 15 | { 16 | std::cout << "CUSTDIST: " << data.get_CUSTDIST().count() << std::endl; 17 | } 18 | } 19 | 20 | 21 | -------------------------------------------------------------------------------- 
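Every query header in src/tpch and src/tpcds follows the same three-part contract: USE_* defines that select base tables, a BATCH_MODE switch over the generated code, and a print_result hook that the shared driver (src/main.cpp in the compile lines above) presumably calls after replaying the input stream. A minimal sketch of that contract for a hypothetical new query; the file names, the RESULT map, and its get_RESULT() accessor are assumptions modeled on the generated-code naming above:

    // queryN.hpp (hypothetical) -- enable only the tables the query reads.
    #define USE_TPCH_LINEITEM

    #ifdef BATCH_MODE
    #include "codegen_batch/TpchNVCpp.hpp"  // batched variant of the generated code
    #else
    #include "codegen/TpchNVCpp.hpp"        // tuple-at-a-time variant
    #endif

    namespace dbtoaster
    {
        class data_t;

        // Called once the stream has been fully replayed; the accessor name
        // is derived from the query's top-level materialized map (assumed here).
        void print_result(data_t& data)
        {
            std::cout << "RESULT: " << data.get_RESULT().count() << std::endl;
        }
    }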
/ddbtoaster/experiments/src/tpch/query14.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCH_LINEITEM 2 | #define USE_TPCH_PART 3 | 4 | #ifdef BATCH_MODE 5 | #include "codegen_batch/Tpch14VCpp.hpp" 6 | #else 7 | #include "codegen/Tpch14VCpp.hpp" 8 | #endif 9 | 10 | namespace dbtoaster 11 | { 12 | class data_t; 13 | 14 | void print_result(data_t& data) 15 | { 16 | std::cout << "PROMO_REVENUE: " << data.get_PROMO_REVENUE() << std::endl; 17 | } 18 | } 19 | 20 | 21 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpch/query15.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCH_LINEITEM 2 | #define USE_TPCH_SUPPLIER 3 | 4 | #ifdef BATCH_MODE 5 | #include "codegen_batch/Tpch15VCpp.hpp" 6 | #else 7 | #include "codegen/Tpch15VCpp.hpp" 8 | #endif 9 | 10 | namespace dbtoaster 11 | { 12 | class data_t; 13 | 14 | void print_result(data_t& data) 15 | { 16 | std::cout << "COUNT: " << data.get_COUNT().count() << std::endl; 17 | } 18 | } 19 | 20 | 21 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpch/query16.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCH_PARTSUPP 2 | #define USE_TPCH_PART 3 | #define USE_TPCH_SUPPLIER 4 | 5 | #ifdef BATCH_MODE 6 | #include "codegen_batch/Tpch16VCpp.hpp" 7 | #else 8 | #include "codegen/Tpch16VCpp.hpp" 9 | #endif 10 | 11 | namespace dbtoaster 12 | { 13 | class data_t; 14 | 15 | void print_result(data_t& data) 16 | { 17 | std::cout << "SUPPLIER_CNT: " << data.get_SUPPLIER_CNT().count() << std::endl; 18 | } 19 | } 20 | 21 | 22 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpch/query17.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCH_PART 2 | #define USE_TPCH_LINEITEM 3 | 4 | #ifdef BATCH_MODE 5 | #include "codegen_batch/Tpch17VCpp.hpp" 6 | #else 7 | #include "codegen/Tpch17VCpp.hpp" 8 | #endif 9 | 10 | namespace dbtoaster 11 | { 12 | class data_t; 13 | 14 | void print_result(data_t& data) 15 | { 16 | std::cout << "AVG_YEARLY: " << data.get_AVG_YEARLY() << std::endl; 17 | } 18 | } 19 | 20 | 21 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpch/query18.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCH_CUSTOMER 2 | #define USE_TPCH_ORDERS 3 | #define USE_TPCH_LINEITEM 4 | 5 | #ifdef BATCH_MODE 6 | #include "codegen_batch/Tpch18VCpp.hpp" 7 | #else 8 | #include "codegen/Tpch18VCpp.hpp" 9 | #endif 10 | 11 | namespace dbtoaster 12 | { 13 | class data_t; 14 | 15 | void print_result(data_t& data) 16 | { 17 | std::cout << "QUERY18: " << data.get_QUERY18().count() << std::endl; 18 | } 19 | } 20 | 21 | 22 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpch/query19.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCH_PART 2 | #define USE_TPCH_LINEITEM 3 | 4 | #ifdef BATCH_MODE 5 | #include "codegen_batch/Tpch19VCpp.hpp" 6 | #else 7 | #include "codegen/Tpch19VCpp.hpp" 8 | #endif 9 | 10 | namespace dbtoaster 11 | { 12 | class data_t; 13 | 14 | void print_result(data_t& data) 15 | { 16 | std::cout << "REVENUE: " << data.get_REVENUE() << 
std::endl; 17 | } 18 | } 19 | 20 | 21 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpch/query2.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCH_PART 2 | #define USE_TPCH_PARTSUPP 3 | #define USE_TPCH_SUPPLIER 4 | #define USE_TPCH_NATION 5 | #define USE_TPCH_REGION 6 | 7 | #ifdef BATCH_MODE 8 | #include "codegen_batch/Tpch2VCpp.hpp" 9 | #else 10 | #include "codegen/Tpch2VCpp.hpp" 11 | #endif 12 | 13 | namespace dbtoaster 14 | { 15 | class data_t; 16 | 17 | void print_result(data_t& data) 18 | { 19 | std::cout << "COUNT: " << data.get_COUNT().count() << std::endl; 20 | } 21 | } 22 | 23 | 24 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpch/query20.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCH_SUPPLIER 2 | #define USE_TPCH_NATION 3 | #define USE_TPCH_PARTSUPP 4 | #define USE_TPCH_PART 5 | #define USE_TPCH_LINEITEM 6 | 7 | #ifdef BATCH_MODE 8 | #include "codegen_batch/Tpch20VCpp.hpp" 9 | #else 10 | #include "codegen/Tpch20VCpp.hpp" 11 | #endif 12 | 13 | namespace dbtoaster 14 | { 15 | class data_t; 16 | 17 | void print_result(data_t& data) 18 | { 19 | std::cout << "COUNT: " << data.get_COUNT().count() << std::endl; 20 | } 21 | } 22 | 23 | 24 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpch/query21.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCH_SUPPLIER 2 | #define USE_TPCH_NATION 3 | #define USE_TPCH_ORDERS 4 | #define USE_TPCH_LINEITEM 5 | 6 | #ifdef BATCH_MODE 7 | #include "codegen_batch/Tpch21VCpp.hpp" 8 | #else 9 | #include "codegen/Tpch21VCpp.hpp" 10 | #endif 11 | 12 | namespace dbtoaster 13 | { 14 | class data_t; 15 | 16 | void print_result(data_t& data) 17 | { 18 | std::cout << "NUMWAIT: " << data.get_NUMWAIT().count() << std::endl; 19 | } 20 | } 21 | 22 | 23 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpch/query22.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCH_CUSTOMER 2 | #define USE_TPCH_ORDERS 3 | 4 | #ifdef BATCH_MODE 5 | #include "codegen_batch/Tpch22VCpp.hpp" 6 | #else 7 | #include "codegen/Tpch22VCpp.hpp" 8 | #endif 9 | 10 | namespace dbtoaster 11 | { 12 | class data_t; 13 | 14 | void print_result(data_t& data) 15 | { 16 | std::cout << "NUMCUST: " << data.get_NUMCUST().count() << std::endl; 17 | } 18 | } 19 | 20 | 21 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpch/query3.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCH_LINEITEM 2 | #define USE_TPCH_ORDERS 3 | #define USE_TPCH_CUSTOMER 4 | 5 | #ifdef BATCH_MODE 6 | #include "codegen_batch/Tpch3VCpp.hpp" 7 | #else 8 | #include "codegen/Tpch3VCpp.hpp" 9 | #endif 10 | 11 | namespace dbtoaster 12 | { 13 | class data_t; 14 | 15 | void print_result(data_t& data) 16 | { 17 | std::cout << "QUERY3: " << data.get_QUERY3().count() << std::endl; 18 | } 19 | } 20 | 21 | 22 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpch/query4.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCH_LINEITEM 2 | #define 
USE_TPCH_ORDERS 3 | 4 | #ifdef BATCH_MODE 5 | #include "codegen_batch/Tpch4VCpp.hpp" 6 | #else 7 | #include "codegen/Tpch4VCpp.hpp" 8 | #endif 9 | 10 | 11 | namespace dbtoaster 12 | { 13 | class data_t; 14 | 15 | void print_result(data_t& data) 16 | { 17 | std::cout << "ORDER_COUNT: " << data.get_ORDER_COUNT().count() << std::endl; 18 | } 19 | } 20 | 21 | 22 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpch/query5.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCH_LINEITEM 2 | #define USE_TPCH_ORDERS 3 | #define USE_TPCH_CUSTOMER 4 | #define USE_TPCH_SUPPLIER 5 | #define USE_TPCH_NATION 6 | #define USE_TPCH_REGION 7 | 8 | #ifdef BATCH_MODE 9 | #include "codegen_batch/Tpch5VCpp.hpp" 10 | #else 11 | #include "codegen/Tpch5VCpp.hpp" 12 | #endif 13 | 14 | namespace dbtoaster 15 | { 16 | class data_t; 17 | 18 | void print_result(data_t& data) 19 | { 20 | std::cout << "REVENUE: " << data.get_REVENUE().count() << std::endl; 21 | } 22 | } 23 | 24 | 25 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpch/query6.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCH_LINEITEM 2 | 3 | #ifdef BATCH_MODE 4 | #include "codegen_batch/Tpch6VCpp.hpp" 5 | #else 6 | #include "codegen/Tpch6VCpp.hpp" 7 | #endif 8 | 9 | namespace dbtoaster 10 | { 11 | class data_t; 12 | 13 | void print_result(data_t& data) 14 | { 15 | std::cout << "REVENUE: " << data.get_REVENUE() << std::endl; 16 | } 17 | } 18 | 19 | 20 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpch/query7.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCH_SUPPLIER 2 | #define USE_TPCH_LINEITEM 3 | #define USE_TPCH_ORDERS 4 | #define USE_TPCH_CUSTOMER 5 | #define USE_TPCH_NATION 6 | 7 | #ifdef BATCH_MODE 8 | #include "codegen_batch/Tpch7VCpp.hpp" 9 | #else 10 | #include "codegen/Tpch7VCpp.hpp" 11 | #endif 12 | 13 | namespace dbtoaster 14 | { 15 | class data_t; 16 | 17 | void print_result(data_t& data) 18 | { 19 | std::cout << "REVENUE: " << data.get_REVENUE().count() << std::endl; 20 | } 21 | } 22 | 23 | 24 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpch/query8.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCH_PART 2 | #define USE_TPCH_SUPPLIER 3 | #define USE_TPCH_LINEITEM 4 | #define USE_TPCH_ORDERS 5 | #define USE_TPCH_CUSTOMER 6 | #define USE_TPCH_NATION 7 | #define USE_TPCH_REGION 8 | 9 | #ifdef BATCH_MODE 10 | #include "codegen_batch/Tpch8VCpp.hpp" 11 | #else 12 | #include "codegen/Tpch8VCpp.hpp" 13 | #endif 14 | 15 | 16 | namespace dbtoaster 17 | { 18 | class data_t; 19 | 20 | void print_result(data_t& data) 21 | { 22 | std::cout << "MKT_SHARE: " << data.get_MKT_SHARE().count() << std::endl; 23 | } 24 | } 25 | 26 | 27 | -------------------------------------------------------------------------------- /ddbtoaster/experiments/src/tpch/query9.hpp: -------------------------------------------------------------------------------- 1 | #define USE_TPCH_PART 2 | #define USE_TPCH_SUPPLIER 3 | #define USE_TPCH_PARTSUPP 4 | #define USE_TPCH_LINEITEM 5 | #define USE_TPCH_ORDERS 6 | #define USE_TPCH_NATION 7 | 8 | #ifdef BATCH_MODE 9 | #include "codegen_batch/Tpch9VCpp.hpp" 10 | #else 11 | 
#include "codegen/Tpch9VCpp.hpp" 12 | #endif 13 | 14 | namespace dbtoaster 15 | { 16 | class data_t; 17 | 18 | void print_result(data_t& data) 19 | { 20 | std::cout << "SUM_PROFIT: " << data.get_SUM_PROFIT().count() << std::endl; 21 | } 22 | } 23 | 24 | 25 | -------------------------------------------------------------------------------- /ddbtoaster/lms/DefaultLMSGen.scala: -------------------------------------------------------------------------------- 1 | package ddbt.codegen 2 | 3 | import ddbt.ast.M3 4 | 5 | /** Stub for conditional compilation, actual implementation in lms/ folder */ 6 | abstract class LMSGen(override val cgOpts: CodeGenOptions) extends CodeGen { 7 | override def apply(s0: M3.System): String = sys.error( 8 | "Set 'ddbt.lms=1' in conf/ddbt.properties to enable LMS code generation" 9 | ) 10 | } 11 | 12 | class LMSScalaGen(cgOpts: CodeGenOptions, override val watch: Boolean = false) extends LMSGen(cgOpts) with IScalaGen { 13 | override def apply(s0: M3.System): String = sys.error( 14 | "Set 'ddbt.lms=1' in conf/ddbt.properties to enable LMS code generation" 15 | ) 16 | } 17 | 18 | class LMSCppGen(cgOpts: CodeGenOptions) extends LMSGen(cgOpts) with ICppGen { 19 | override def apply(s0: M3.System): String = sys.error( 20 | "Set 'ddbt.lms=1' in conf/ddbt.properties to enable LMS code generation" 21 | ) 22 | } 23 | class LMSSparkGen(cgOpts: CodeGenOptions) extends LMSGen(cgOpts) with IScalaGen { 24 | override def apply(s0: M3.System): String = sys.error( 25 | "Set 'ddbt.lms=1' in conf/ddbt.properties to enable LMS code generation" 26 | ) 27 | } -------------------------------------------------------------------------------- /ddbtoaster/lms/build.sbt: -------------------------------------------------------------------------------- 1 | Seq( 2 | // --------- Project information 3 | name := "dbtoaster-lms", 4 | 5 | // --------- Paths 6 | scalaSource in Compile <<= baseDirectory / "." 
7 | ) 8 | 9 | // --------- LMS codegen, enabled with ddbt.lms = 1 in conf/ddbt.properties 10 | { 11 | val prop = new java.util.Properties() 12 | try { prop.load(new java.io.FileInputStream("ddbtoaster/conf/ddbt.properties")) } 13 | catch { case _: Throwable => } 14 | 15 | if (prop.getProperty("ddbt.lms","0") != "1") 16 | Seq( 17 | sources in Compile ~= (_.filter(_.toString.endsWith("DefaultLMSGen.scala"))) 18 | ) 19 | else 20 | Seq( 21 | sources in Compile ~= (_.filter(!_.toString.endsWith("DefaultLMSGen.scala"))), 22 | 23 | // --------- Compilation options 24 | scalaVersion := "2.11.2", 25 | scalaOrganization := "org.scala-lang.virtualized", 26 | scalacOptions ++= Seq("-Yvirtualize"), 27 | 28 | libraryDependencies ++= Seq( 29 | "org.scala-lang.virtualized" % "scala-library" % scalaVersion.value, 30 | "org.scala-lang.virtualized" % "scala-compiler" % scalaVersion.value, 31 | "org.scalariform" %% "scalariform" % "0.2.3", 32 | "EPFL" %% "lms" % "0.3-SNAPSHOT" 33 | ) 34 | ) 35 | } -------------------------------------------------------------------------------- /ddbtoaster/lms/dbtoptimizer/Packages.scala: -------------------------------------------------------------------------------- 1 | package ddbt.lms.dbtoptimizer 2 | 3 | import scala.virtualization.lms.common._ 4 | import scala.virtualization.lms.internal._ 5 | import ddbt.lms.dbtoptimizer._ 6 | import ddbt.lms.dbtoptimizer.lifters._ 7 | import ddbt.lms.oltp.opt.lifters._ 8 | import ddbt.lib.store._ 9 | import scala.reflect.SourceContext 10 | 11 | trait ToasterBoosterOpsPkg extends ScalaOpsPkg with ExtendedExpressions 12 | with SimpleValOps with ImplicitConversionLifters /*with ListContainerOps*/ 13 | /*with K3PersistentCollectionOps*/ with StdFunctionsOps with StoreOps with SEntryOps 14 | 15 | trait ToasterBoosterOpsPkgExp extends ScalaOpsPkgExp with ToasterBoosterOpsPkg 16 | with SimpleValExp /*with ListContainerExp with K3PersistentCollectionExp*/ 17 | with StdFunctionsExp with StoreExp with SEntryExp 18 | 19 | trait ToasterBoosterOpsPkgExpOpt extends ScalaOpsPkgExpOpt with ToasterBoosterOpsPkgExp 20 | /*with K3PersistentCollectionExpOpt*/ with StdFunctionsExpOpt with StoreExpOpt with SEntryExpOpt 21 | 22 | trait ToasterBoosterScalaCodeGenPkg extends ScalaCodeGenPkg with ScalaGenSimpleVal 23 | /*with ScalaGenK3PersistentCollection*/ with ToasterBoosterScalaCodegen with ScalaGenStdFunctions 24 | with ScalaGenSEntry with ScalaGenStore { 25 | val IR: ToasterBoosterOpsPkgExp 26 | import IR._ 27 | } 28 | 29 | trait ToasterBoosterCCodeGenPkg extends ToasterBoosterScalaCodeGenPkg 30 | 31 | trait DSL extends DSLBase with ScalaOpsPkg 32 | with LiftPrimitives with LiftString with LiftVariables 33 | with ToasterBoosterOpsPkg { 34 | var classArgs: List[Sym[_]] = Nil 35 | 36 | def newSStore[E<:Entry:Manifest]():Rep[Store[E]] = { val s = fresh[Store[E]]; collectStore[E](s); classArgs = classArgs :+ s; s } 37 | } 38 | -------------------------------------------------------------------------------- /ddbtoaster/lms/dbtoptimizer/lifters/ImplicitConversionLifters.scala: -------------------------------------------------------------------------------- 1 | package ddbt.lms.dbtoptimizer.lifters 2 | 3 | import scala.virtualization.lms.common._ 4 | import scala.virtualization.lms.internal._ 5 | 6 | trait ImplicitConversionLifters extends CastingOps with IfThenElse with Expressions { 7 | import scala.language.implicitConversions 8 | implicit def boolToLong(b: Rep[Boolean]): Rep[Long] = __ifThenElse[Long](b, unit(1L), unit(0L)) 9 | implicit def boolToDouble(b: 
Rep[Boolean]): Rep[Double] = __ifThenElse[Double](b, unit(1.0), unit(0.0)) 10 | implicit def longToDouble1(l: Rep[Long]): Rep[Double] = l.asInstanceOf[Rep[Double]] 11 | //implicit def longToDouble2(l: Const[Long]): Rep[Double] = unit(l.x.toDouble) 12 | //implicit def doubleToLong1(l: Rep[Double]): Rep[Long] = l.asInstanceOf[Rep[Long]] 13 | //implicit def doubleToLong2(l: Const[Double]): Rep[Long] = unit(l.x.toLong) 14 | implicit def intToDouble1(l: Rep[Int]): Rep[Double] = l.asInstanceOf[Rep[Double]] 15 | //implicit def intToDouble2(l: Const[Int]): Rep[Double] = unit(l.x.toDouble) 16 | //implicit def doubleToInt1(l: Rep[Double]): Rep[Int] = l.asInstanceOf[Rep[Int]] 17 | //implicit def doubleToInt2(l: Const[Double]): Rep[Int] = unit(l.x.toInt) 18 | implicit def intToLong1(l: Rep[Int]): Rep[Long] = l.asInstanceOf[Rep[Long]] 19 | //implicit def intToLong2(l: Const[Int]): Rep[Long] = unit(l.x.toLong) 20 | } 21 | -------------------------------------------------------------------------------- /ddbtoaster/lms/oltp/opt/lifters/SEntryGen.scala: -------------------------------------------------------------------------------- 1 | package ddbt.lms.oltp.opt.lifters 2 | 3 | object SEntryGen { 4 | def main(args: Array[String]): Unit = { 5 | val out = new StringBuilder 6 | for(i <- 2 to 22) { 7 | out.append(genSEntryClass(i)).append('\n') 8 | } 9 | for(i <- 2 to 22) { 10 | out.append(genSEntry2Tuple(i)).append('\n') 11 | } 12 | println(out.toString) 13 | } 14 | def genTemplate1(begin:Int, end:Int, template:String) = (begin to end).map(template.format(_)).mkString(",") 15 | def genTemplate2(begin:Int, end:Int, template:String) = (begin to end).map{x => template.format(x,x)}.mkString(",") 16 | def genSEntryClass(i:Int) = "abstract class SEntry%d[%s] extends SEntry[(%s)]".format(i, genTemplate1(1,i,"T%d:Manifest"), genTemplate1(1,i,"T%d")) 17 | def genSEntry2Tuple(i:Int) = (" implicit def conv2SEntry%d[%s](x: Rep[SEntry%d[%s]])(implicit pos: SourceContext) =\n" + 18 | " (%s)").format(i, genTemplate1(1,i,"T%d:Manifest"), i, genTemplate1(1,i,"T%d"), genTemplate2(1,i, "x.get(unit(%d)).asInstanceOf[Rep[T%d]]")) 19 | } -------------------------------------------------------------------------------- /ddbtoaster/lms/store/Entry.java: -------------------------------------------------------------------------------- 1 | package ddbt.lib.lms.store; 2 | 3 | /** 4 | * Abstract entry that is specialized for each map. 5 | * There is no key/value distinction as it is encoded by the indices. 6 | * Functions cmp() and hash() operate over the projection #i of the tuple; 7 | * this projection is actually never materialized. 
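 * For example, an index declared over the second and third columns would hash
 * and compare only data[1] and data[2], without ever allocating a separate
 * key object (an illustration of this contract; the column choice is arbitrary).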
8 | * 9 | @author TCK 10 | */ 11 | public abstract class Entry { 12 | final Object[] data; 13 | public Entry(int n) { data=new Object[n]; } 14 | abstract public Entry copy(); // returns a copy of the entry, for B-Trees only 15 | abstract public Object[] elements(); 16 | //abstract public boolean zero(); // the tuple can safely be deleted from the map 17 | //abstract public void merge(Entry e); // combine e in this (some kind of aggregation) 18 | 19 | // backward compatibility 20 | //public boolean zero() { return false; } 21 | //public void merge(Entry e) {} // again we create typing issues here 22 | } 23 | -------------------------------------------------------------------------------- /ddbtoaster/pardis/build.sbt: -------------------------------------------------------------------------------- 1 | Seq( 2 | // --------- Project information 3 | name := "dbtoaster-pardis", 4 | 5 | // --------- Compilation options 6 | scalacOptions ++= Seq("-P:continuations:enable"), 7 | 8 | // --------- Paths 9 | scalaSource in Compile <<= baseDirectory / "." 10 | ) 11 | 12 | autoCompilerPlugins := true 13 | addCompilerPlugin("org.scala-lang.plugins" % "scala-continuations-plugin_2.11.8" % "1.0.3") 14 | 15 | // --------- Pardis codegen -- START 16 | { 17 | // generatorSettings 18 | import ch.epfl.data.sc.purgatory.plugin.PurgatoryPlugin._ 19 | 20 | val prop = new java.util.Properties() 21 | try { prop.load(new java.io.FileInputStream("ddbtoaster/conf/ddbt.properties")) } 22 | catch { case _: Throwable => } 23 | 24 | Seq( 25 | // scalaSource in Compile <<= baseDirectory / "pardis", // incorrect; copied from lms 26 | // sourceDirectories in Compile ++= Seq(file("/Users/milllic/Documents/repo/work/merge/DDBToaster-MERGE/ddbtoaster/pardis")), 27 | 28 | libraryDependencies ++= Seq( 29 | "org.scala-lang.plugins" %% "scala-continuations-library" % "1.0.2", 30 | "ch.epfl.data" % "squid-sc-backend_2.11" % "0.1-SNAPSHOT", 31 | "ch.epfl.data" % "sc-pardis-compiler_2.11" % "0.1.4-SNAPSHOT", 32 | "ch.epfl.data" % "sc-pardis-quasi_2.11" % "0.1.4-SNAPSHOT" 33 | ), 34 | outputFolder := prop.getProperty("ddbt.pardis.outputFolder", "ddbtoaster/pardis/lifter"), 35 | inputPackage := prop.getProperty("ddbt.pardis.inputPackage", "ddbt.lib.store"), 36 | outputPackage := prop.getProperty("ddbt.pardis.outputPackage", "ddbt.lib.store.deep") 37 | ) ++ generatorSettings 38 | } 39 | // --------- Pardis codegen -- FINISH -------------------------------------------------------------------------------- /ddbtoaster/pardis/lifter/package.scala: -------------------------------------------------------------------------------- 1 | package ddbt.lib.store 2 | 3 | import ch.epfl.data.sc.pardis.quasi.anf.BaseQuasiExt 4 | import ch.epfl.data.sc.pardis.quasi.engine.QuasiAPI 5 | 6 | package object deep extends QuasiAPI[StoreDSL, BaseQuasiExt] 7 | -------------------------------------------------------------------------------- /ddbtoaster/pardis/microbenchmarks/MB1.scala: -------------------------------------------------------------------------------- 1 | package sc.micro 2 | 3 | import ch.epfl.data.sc.pardis.types.{DoubleType, IntType, StringType} 4 | import ddbt.codegen.TransactionProgram 5 | import ddbt.lib.store.deep.DateIRs.DateType 6 | import ddbt.lib.store.{GenericEntry, Store, StringExtra} 7 | import ddbt.lib.store.deep._ 8 | import ddbt.transformer.{IndexedCols, StoreSchema} 9 | import sc.micro.MicroBenchRunner._ 10 | class MB1 (override val Context : StoreDSL) extends Prog(Context) { 11 | import Sqd.Predef.{anyContextIsEmptyContext => _, _}
12 | import Sqd.Quasicodes._ 13 | import Context.Predef._ 14 | import Context.{__newStoreNamed, typeGenericEntry} 15 | var tbl : Rep[Store[GenericEntry]] = null 16 | val initBlock = reifyBlock({ 17 | tbl = __newStoreNamed[GenericEntry]("customerTbl") 18 | unit(1) 19 | }) 20 | def body = ir { 21 | var customerNames = $(tbl).map(e => GenericEntry("SteNewSEntry", e.get[Double](15)-e.get[Double](17), StringExtra.Substring(e.get[String](4), 0, 5))).filter(e => e.get[Double](1) < 47000.0).map(e => GenericEntry("SteNewSEntry", e.get[String](2))).fold(0)((a, b) => a+1) 22 | // var test = $(tbl).map(e => GenericEntry(10)).map(e => GenericEntry(20)) 23 | // $(tbl).map(e => GenericEntry("SteNewSEntry", e.get[Int](3) * -2, 2, e.get[String](2))).filter(e => e.get[Int](1) > 0).map(e => GenericEntry("SteNewSEntry", e.get[Int](1)*100)).getCopy(0, GenericEntry("SteSampleSEntry", 1)) 24 | // entry += (1, 11) 25 | 1 26 | }.toRep 27 | override def getTransactionProgram: TransactionProgram[Int] = { 28 | val idxCols = new IndexedCols 29 | idxCols.primary = List(1, 2, 3) 30 | // idxCols.fixedrange = List((1, 1, 3001), (2, 1, 11), (3, 1, 100 + 1)) //can't do for each on array index yet 31 | tbl.asInstanceOf[Sym[_]].attributes += idxCols 32 | tbl.asInstanceOf[Sym[_]].attributes += StoreSchema(List(IntType, IntType, IntType, StringType, StringType, StringType, StringType, StringType, StringType, StringType, StringType, StringType, DateType, StringType, DoubleType, DoubleType, DoubleType, DoubleType, IntType, IntType, StringType)) 33 | TransactionProgram(initBlock, List(tbl.asInstanceOf[Sym[_]]), List(("fun1", List(), reifyBlock(body))), Nil, Nil) 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /ddbtoaster/pardis/store/MMultiRes.scala: -------------------------------------------------------------------------------- 1 | package ddbt.lib.store 2 | 3 | import ch.epfl.data.sc.pardis.annotations.{deep, pure, reflect} 4 | 5 | @deep 6 | @reflect[MultiRes] 7 | class MMultiRes { 8 | @pure 9 | def isEmpty: Boolean = ??? 10 | } 11 | -------------------------------------------------------------------------------- /ddbtoaster/pardis/store/MirrorAggregator.scala: -------------------------------------------------------------------------------- 1 | package ddbt.lib.store 2 | 3 | import ch.epfl.data.sc.pardis.annotations._ 4 | 5 | /** 6 | * Created by sachin on 18.03.16. 7 | */ 8 | @deep 9 | @reflect[Aggregator[_]] 10 | trait MirrorAggregator[E <: Entry] extends (E => Unit) { 11 | def apply(e: E): Unit 12 | @read 13 | def result: E 14 | def resultForUpdate: E 15 | } 16 | object MirrorAggregator { 17 | @pure 18 | def min[E <: Entry, R](f: E => R)(implicit order: Ordering[R]) : Aggregator[E] = ??? 19 | @pure 20 | def max[E <: Entry, R](f: E => R)(implicit order: Ordering[R]) : Aggregator[E] = ??? 21 | @pure 22 | def median[E <: Entry, R](f: E => R)(implicit order: Ordering[R]) : Aggregator[E] = ??? 23 | } -------------------------------------------------------------------------------- /ddbtoaster/pardis/store/MirrorBooleanExtra.scala: -------------------------------------------------------------------------------- 1 | package ddbt.lib.store 2 | 3 | import ch.epfl.data.sc.pardis.annotations.{pure, deep} 4 | 5 | /** 6 | * Created by sachin on 22.04.16. 7 | */ 8 | @deep 9 | trait BooleanExtra 10 | 11 | object BooleanExtra { 12 | @pure 13 | def conditional[T](cond: Boolean, ift: T, iff: T): T = ??? 
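  // As with the other @deep mirrors in this directory, only the signature matters:
  // the Purgatory generator lifts it into the deep embedding, so this ??? stub is
  // presumably never executed directly.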
14 | } -------------------------------------------------------------------------------- /ddbtoaster/pardis/store/MirrorDate.scala: -------------------------------------------------------------------------------- 1 | package ddbt.lib.store 2 | 3 | /** 4 | * Created by khayyam on 4/9/15. 5 | */ 6 | import ch.epfl.data.sc.pardis.annotations._ 7 | 8 | @needs[Numeric[_]] 9 | @deep 10 | @reflect[java.util.Date] 11 | class MirrorDate(longValue: Long) { 12 | @pure 13 | def getTime(): Long = ??? 14 | } 15 | -------------------------------------------------------------------------------- /ddbtoaster/pardis/store/MirrorEntryIdx.scala: -------------------------------------------------------------------------------- 1 | package ddbt.lib.store 2 | 3 | import ch.epfl.data.sc.pardis.annotations._ 4 | 5 | /** 6 | * Created by sachin on 15.04.16. 7 | */ 8 | @deep 9 | @needs[GenericEntry] 10 | @reflect[EntryIdx[_]] 11 | abstract class MirrorEntryIdx[E <: Entry] { 12 | @pure 13 | def cmp(e1: E, e2: E): Int 14 | @pure 15 | def hash(e: E): Int 16 | } 17 | 18 | object MirrorEntryIdx { 19 | @pure 20 | def apply[E<:Entry](h: (E => Int), c: ((E, E) => Int), entryidxname: String): EntryIdx[E] = ??? 21 | @pure 22 | def genericOps(cols: Seq[Int]): EntryIdx[GenericEntry] = ??? 23 | @pure 24 | def genericCmp[R](cols: Seq[Int], f: GenericEntry => R): EntryIdx[GenericEntry] = ??? 25 | @pure 26 | def genericFixedRangeOps(colsRange: Seq[(Int, Int, Int)]) : EntryIdx[GenericEntry] = ??? 27 | } 28 | 29 | 30 | // 31 | //@deep 32 | //case class MirrorGenericCmp[R](val f: MirrorGenericEntry => R)(implicit order: Ordering[R]) extends MirrorEntryIdx[MirrorGenericEntry] { 33 | // def hash(e: MirrorGenericEntry): Int = ??? 34 | // def cmp(e1: MirrorGenericEntry, e2: MirrorGenericEntry): Int = ??? 35 | //} -------------------------------------------------------------------------------- /ddbtoaster/pardis/store/MirrorGenericEntry.scala: -------------------------------------------------------------------------------- 1 | package ddbt.lib.store 2 | 3 | import ch.epfl.data.sc.pardis.annotations.{read, write, reflect, deep} 4 | 5 | /** 6 | * Created by khayyam on 4/13/15. 7 | */ 8 | 9 | @deep 10 | @reflect[GenericEntry] 11 | class MirrorGenericEntry { 12 | @write 13 | def update(i: Int, v: Any): Unit = ??? 14 | 15 | def increase(i: Int, v: Any): Unit = ??? 16 | 17 | def +=(i: Int, v: Any): Unit = ??? 18 | 19 | def decrease(i: Int, v: Any): Unit = ??? 20 | 21 | def -=(i: Int, v: Any): Unit = ??? 22 | 23 | @read 24 | def get[E](i: Int): E = ??? 25 | 26 | 27 | // def copy:Entry = ??? 28 | } 29 | 30 | object MirrorGenericEntry { 31 | def apply(ignore: Any, elems: Any*): MirrorGenericEntry = ??? 32 | } -------------------------------------------------------------------------------- /ddbtoaster/pardis/store/MirrorIdx.scala: -------------------------------------------------------------------------------- 1 | package ddbt.lib.store 2 | 3 | import ch.epfl.data.sc.pardis.annotations.{deep, needs, read, reflect} 4 | 5 | /** 6 | * Created by sachin on 02.05.16. 7 | */ 8 | @deep 9 | @reflect[Idx[_]] 10 | @needs[MultiRes] 11 | trait MirrorIdx[E <: Entry] { 12 | def unsafeInsert(e: E): Unit = ??? 13 | def insert(e: E): Unit = ??? 14 | def update(e: E): Unit = ??? 15 | def updateCopyDependent(e: E, ref: E): Unit = ??? 16 | def updateCopy(e: E, primary: Idx[E]): Unit = ??? 17 | @read 18 | def get(key: E): E = ??? 19 | @read 20 | def getCopy(key: E): E = ??? 21 | @read 22 | def getCopyDependent(key: E): E = ??? 23 | def getForUpdate(key: E): E = ??? 
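  // The *Res/*ResMap pairs below split a traversal into "materialize the result
  // set" and "iterate over it"; the MultiResSplitter transformer later in this
  // dump rewrites plain slice/foreach calls into exactly these pairs.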
24 | def foreach(f: (E) => Unit): Unit = ??? 25 | def foreachCopy(f: (E) => Unit): Unit = ??? 26 | def foreachRes(): MultiRes = ??? 27 | def foreachResMap(f: (E) => Unit, res: MultiRes): Unit = ??? 28 | def sliceNoUpdate(key: E, f: E => Unit) : Unit = ??? 29 | def slice(key: E, f: (E) => Unit): Unit = ??? 30 | def sliceRes(key: E): MultiRes = ??? 31 | def sliceResMap(key: E, f: (E) => Unit, res: MultiRes): Unit = ??? 32 | def sliceResMapNoUpd(key: E, f: E => Unit, res:MultiRes): Unit = ??? 33 | def sliceCopy(key: E, f: (E) => Unit): Unit = ??? 34 | def sliceCopyDependent(key: E, f: (E) => Unit): Unit = ??? 35 | def clear(): Unit = ??? 36 | } 37 | -------------------------------------------------------------------------------- /ddbtoaster/pardis/store/StringExtra.scala: -------------------------------------------------------------------------------- 1 | package ddbt.lib.store 2 | 3 | import ch.epfl.data.sc.pardis.annotations.{deep, pure, write} 4 | 5 | /** 6 | * Created by sachin on 12.12.16. 7 | */ 8 | @deep 9 | trait StringExtra 10 | 11 | 12 | object StringExtra { 13 | 14 | def StringNew(len: Int): String = "" 15 | 16 | def StringAppend(str: String, obj: Any): Unit = ??? 17 | 18 | def StringAppendN(str: String, obj: Any, n: Int): Unit = ??? 19 | 20 | @pure 21 | def Substring(str: String, init: Int, len: Int) : String = ??? 22 | 23 | @pure 24 | def StringCompare(str1: String, str2: String): Int = str1.compareToIgnoreCase(str2) 25 | 26 | @write 27 | def StringPrintf(maxSize: Int, f: String, args: Any*): String = f.format(args: _*) 28 | } 29 | -------------------------------------------------------------------------------- /ddbtoaster/pardis/transformer/CTransformer.scala: -------------------------------------------------------------------------------- 1 | package ddbt.transformer 2 | 3 | import ch.epfl.data.sc.pardis.ir.CTypes 4 | import ch.epfl.data.sc.pardis.optimization.TopDownTransformerTraverser 5 | import ddbt.lib.store.deep.StoreDSL 6 | 7 | trait CTransformer extends TopDownTransformerTraverser[StoreDSL] { 8 | val IR: StoreDSL 9 | 10 | import IR._ 11 | import ch.epfl.data.sc.pardis.ir.CNodes._ 12 | 13 | implicit class PointerTypeOps[T](tp: TypeRep[T]) { 14 | def isPointerType: Boolean = tp match { 15 | case x: CTypes.PointerType[_] => true 16 | case _ => false 17 | } 18 | } 19 | 20 | override def transformExp[T: TypeRep, S: TypeRep](exp: Rep[T]): Rep[S] = exp match { 21 | case t: typeOf[_] => typeOf()(apply(t.tp)).asInstanceOf[Rep[S]] 22 | case _ => super.transformExp[T, S](exp) 23 | } 24 | } -------------------------------------------------------------------------------- /ddbtoaster/pardis/transformer/CommonPureExpression.scala: -------------------------------------------------------------------------------- 1 | package ddbt.transformer 2 | 3 | import ch.epfl.data.sc.pardis.utils.TypeUtils._ 4 | import ddbt.lib.store.deep._ 5 | import ch.epfl.data.sc.pardis.optimization.{RecursiveRuleBasedTransformer, RuleBasedTransformer} 6 | import ch.epfl.data.sc.pardis.ir._ 7 | import ch.epfl.data.sc.pardis.types.UnitType 8 | import ddbt.codegen.Optimizer 9 | 10 | class CommonPureExpression(override val IR: StoreDSL) extends RuleBasedTransformer[StoreDSL](IR) { 11 | 12 | import IR._ 13 | 14 | override def optimize[T](node: IR.Block[T])(implicit evidence$1: IR.TypeRep[T]): IR.Block[T] = { 15 | val b = super.optimize(node) 16 | counts.filter(_._2 != 2).map( t => t._1 -> (t._2/2)).foreach(System.err.println) 17 | counts.clear() 18 | b 19 | } 20 | 21 | //TODO: Reset after each block.
Otherwise global vars are captured 22 | val counts = collection.mutable.HashMap[Def[_], Int]() 23 | analysis += rule { 24 | case s if s.isPure && !s.isInstanceOf[PardisLambdaDef] => 25 | val old = counts.getOrElse(s, 0) 26 | // System.err.println(s"$old $s") 27 | counts += (s -> (old + 1)) 28 | () 29 | } 30 | 31 | } 32 | -------------------------------------------------------------------------------- /ddbtoaster/pardis/transformer/Deforestation.scala: -------------------------------------------------------------------------------- 1 | package ddbt.transformer 2 | 3 | import ch.epfl.data.sc.pardis.ir.{PardisLambda, PardisLambda2} 4 | import ch.epfl.data.sc.pardis.optimization.{RecursiveRuleBasedTransformer, RuleBasedTransformer} 5 | import ch.epfl.data.sc.pardis.types.{PardisType, UnitType} 6 | import ddbt.lib.store.deep.StoreDSL 7 | import ddbt.lib.store.Entry 8 | 9 | /** 10 | * Created by sachin on 02.05.16. 11 | */ 12 | 13 | abstract class Collector[T <: Entry](implicit val IR: StoreDSL, implicit val typeT : PardisType[T]) { self => 14 | import IR._ 15 | def apply(f: Rep[T] => Rep[Unit]) : Rep[Unit] 16 | def map[U <: Entry : TypeRep](g: Rep[T] => Rep[U]) = new Collector[U] { 17 | override def apply(f: Rep[U] => Rep[Unit]) = self.apply(g andThen f) 18 | } 19 | def filter(p : Rep[T] => Rep[Boolean]) = new Collector[T] { 20 | override def apply(f: (Rep[T]) => Rep[Unit]) = self.apply((x:Rep[T]) => __ifThenElse(p(x), f(x), unit())(UnitType)) 21 | } 22 | } 23 | 24 | class Deforestation(override val IR: StoreDSL) extends RuleBasedTransformer[StoreDSL](IR) { 25 | 26 | import IR.{EntryType => _, typeNull => _, _} 27 | 28 | def toCollector[T <: Entry](n : Rep[Store[T]])(implicit typeT: PardisType[T]) : Collector[T] = n match { 29 | case Def(sf@StoreFilter(self, Def(PardisLambda(f, _ ,_)))) => toCollector(self)(sf.typeE).filter(f) 30 | case Def(sm@StoreMap(self, Def(PardisLambda(f, _, _)))) => toCollector(self)(sm.typeE).map(f) 31 | case Def(st@StoreNew2()) => new Collector{ 32 | override def apply(f: (Rep[T]) => Rep[Unit]) = n.foreachCopy(doLambda(f)(typeT, UnitType)) 33 | } 34 | } 35 | rewrite += rule { 36 | case sf@StoreForeachCopy(self, Def(PardisLambda(f, _, _))) => toCollector(self)(sf.typeE).apply(f) 37 | case sf@StoreFold(self, zero, Def(PardisLambda2(f, _, _, _))) => 38 | val res = __newVar(zero)(sf.typeU) 39 | toCollector(self)(sf.typeE).apply({ 40 | x: Rep[Entry] => __assign(res, f(__readVar(res)(sf.typeU), x))(sf.typeU) 41 | }) 42 | __readVar(res)(sf.typeU) 43 | } 44 | 45 | } -------------------------------------------------------------------------------- /ddbtoaster/pardis/transformer/InsertNoChecks.scala: -------------------------------------------------------------------------------- 1 | package ddbt.transformer 2 | 3 | import ch.epfl.data.sc.pardis.optimization.RuleBasedTransformer 4 | import ddbt.lib.store.deep.StoreDSL 5 | import ddbt.lib.store.deep.StoreIRs.StoreIndex 6 | 7 | class InsertNoChecks(override val IR: StoreDSL) extends RuleBasedTransformer[StoreDSL](IR) { 8 | import IR._ 9 | rewrite += rule{ 10 | case StoreInsert(self, e) => self.unsafeInsert(e) 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /ddbtoaster/pardis/transformer/MultiResSplitter.scala: -------------------------------------------------------------------------------- 1 | package ddbt.transformer 2 | 3 | import ch.epfl.data.sc.pardis.utils.TypeUtils._ 4 | import ddbt.lib.store.deep._ 5 | import 
ch.epfl.data.sc.pardis.optimization.{RecursiveRuleBasedTransformer, RuleBasedTransformer} 6 | import ch.epfl.data.sc.pardis.ir._ 7 | import ch.epfl.data.sc.pardis.types.UnitType 8 | import ddbt.codegen.Optimizer 9 | 10 | class MultiResSplitter(override val IR: StoreDSL) extends RuleBasedTransformer[StoreDSL](IR) { 11 | 12 | import IR._ 13 | 14 | rewrite += rule { 15 | case IdxSlice(self, key, f) => 16 | val res = self.sliceRes(key) 17 | __ifThenElse(res.isEmpty, unit(), self.sliceResMap(key, f, res))(UnitType) 18 | case IdxSliceNoUpdate(self, key, f) => 19 | val res = self.sliceRes(key) 20 | __ifThenElse(res.isEmpty, unit(), self.sliceResMapNoUpd(key, f, res))(UnitType) 21 | case IdxForeach(self, f) => 22 | val res = self.foreachRes() 23 | __ifThenElse(res.isEmpty, unit(), self.foreachResMap(f, res))(UnitType) 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /ddbtoaster/pardis/transformer/SampleEntryHoister.scala: -------------------------------------------------------------------------------- 1 | package ddbt.transformer 2 | 3 | 4 | import ch.epfl.data.sc.pardis.ir.{PardisStructArg, PardisStruct} 5 | import ch.epfl.data.sc.pardis.optimization.Rule.Statement 6 | import ch.epfl.data.sc.pardis.optimization.RuleBasedTransformer 7 | import ddbt.lib.store.deep.StoreDSL 8 | 9 | /** 10 | * Created by sachin on 02.05.16. 11 | */ 12 | class SampleEntryHoister(override val IR: StoreDSL) extends RuleBasedTransformer[StoreDSL](IR) { 13 | 14 | import IR._ 15 | 16 | val tmpVars = collection.mutable.HashMap[Sym[_], PardisStruct[_]]() 17 | 18 | 19 | analysis += statement { 20 | case sym -> (ent@PardisStruct(tag, elems, methods)) => { 21 | val elems_ = elems.map(arg => PardisStructArg(arg.name, arg.mutable, nullValue(arg.init.tp))) 22 | tmpVars += sym -> PardisStruct(tag, elems_, methods)(ent.tp) 23 | () 24 | } 25 | } 26 | rewrite += statement { 27 | case sym -> (ent@PardisStruct(tag, elems, methods)) if tmpVars.contains(sym) => { 28 | val entry = sym.asInstanceOf[Rep[SEntry]] 29 | elems.foreach(arg => { 30 | implicit val tp = arg.init.tp 31 | if (arg.init != nullValue(tp)) 32 | fieldSetter(entry, arg.name, arg.init) 33 | }) 34 | sym 35 | } 36 | } 37 | 38 | } 39 | -------------------------------------------------------------------------------- /ddbtoaster/pardis/transformer/StoreDCE.scala: -------------------------------------------------------------------------------- 1 | package ddbt.transformer 2 | 3 | 4 | import ch.epfl.data.sc.pardis.optimization.RuleBasedTransformer 5 | import ddbt.lib.store.deep.StoreDSL 6 | 7 | /** 8 | * Created by sachin on 25.04.16. 
9 | */
10 | class StoreDCE(override val IR: StoreDSL) extends RuleBasedTransformer[StoreDSL](IR) {
11 |
12 | import IR._
13 |
14 | val toRemove = collection.mutable.ArrayBuffer[Rep[_]]()
15 | analysis += rule {
16 | case EntryIdxApplyObject(h, c, _) => toRemove += (h, c); ()
17 | }
18 | rewrite += statement {
19 | case sym -> st if toRemove.contains(sym) => ()
20 | //Remove leftovers after hoisting
21 | case sym -> (st: EntryIdxApplyObject[_]) => ()
22 | case sym -> (st: EntryIdxGenericOpsObject) => ()
23 | case sym -> (st: EntryIdxGenericCmpObject[_]) => ()
24 | case sym -> (st: EntryIdxGenericFixedRangeOpsObject) => ()
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/ddbtoaster/pardis/transformer/StructDynamicAccessTransformer.scala:
--------------------------------------------------------------------------------
1 | package ddbt.transformer
2 |
3 | import ch.epfl.data.sc.pardis.ir.Constant
4 | import ch.epfl.data.sc.pardis.optimization.RuleBasedTransformer
5 | import ddbt.lib.store.deep.{StoreDSL, StructDynamicFieldAccess}
6 |
7 | /**
8 | * Created by sachin on 23/3/17.
9 | */
10 | class StructDynamicAccessTransformer(override val IR: StoreDSL) extends RuleBasedTransformer[StoreDSL](IR) {
11 |
12 | import IR._
13 |
14 | rewrite += rule {
15 | case StructDynamicFieldAccess(struct, _, Def(Int$plus2(Constant(i), v))) =>
16 | StructDynamicFieldAccess(struct, (i + 1).toString, v)
17 | }
18 |
19 | }
20 |
--------------------------------------------------------------------------------
/ddbtoaster/pardis/transformer/TreeDumper.scala:
--------------------------------------------------------------------------------
1 |
2 | package ddbt.transformer
3 |
4 | import java.io.FileWriter
5 |
6 | import ch.epfl.data.sc.pardis.ir.Base
7 | import ch.epfl.data.sc.pardis.optimization.TransformerHandler
8 | import ch.epfl.data.sc.pardis.types.PardisType
9 | import ddbt.codegen.prettyprinter.StoreScalaCodeGenerator
10 | import ddbt.lib.store.deep.StoreDSL
11 |
12 | /**
13 | * Factory for creating a transformation phase which does not change the given program, but dumps its representation into a file.
14 | * The main use case is for debugging the transformation phases in a transformation pipeline.
15 | */
16 | object TreeDumper {
17 | /**
18 | * Creates a tree dumper transformation phase.
19 | *
20 | * @param concreteSyntax specifies if the dumped tree should be printed in the concrete syntax form or in the IR form
21 | */
22 | def apply(concreteSyntax: Boolean, name: String = "") = new TransformerHandler {
23 | def apply[Lang <: Base, T: PardisType](context: Lang)(block: context.Block[T]): context.Block[T] = {
24 | if (concreteSyntax) {
25 | val cg = new StoreScalaCodeGenerator(context.asInstanceOf[StoreDSL])
26 | val pw = new FileWriter(s"tree_debug_dump$name.txt", true)
27 | val doc = cg.blockToDocument(block)
28 | doc.format(40, pw)
29 | pw.write("\n-----------------------[END OF BLOCK]------------------------\n")
30 | pw.flush()
31 | } else {
32 | val pw = new FileWriter(s"tree_debug_dump$name.txt", true)
33 | pw.write(block.toString)
34 | pw.write("\n-----------------------[END OF BLOCK]------------------------\n")
35 | pw.flush()
36 | }
37 |
38 | block
39 | }
40 | }
41 | }
42 |
--------------------------------------------------------------------------------
/ddbtoaster/release/README:
--------------------------------------------------------------------------------
1 | Welcome to the DBToaster Binary Distribution
2 |
3 | The layout of this distribution is as follows:
4 |
5 | LICENSE The DBToaster distribution end user license agreement
6 | README This README file
7 | bin/ The dbtoaster compiler binary
8 | docs/ Documentation for DBToaster
9 | examples/code Examples of how to integrate DBToaster with
10 | your own code
11 | examples/data Data for the example queries
12 | examples/queries Example queries in DBT-SQL
13 | examples/queries/simple Illustrations and tests of various features
14 | examples/queries/tpch The full TPC-H benchmark, adapted for DBToaster
15 | examples/queries/finance An algorithmic trading-oriented benchmark
16 | lib/ DBToaster support libraries. These are required
17 | for compiling code generated by dbtoaster
--------------------------------------------------------------------------------
/ddbtoaster/scripts/pushover.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | input=`cat`
4 | dir=`dirname $0`
5 | properties=$dir/pushover.properties
6 |
7 |
8 | if [ -f $properties ]; then .
$properties 9 | message=`echo "$input" | grep "Total number of tests run"` 10 | curl -s \ 11 | -F "token=$APP_TOKEN" \ 12 | -F "user=$USER_KEY" \ 13 | -F "message=$message" \ 14 | https://api.pushover.net/1/messages.json > pushover.log 15 | fi 16 | 17 | echo "$input" 18 | -------------------------------------------------------------------------------- /ddbtoaster/scripts/ramdisk.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # 3 | # This script creates a Mac OS X ramdisk to 'hot' directories 4 | # 5 | 6 | SIZE_MB=512 7 | 8 | cd `dirname $0`; cd ..; 9 | base=`pwd` 10 | mpoint="$base/target"; 11 | 12 | rd_dev=`mount | grep "$mpoint" | sed 's/ .*//g'` 13 | 14 | if [ "$rd_dev" != "" ]; then 15 | if [ "$1" = "" ]; then echo "Disk $rd_dev already mounted" 16 | else 17 | case $OSTYPE in 18 | darwin*) diskutil eject $rd_dev;; 19 | *) echo "Maybe try this command: umount $rd_dev";; 20 | esac; 21 | # remove redirections 22 | # rm "$base/test/gen" 23 | fi 24 | exit 25 | fi 26 | 27 | case $OSTYPE in 28 | darwin*) 29 | rd_size=`expr $SIZE_MB '*' 2048` 30 | rd_name=`basename $mpoint` 31 | rd_dev=`/usr/bin/hdiutil attach -nomount ram://$rd_size` 32 | diskutil erasevolume HFS+ $rd_name $rd_dev >/dev/null 33 | diskutil unmount $rd_dev >/dev/null 34 | mkdir -p $mpoint; 35 | diskutil mount -mountPoint $mpoint $rd_dev 36 | # chflags hidden $mpoint; killall Finder # nohidden 37 | ;; 38 | *) echo 'Sorry, no ramdisk script currently available';; 39 | esac 40 | 41 | # Extra redirections into ramdisk 42 | redirect() { # $1=source $2=rd_folder 43 | dir="$2"; if [ "$dir" = "" ]; then dir=`basename $1`; fi 44 | if [ ! -d "$mpoint/$dir" ]; then mkdir "$mpoint/$dir"; fi 45 | if [ ! -e "$1" ]; then ln -s "$mpoint/$dir" "$1"; fi 46 | } 47 | 48 | #redirect "$base/test/gen" 49 | -------------------------------------------------------------------------------- /ddbtoaster/scripts/regress.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # 3 | # This script wires up all the stuff to test legacy DBToaster results. 4 | # 5 | # regress.sh [-d dataset] query 6 | # 7 | 8 | DBT='bin/dbtoaster_frontend' 9 | #DBT='./dbtoaster_ref' # reference released version 10 | 11 | cd `dirname $0`; cd .. 12 | BOOST="`sed 's/#.*//g' conf/ddbt.properties | grep ddbt.lib_boost | sed 's/.*= *//g'`" 13 | if [ "$BOOST" != "" ]; then BOOST="$BOOST"; export DYLD_LIBRARY_PATH="$BOOST/lib"; BOOST="-I$BOOST/include -L$BOOST/lib"; fi 14 | cd "`sed 's/#.*//g' conf/ddbt.properties | grep ddbt.base_repo | sed 's/.*= *//g'`/dbtoaster/compiler/alpha5" 15 | 16 | # We're now in original DBToaster repository 17 | 18 | d='standard'; if [ "$1" = "-d" ]; then shift; d="$1"; shift; fi 19 | q="test/queries/$1.sql"; if [ ! -f "$q" ]; then echo "query $1 does not exist"; exit; fi 20 | gen() { # $1 = lang, $2=file 21 | printf ' gen'; $DBT -l $1 -O3 -o $2.tmp $q 22 | sed "s/\/standard\//\/$d\//g" $2.tmp > $2; rm $2.tmp 23 | } 24 | 25 | # Scala 26 | printf '[--------------------- Scala:' 27 | gen scala query.scala 28 | printf ' compile' 29 | fsc -cp lib/dbt_scala/dbtlib.jar query.scala lib/dbt_scala/src/org/dbtoaster/RunQuery.scala 30 | echo '. 
----]' 31 | scala -J-Xmx14G -J-Xms14G -J-verbose:gc -cp .:lib/dbt_scala/dbtlib.jar org.dbtoaster.RunQuery 32 | rm -r *.scala org 33 | 34 | # CPP 35 | printf '[--------------------- C++:' 36 | gen cpp query.hpp 37 | printf ' compile' 38 | g++ -O3 -Ilib/dbt_c++ -Llib/dbt_c++ $BOOST -lpthread -ldbtoaster lib/dbt_c++/main.cpp -include query.hpp -o query \ 39 | -lboost_program_options -lboost_serialization -lboost_system -lboost_filesystem -lboost_chrono -lboost_thread 40 | echo '. ----]' 41 | ./query 42 | rm *.hpp query 43 | -------------------------------------------------------------------------------- /ddbtoaster/spark/build.sbt: -------------------------------------------------------------------------------- 1 | Seq( 2 | // --------- Project information 3 | name := "dbtoaster-spark", 4 | 5 | // --------- Paths 6 | scalaSource in Compile <<= baseDirectory / "src", 7 | javaSource in Compile <<= baseDirectory / "src", 8 | sourceDirectory in Compile <<= baseDirectory / "src", 9 | resourceDirectory in Compile <<= baseDirectory / "conf" 10 | ) 11 | 12 | libraryDependencies ++= Seq( 13 | "org.apache.spark" %% "spark-core" % "2.2.0" 14 | ) -------------------------------------------------------------------------------- /ddbtoaster/spark/conf/log4j.properties: -------------------------------------------------------------------------------- 1 | # Set everything to be logged to the console 2 | log4j.rootCategory=WARN, console 3 | log4j.appender.console=org.apache.log4j.ConsoleAppender 4 | log4j.appender.console.target=System.err 5 | log4j.appender.console.layout=org.apache.log4j.PatternLayout 6 | log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n 7 | 8 | # Settings to quiet third party logs that are too verbose 9 | log4j.logger.org.spark-project.jetty=WARN 10 | log4j.logger.org.spark-project.jetty.util.component.AbstractLifeCycle=ERROR 11 | log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=WARN 12 | log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=WARN 13 | log4j.logger.org.apache.parquet=ERROR 14 | log4j.logger.parquet=ERROR 15 | 16 | # SPARK-9183: Settings to avoid annoying messages when looking up nonexistent UDFs in SparkSQL with Hive support 17 | log4j.logger.org.apache.hadoop.hive.metastore.RetryingHMSHandler=FATAL 18 | log4j.logger.org.apache.hadoop.hive.ql.exec.FunctionRegistry=ERROR 19 | -------------------------------------------------------------------------------- /ddbtoaster/spark/conf/spark.config: -------------------------------------------------------------------------------- 1 | spark.config.1000 -------------------------------------------------------------------------------- /ddbtoaster/spark/conf/spark.config.10: -------------------------------------------------------------------------------- 1 | spark.master.url=yarn-cluster 2 | #spark.master.url=yarn-client 3 | #spark.master.url=local[11] 4 | spark.home.dir=/data/lab/dbtoaster/spark 5 | spark.jars= 6 | 7 | #spark.local.dir=/Users/nikolic/Temp 8 | spark.eventLog.dir=hdfs:///user/nikolic/dbtoaster/tmp/spark-events 9 | #spark.eventLog.dir=file:/tmp/spark-events 10 | spark.eventLog.enabled=true 11 | #spark.eventLog.enabled=false 12 | #spark.executor.javaopts= 13 | 14 | spark.executors.num=10 15 | spark.partitions.num=10 16 | spark.driver.memory=140G 17 | spark.executor.memory=140G 18 | spark.executor.cores=2 19 | 20 | dist.input.path=hdfs:///datasets/tpch 21 | log.output.path=hdfs:///user/lab/dbtoaster/experiments/logs 22 | 
-------------------------------------------------------------------------------- /ddbtoaster/spark/conf/spark.config.100: -------------------------------------------------------------------------------- 1 | spark.master.url=yarn-cluster 2 | #spark.master.url=yarn-client 3 | #spark.master.url=local[11] 4 | spark.home.dir=/data/lab/dbtoaster/spark 5 | spark.jars= 6 | 7 | #spark.local.dir=/Users/nikolic/Temp 8 | spark.eventLog.dir=hdfs:///user/nikolic/dbtoaster/tmp/spark-events 9 | #spark.eventLog.dir=file:/tmp/spark-events 10 | spark.eventLog.enabled=true 11 | #spark.eventLog.enabled=false 12 | #spark.executor.javaopts= 13 | 14 | spark.executors.num=99 15 | spark.partitions.num=99 16 | spark.driver.memory=140G 17 | spark.executor.memory=140G 18 | spark.executor.cores=2 19 | 20 | dist.input.path=hdfs:///datasets/tpch 21 | log.output.path=hdfs:///user/lab/dbtoaster/experiments/logs 22 | -------------------------------------------------------------------------------- /ddbtoaster/spark/conf/spark.config.1000: -------------------------------------------------------------------------------- 1 | spark.master.url=yarn-cluster 2 | #spark.master.url=yarn-client 3 | #spark.master.url=local[11] 4 | spark.home.dir=/data/lab/dbtoaster/spark 5 | spark.jars= 6 | 7 | #spark.local.dir=/Users/nikolic/Temp 8 | spark.eventLog.dir=hdfs:///user/nikolic/dbtoaster/tmp/spark-events 9 | #spark.eventLog.dir=file:/tmp/spark-events 10 | spark.eventLog.enabled=true 11 | #spark.eventLog.enabled=false 12 | #spark.executor.javaopts= 13 | 14 | spark.executors.num=990 15 | spark.partitions.num=990 16 | spark.driver.memory=140G 17 | spark.executor.memory=12G 18 | spark.executor.cores=2 19 | 20 | dist.input.path=hdfs:///datasets/tpch 21 | log.output.path=hdfs:///user/lab/dbtoaster/experiments/logs 22 | -------------------------------------------------------------------------------- /ddbtoaster/spark/conf/spark.config.200: -------------------------------------------------------------------------------- 1 | spark.master.url=yarn-cluster 2 | #spark.master.url=yarn-client 3 | #spark.master.url=local[11] 4 | spark.home.dir=/data/lab/dbtoaster/spark 5 | spark.jars= 6 | 7 | #spark.local.dir=/Users/nikolic/Temp 8 | spark.eventLog.dir=hdfs:///user/nikolic/dbtoaster/tmp/spark-events 9 | #spark.eventLog.dir=file:/tmp/spark-events 10 | spark.eventLog.enabled=true 11 | #spark.eventLog.enabled=false 12 | #spark.executor.javaopts= 13 | 14 | spark.executors.num=198 15 | spark.partitions.num=198 16 | spark.driver.memory=140G 17 | spark.executor.memory=80G 18 | spark.executor.cores=2 19 | 20 | dist.input.path=hdfs:///datasets/tpch 21 | log.output.path=hdfs:///user/lab/dbtoaster/experiments/logs 22 | -------------------------------------------------------------------------------- /ddbtoaster/spark/conf/spark.config.25: -------------------------------------------------------------------------------- 1 | spark.master.url=yarn-cluster 2 | #spark.master.url=yarn-client 3 | #spark.master.url=local[11] 4 | spark.home.dir=/data/lab/dbtoaster/spark 5 | spark.jars= 6 | 7 | #spark.local.dir=/Users/nikolic/Temp 8 | spark.eventLog.dir=hdfs:///user/nikolic/dbtoaster/tmp/spark-events 9 | #spark.eventLog.dir=file:/tmp/spark-events 10 | spark.eventLog.enabled=true 11 | #spark.eventLog.enabled=false 12 | #spark.executor.javaopts= 13 | 14 | spark.executors.num=25 15 | spark.partitions.num=25 16 | spark.driver.memory=140G 17 | spark.executor.memory=180G 18 | spark.executor.cores=2 19 | 20 | dist.input.path=hdfs:///datasets/tpch 21 | 
log.output.path=hdfs:///user/lab/dbtoaster/experiments/logs 22 | -------------------------------------------------------------------------------- /ddbtoaster/spark/conf/spark.config.400: -------------------------------------------------------------------------------- 1 | spark.master.url=yarn-cluster 2 | #spark.master.url=yarn-client 3 | #spark.master.url=local[11] 4 | spark.home.dir=/data/lab/dbtoaster/spark 5 | spark.jars= 6 | 7 | #spark.local.dir=/Users/nikolic/Temp 8 | spark.eventLog.dir=hdfs:///user/nikolic/dbtoaster/tmp/spark-events 9 | #spark.eventLog.dir=file:/tmp/spark-events 10 | spark.eventLog.enabled=true 11 | #spark.eventLog.enabled=false 12 | #spark.executor.javaopts= 13 | 14 | spark.executors.num=396 15 | spark.partitions.num=396 16 | spark.driver.memory=140G 17 | spark.executor.memory=39G 18 | spark.executor.cores=2 19 | 20 | dist.input.path=hdfs:///datasets/tpch 21 | log.output.path=hdfs:///user/lab/dbtoaster/experiments/logs 22 | -------------------------------------------------------------------------------- /ddbtoaster/spark/conf/spark.config.50: -------------------------------------------------------------------------------- 1 | spark.master.url=yarn-cluster 2 | #spark.master.url=yarn-client 3 | #spark.master.url=local[11] 4 | spark.home.dir=/data/lab/dbtoaster/spark 5 | spark.jars= 6 | 7 | #spark.local.dir=/Users/nikolic/Temp 8 | spark.eventLog.dir=hdfs:///user/nikolic/dbtoaster/tmp/spark-events 9 | #spark.eventLog.dir=file:/tmp/spark-events 10 | spark.eventLog.enabled=true 11 | #spark.eventLog.enabled=false 12 | #spark.executor.javaopts= 13 | 14 | spark.executors.num=50 15 | spark.partitions.num=50 16 | spark.driver.memory=140G 17 | spark.executor.memory=170G 18 | spark.executor.cores=2 19 | 20 | dist.input.path=hdfs:///datasets/tpch 21 | log.output.path=hdfs:///user/lab/dbtoaster/experiments/logs 22 | -------------------------------------------------------------------------------- /ddbtoaster/spark/conf/spark.config.600: -------------------------------------------------------------------------------- 1 | spark.master.url=yarn-cluster 2 | #spark.master.url=yarn-client 3 | #spark.master.url=local[11] 4 | spark.home.dir=/data/lab/dbtoaster/spark 5 | spark.jars= 6 | 7 | #spark.local.dir=/Users/nikolic/Temp 8 | spark.eventLog.dir=hdfs:///user/nikolic/dbtoaster/tmp/spark-events 9 | #spark.eventLog.dir=file:/tmp/spark-events 10 | spark.eventLog.enabled=true 11 | #spark.eventLog.enabled=false 12 | #spark.executor.javaopts= 13 | 14 | spark.executors.num=594 15 | spark.partitions.num=594 16 | spark.driver.memory=140G 17 | spark.executor.memory=20G 18 | spark.executor.cores=2 19 | 20 | dist.input.path=hdfs:///datasets/tpch 21 | log.output.path=hdfs:///user/lab/dbtoaster/experiments/logs 22 | -------------------------------------------------------------------------------- /ddbtoaster/spark/conf/spark.config.800: -------------------------------------------------------------------------------- 1 | spark.master.url=yarn-cluster 2 | #spark.master.url=yarn-client 3 | #spark.master.url=local[11] 4 | spark.home.dir=/data/lab/dbtoaster/spark 5 | spark.jars= 6 | 7 | #spark.local.dir=/Users/nikolic/Temp 8 | spark.eventLog.dir=hdfs:///user/nikolic/dbtoaster/tmp/spark-events 9 | #spark.eventLog.dir=file:/tmp/spark-events 10 | spark.eventLog.enabled=true 11 | #spark.eventLog.enabled=false 12 | #spark.executor.javaopts= 13 | 14 | spark.executors.num=792 15 | spark.partitions.num=792 16 | spark.driver.memory=140G 17 | spark.executor.memory=20G 18 | spark.executor.cores=2 19 | 20 | 
dist.input.path=hdfs:///datasets/tpch 21 | log.output.path=hdfs:///user/lab/dbtoaster/experiments/logs 22 | -------------------------------------------------------------------------------- /ddbtoaster/spark/conf/spark.config.default: -------------------------------------------------------------------------------- 1 | spark.master.url=yarn-cluster 2 | #spark.master.url=yarn-client 3 | #spark.master.url=local[11] 4 | #spark.home.dir=/data/hadoop/spark-1.5.0-bin-hadoop2.6/ 5 | spark.home.dir=/data/lab/dbtoaster/spark 6 | spark.jars= 7 | 8 | #spark.local.dir=/Users/nikolic/Temp 9 | spark.eventLog.dir=hdfs:///user/nikolic/dbtoaster/tmp/spark-events 10 | #spark.eventLog.dir=file:/tmp/spark-events 11 | spark.eventLog.enabled=true 12 | #spark.eventLog.enabled=false 13 | #spark.executor.javaopts= 14 | 15 | spark.partitions.num=50 16 | spark.driver.memory=40G 17 | spark.executor.memory=80G 18 | spark.executor.cores=2 19 | 20 | dist.input.path=hdfs:///datasets/tpch 21 | log.output.path=hdfs:///user/lab/dbtoaster/experiments/logs 22 | -------------------------------------------------------------------------------- /ddbtoaster/spark/src/lib/LogWriter.scala: -------------------------------------------------------------------------------- 1 | package ddbt.lib.spark 2 | 3 | import org.apache.hadoop.conf.Configuration 4 | import org.apache.hadoop.fs.{ Path, FileSystem, FSDataOutputStream } 5 | 6 | class LogWriter(name: String, outputPath: String) { 7 | 8 | private val fs = FileSystem.get(new Configuration()) 9 | 10 | private val output: FSDataOutputStream = 11 | if (outputPath == null) null 12 | else { 13 | val outputDir = new Path(outputPath) 14 | if (!fs.exists(outputDir)) fs.mkdirs(outputDir) 15 | 16 | val dateFormat = new java.text.SimpleDateFormat("yyyyMMdd_hhmm") 17 | val dateSuffix = dateFormat.format(new java.util.Date()) 18 | val filename = new Path(s"${outputPath}/${name}_${dateSuffix}") 19 | fs.create(filename, true) 20 | } 21 | 22 | def println(s: String, hsync: Boolean = false) = { 23 | if (output != null) { 24 | output.writeUTF(s + "\n") 25 | if (hsync) output.hsync() 26 | } 27 | Predef.println(s) 28 | } 29 | 30 | def flush() = if (output != null) output.hsync() 31 | } 32 | -------------------------------------------------------------------------------- /ddbtoaster/spark/src/lib/store/CharArray.scala: -------------------------------------------------------------------------------- 1 | package ddbt.lib.spark.store 2 | 3 | import java.util.Arrays 4 | 5 | object CharArrayImplicits { 6 | import scala.language.implicitConversions 7 | implicit def charArrayToStr(c: CharArray): String = c.getString 8 | } 9 | 10 | class CharArray(capacity: Int) { 11 | 12 | val charArray = new Array[Char](capacity) 13 | var _size = 0 14 | 15 | def this(str: String) = { 16 | this(str.length) 17 | putString(str) 18 | } 19 | 20 | def size: Int = _size 21 | 22 | def clear(): Unit = _size = 0 23 | 24 | def putString(str: String): CharArray = { 25 | _size = math.min(capacity, str.length) 26 | str.getChars(0, _size, charArray, 0) 27 | this 28 | } 29 | 30 | def getString(): String = new String(charArray, 0, _size) 31 | 32 | def putBytes(bytes: Array[Byte], offset: Int): Unit = { 33 | var endFound = false 34 | var byteOffset = offset 35 | var i = 0 36 | while (i < capacity && !endFound) { 37 | charArray(i) = ByteUtils.bytesToChar(bytes, byteOffset) 38 | endFound = (charArray(i) == 0.toChar) 39 | byteOffset += 2 40 | i += 1 41 | } 42 | if (endFound) { 43 | _size = i - 1 44 | Arrays.fill(charArray, i, capacity, 0.toChar) 45 | } 
46 | else _size = capacity
47 | }
48 |
49 | def getBytes(bytes: Array[Byte], offset: Int): Unit = {
50 | var byteOffset = offset
51 | var i = 0
52 | while (i < _size) {
53 | ByteUtils.charToBytes(charArray(i), bytes, byteOffset)
54 | byteOffset += 2
55 | i += 1
56 | }
57 | Arrays.fill(bytes, byteOffset, offset + (capacity << 1), 0.toByte)
58 | }
59 |
60 | def equals(other: CharArray): Boolean = {
61 | if (this._size != other._size) false
62 | else {
63 | var result = true
64 | var i = 0
65 | while (i < this._size && result) {
66 | result = (this.charArray(i) == other.charArray(i))
67 | i += 1
68 | }
69 | result
70 | }
71 | }
72 | }
--------------------------------------------------------------------------------
/ddbtoaster/spark/src/lib/store/Index.java:
--------------------------------------------------------------------------------
1 | package ddbt.lib.spark.store;
2 |
3 | import scala.Function1;
4 | import scala.Unit;
5 |
6 | /**
7 | * Index interface. By default it emits a warning when an unsupported function
8 | * is called. Specific implementations are below.
9 | *
10 | * @author TCK, Milos Nikolic
11 | */
12 | public abstract class Index<E extends MapEntry> implements IndexOperations<E> {
13 |
14 | protected final int indexId;
15 | protected final boolean unique;
16 | protected int size;
17 |
18 | Index(int indexId, boolean unique) {
19 | this.indexId = indexId;
20 | this.unique = unique;
21 | this.size = 0;
22 | }
23 |
24 | protected void w(String n) {
25 | System.out.println(
26 | this.getClass().getName() + ": " + n + " not supported");
27 | }
28 |
29 | public void unsafeInsert(E e) {
30 | w("unsafeInsert");
31 | }
32 |
33 | public void insert(E e) {
34 | w("insert");
35 | }
36 |
37 | public void delete(E e) {
38 | w("delete");
39 | }
40 |
41 | public void update(E e) {
42 | w("update");
43 | } // reposition the entry if key/hash modified
44 |
45 | public E get(E key) {
46 | w("get");
47 | return null;
48 | } // returns the first element only
49 |
50 | public void foreach(Function1<E, Unit> f) {
51 | w("foreach");
52 | } // on all elements; warning: what about reordering updates?
53 |
54 | public void slice(E key, Function1<E, Unit> f) {
55 | w("slice");
56 | } // foreach on a slice
57 |
58 | public void range(E min, E max, boolean withMin, boolean withMax,
59 | Function1<E, Unit> f) {
60 | w("range");
61 | }
62 |
63 | public void clear() {
64 | w("clear");
65 | }
66 |
67 | public void compact() {
68 | w("compact");
69 | }
70 |
71 | public String info() {
72 | return this.getClass().getName() +
73 | "(" + indexId + "," + (unique ? "unique" : "multiple") + ")";
74 | }
75 | }
76 |
--------------------------------------------------------------------------------
/ddbtoaster/spark/src/lib/store/IndexOperations.java:
--------------------------------------------------------------------------------
1 | package ddbt.lib.spark.store;
2 |
3 | public interface IndexOperations<E extends MapEntry> {
4 | public int cmp(E e1, E e2); // key comparison between entries
5 | public int hash(E e); // hash function for Hash, index for Array
6 |
7 | // Note: The hash function must take care of shuffling LSBs enough, no
8 | // re-shuffling is done in the Store. Some indices (IdxDirect) require
9 | // order(entries)=order(hash(entries)) to work correctly.
10 | } -------------------------------------------------------------------------------- /ddbtoaster/spark/src/lib/store/KryoSerializable.scala: -------------------------------------------------------------------------------- 1 | package ddbt.lib.spark.store 2 | 3 | import com.esotericsoftware.kryo.io.{Input, Output} 4 | 5 | trait KryoSerializable { 6 | 7 | def write(out: Output): Unit 8 | 9 | def read(in: Input): Unit 10 | } -------------------------------------------------------------------------------- /ddbtoaster/spark/src/lib/store/LogStore.scala: -------------------------------------------------------------------------------- 1 | package ddbt.lib.spark.store 2 | 3 | trait ReadStore[E <: MapEntry] { this: ColumnarStore => 4 | def foreach(f: E => Unit): Unit 5 | def size: Int = buffers.headOption.map(_.size).getOrElse(-1) 6 | } 7 | 8 | trait WriteStore[E <: MapEntry] extends ReadStore[E] { this: ColumnarStore => 9 | def +=(e: E): Unit 10 | def clear(): Unit = buffers foreach { _.clear() } 11 | def size_=(_size: Int) = buffers foreach { _.size = _size } 12 | } 13 | 14 | class ColumnarStore(val buffers: Array[Buffer]) extends Serializable 15 | 16 | class ColumnarPartition(val id: Int, val buffers: Array[Buffer]) extends Serializable 17 | 18 | object ColumnarPartition { 19 | import com.esotericsoftware.kryo.io.{Input, Output} 20 | 21 | def write(out: Output, store: ColumnarPartition) = { 22 | out.writeInt(store.id) 23 | out.writeInt(store.buffers.length) 24 | store.buffers foreach { b => Buffer.write(out, b) } 25 | } 26 | 27 | def read(in: Input): ColumnarPartition = { 28 | val id = in.readInt() 29 | val length = in.readInt() 30 | val buffers = Array.fill[Buffer](length)(Buffer.read(in)) 31 | new ColumnarPartition(id, buffers) 32 | } 33 | } 34 | 35 | class PartitionContainer[E <: MapEntry]( 36 | val partitions: Array[_ <: ColumnarStore with WriteStore[E]]) extends Serializable { 37 | def +=(e: E): Unit = partitions.foreach(_ += e) 38 | 39 | def foreach(f: E => Unit): Unit = partitions.foreach(_.foreach(f)) 40 | 41 | def clear(): Unit = partitions foreach { _.clear() } 42 | } 43 | 44 | 45 | class StoreWrapper( 46 | val id: Int, 47 | val lArray: Array[Long] = Array[Long](), 48 | val dArray: Array[Double] = Array[Double](), 49 | val pArray: Array[ColumnarPartition] = Array[ColumnarPartition]()) extends Serializable 50 | -------------------------------------------------------------------------------- /ddbtoaster/spark/src/lib/store/MapEntry.java: -------------------------------------------------------------------------------- 1 | package ddbt.lib.spark.store; 2 | 3 | /** 4 | * Abstract entry that is specialized for each map. There is no key/value 5 | * distinction as it is encoded by the indices. 
6 | *
7 | * @author TCK, Milos Nikolic
8 | */
9 | public abstract class MapEntry {
10 |
11 | final Object[] indexRefs;
12 |
13 | public MapEntry(int n) {
14 | indexRefs = new Object[n];
15 | }
16 | }
--------------------------------------------------------------------------------
/ddbtoaster/srccpp/driver/compatibility.hpp:
--------------------------------------------------------------------------------
1 | #ifndef DRIVER_COMPATIBILITY_HPP
2 | #define DRIVER_COMPATIBILITY_HPP
3 |
4 | // Defining C++14 specific stuff if not present (C++11 or lower)
5 |
6 | #if ((defined(_MSVC_LANG) && _MSVC_LANG < 201402L) || __cplusplus < 201402L)
7 |
8 | #include <memory>
9 | #include <tuple>
10 |
11 | template <std::size_t I, class T>
12 | using tuple_element_t = typename std::tuple_element<I, T>::type;
13 |
14 | template <class T, T... Ints>
15 | struct integer_sequence {
16 | typedef T value_type;
17 | static constexpr std::size_t size() { return sizeof...(Ints); }
18 | };
19 |
20 | template <std::size_t... Ints>
21 | using index_sequence = integer_sequence<std::size_t, Ints...>;
22 |
23 | template <class T, T N, T... Ints>
24 | struct make_integer_sequence : make_integer_sequence<T, N - 1, N - 1, Ints...> {};
25 |
26 | template <class T, T... Ints>
27 | struct make_integer_sequence<T, 0, Ints...> : integer_sequence<T, Ints...> {};
28 |
29 | template <std::size_t N>
30 | using make_index_sequence = make_integer_sequence<std::size_t, N>;
31 |
32 | template <class... T>
33 | using index_sequence_for = make_index_sequence<sizeof...(T)>;
34 |
35 | template <class T, class... Args>
36 | std::unique_ptr<T> make_unique(Args&&... args) {
37 | return std::unique_ptr<T>(new T(std::forward<Args>(args)...));
38 | }
39 |
40 | #endif
41 |
42 | #endif /* DRIVER_COMPATIBILITY_HPP */
--------------------------------------------------------------------------------
/ddbtoaster/srccpp/driver/main.cpp:
--------------------------------------------------------------------------------
1 | #include <iostream>
2 | #include <cstring>
3 | #include <string>
4 | #include "platform.hpp"
5 | #include "runtime_opts.hpp"
6 | #include "application.hpp"
7 |
8 | using namespace std;
9 |
10 | void showHelp();
11 |
12 | void showHelp() {
13 | cout << "\nRuntime options:\n";
14 | cout << "---------------\n";
15 | cout << " --help | -h \t\t show help message\n";
16 | cout << " --num-runs <arg> \t number of runs (short -r)\n";
17 | cout << " --batch-size <arg> \t update batch size (short -b)\n";
18 | cout << " --no-output \t\t omit printing final result\n";
19 | cout << " --preload \t\t preload input into memory before streaming\n";
20 | cout << " --log-count <arg> \t log tuple count every [arg] updates\n";
21 | cout << " << endl;
22 | }
23 |
24 | int main(int argc, char** argv) {
25 |
26 | RuntimeOpts opts;
27 |
28 | for (int i = 0; i < argc; i++) {
29 | if (strcmp(argv[i], "-h") == 0 || strcmp(argv[i], "--help") == 0) {
30 | showHelp();
31 | exit(0);
32 | }
33 | else if ((strcmp(argv[i], "--num-runs") == 0 ||
34 | strcmp(argv[i], "-r") == 0) && i + 1 < argc) {
35 | opts.num_runs = stoul(argv[++i]);
36 | }
37 | else if ((strcmp(argv[i], "--batch-size") == 0 ||
38 | strcmp(argv[i], "-b") == 0) && i + 1 < argc) {
39 | opts.batch_size = stoul(argv[++i]);
40 | }
41 | else if (strcmp(argv[i], "--no-output") == 0) {
42 | opts.print_result = false;
43 | }
44 | else if (strcmp(argv[i], "--preload") == 0) {
45 | opts.preload_input = true;
46 | }
47 | else if (strcmp(argv[i], "--log-count") == 0 && i + 1 < argc) {
48 | opts.log_count = stoul(argv[++i]);
49 | }
50 | }
51 |
52 | // Code runs on the 3rd CPU by default
53 | setAffinity(2);
54 |
55 | Application app(opts);
56 | app.run();
57 |
58 | return 0;
59 | }
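The compatibility header above backfills the pieces of C++14's integer-sequence machinery that the driver relies on when compiled as C++11. A minimal sketch of how the polyfilled index_sequence enables tuple unpacking (hypothetical usage, not a file from the repository; printTuple and printAll are illustrative names):

    #include <iostream>
    #include <tuple>
    #include "compatibility.hpp"

    // Expand a tuple into a whitespace-separated print; the array initializer
    // forces left-to-right evaluation of the pack expansion.
    template <class Tuple, std::size_t... Is>
    void printTuple(const Tuple& t, index_sequence<Is...>) {
        int expand[] = { (std::cout << std::get<Is>(t) << ' ', 0)... };
        (void) expand;
    }

    template <class... Ts>
    void printAll(const std::tuple<Ts...>& t) {
        printTuple(t, index_sequence_for<Ts...>{});  // indices 0 .. N-1
    }

    int main() {
        printAll(std::make_tuple(1, 2.5, "q3"));  // prints: 1 2.5 q3
        return 0;
    }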
--------------------------------------------------------------------------------
/ddbtoaster/srccpp/driver/ordered_event.hpp:
--------------------------------------------------------------------------------
1 | #ifndef DRIVER_ORDERED_EVENT_HPP
2 | #define DRIVER_ORDERED_EVENT_HPP
3 |
4 | #include <limits>
5 | #include "event.hpp"
6 |
7 | using namespace dbtoaster;
8 |
9 | typedef size_t OrderType;
10 |
11 | constexpr OrderType kInvalidOrder = std::numeric_limits<OrderType>::max();
12 |
13 | struct OrderedEvent : Event {
14 | OrderedEvent() : Event(), order(kInvalidOrder) { }
15 |
16 | OrderedEvent(OrderType t_order, RelationId t_id, EventType t_tp, MessageBasePtr t_msg)
17 | : Event(t_id, t_tp, std::move(t_msg)), order(t_order) { }
18 |
19 | bool operator<(const OrderedEvent& other) const {
20 | return this->order > other.order; // higher order, lower priority
21 | }
22 |
23 | OrderType order;
24 | };
25 |
26 | #endif /* DRIVER_ORDERED_EVENT_HPP */
--------------------------------------------------------------------------------
/ddbtoaster/srccpp/driver/platform.hpp:
--------------------------------------------------------------------------------
1 | #ifndef DRIVER_PLATFORM_HPP
2 | #define DRIVER_PLATFORM_HPP
3 |
4 | #include <iostream>
5 |
6 | void setAffinity(int cpu);
7 |
8 | //-----------------------------------------------------------------------------
9 | // Microsoft Visual Studio
10 |
11 | #if defined(_MSC_VER)
12 |
13 | #include <windows.h>
14 |
15 | void setAffinity(int cpu) {
16 | SetProcessAffinityMask(GetCurrentProcess(), cpu);
17 | SetThreadPriority(GetCurrentThread(), THREAD_PRIORITY_HIGHEST);
18 | }
19 |
20 | //-----------------------------------------------------------------------------
21 | // Other compilers
22 |
23 | #else // defined(_MSC_VER)
24 |
25 | #include <sched.h>
26 |
27 | void setAffinity(int cpu) {
28 | #if !defined(__CYGWIN__) && !defined(__APPLE__)
29 | cpu_set_t mask;
30 |
31 | CPU_ZERO(&mask);
32 |
33 | CPU_SET(cpu, &mask);
34 |
35 | if (sched_setaffinity(0, sizeof(mask), &mask) == -1) {
36 | std::cerr << "WARNING: Could not set CPU affinity" << std::endl;
37 | }
38 | #endif
39 | }
40 |
41 | #endif // !defined(_MSC_VER)
42 |
43 | #endif /* DRIVER_PLATFORM_HPP */
--------------------------------------------------------------------------------
/ddbtoaster/srccpp/driver/runtime_opts.hpp:
--------------------------------------------------------------------------------
1 | #ifndef DRIVER_RUNTIME_OPTS_HPP
2 | #define DRIVER_RUNTIME_OPTS_HPP
3 |
4 | struct RuntimeOpts {
5 | RuntimeOpts() : num_runs(1),
6 | batch_size(0),
7 | print_result(true),
8 | preload_input(false),
9 | log_count(0) { }
10 |
11 | size_t num_runs;
12 | size_t batch_size;
13 | bool print_result;
14 | bool preload_input;
15 | size_t log_count;
16 | };
17 |
18 | #endif /* DRIVER_RUNTIME_OPTS_HPP */
--------------------------------------------------------------------------------
/ddbtoaster/srccpp/driver/stopwatch.hpp:
--------------------------------------------------------------------------------
1 | #ifndef DRIVER_STOPWATCH_HPP
2 | #define DRIVER_STOPWATCH_HPP
3 |
4 | #include <cmath>
5 | #include <sys/time.h>
6 |
7 | class Stopwatch {
8 | public:
9 | Stopwatch() : start_time(0), end_time(0) { }
10 |
11 | void restart() {
12 | timeval curr_time;
13 | gettimeofday(&curr_time, nullptr);
14 | start_time = curr_time.tv_sec * 1000 + curr_time.tv_usec / 1000;
15 | }
16 |
17 | void stop() {
18 | timeval curr_time;
19 | gettimeofday(&curr_time, nullptr);
20 | end_time = curr_time.tv_sec * 1000 + curr_time.tv_usec / 1000;
21 | }
22 |
23 | long elapsedMilliSec() {
24 | return end_time - start_time;
25 | }
26 |
27 | long elapsedSec() {
28 | return std::lround((end_time - start_time) / 1000.0);
29 | }
30 |
31 | private:
32 | long start_time;
33 | long end_time;
34 | };
35 | #endif /* DRIVER_STOPWATCH_HPP */
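A minimal usage sketch for the millisecond-resolution Stopwatch above (hypothetical; replayTrace stands in for whatever workload is being timed):

    #include <iostream>
    #include "stopwatch.hpp"

    void replayTrace();  // placeholder for the measured workload

    int main() {
        Stopwatch sw;
        sw.restart();            // snapshot via gettimeofday(), in ms
        replayTrace();
        sw.stop();
        std::cout << "run took " << sw.elapsedMilliSec() << " ms (~"
                  << sw.elapsedSec() << " s, rounded)" << std::endl;
        return 0;
    }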
--------------------------------------------------------------------------------
/ddbtoaster/srccpp/lib/date_format.hpp:
--------------------------------------------------------------------------------
1 | #ifndef DBTOASTER_DATEFORMAT_HPP
2 | #define DBTOASTER_DATEFORMAT_HPP
3 |
4 | #include "date_type.hpp"
5 | #include "utils.hpp"
6 |
7 | namespace dbtoaster {
8 |
9 | // Expected date format YYYY-MM-DD
10 | struct DateFormat {
11 | static constexpr DateType parse(const char* str) {
12 | return DateType(
13 | parseNumber(str, find('-', str)),
14 | parseNumber(find('-', str) + 1, find('-', find('-', str) + 1)),
15 | parseNumber(find('-', find('-', str) + 1) + 1)
16 | );
17 | }
18 |
19 | private:
20 | static constexpr const char* find(char c, const char* str, size_t len) {
21 | return (len == 0) ? throw "Character not found" :
22 | ((str[0] == c) ? str : find(c, str + 1, len - 1));
23 | }
24 |
25 | static constexpr const char* find(char c, const char* str) {
26 | return find(c, str, dbtoaster::utils::stringLength(str));
27 | }
28 |
29 | static constexpr size_t parseDigit(const char* str, size_t i) {
30 | return (str[i] >= '0' && str[i] <= '9') ?
31 | static_cast<size_t>(str[i] - '0') :
32 | throw "Error: digit format";
33 | }
34 |
35 | static constexpr size_t parseNumber(const char* str, size_t i) {
36 | return (i == 0) ?
37 | parseDigit(str, i) :
38 | parseNumber(str, i - 1) * 10 + parseDigit(str, i);
39 | }
40 |
41 | static constexpr size_t parseNumber(const char* str) {
42 | return parseNumber(str, dbtoaster::utils::stringLength(str) - 1);
43 | }
44 |
45 | static constexpr size_t parseNumber(const char* start, const char* end) {
46 | return parseNumber(start, static_cast<size_t>(end - start - 1));
47 | }
48 | };
49 |
50 | }
51 | #endif /* DBTOASTER_DATEFORMAT_HPP */
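Since parse() and the DateType accessors are constexpr, a date literal can be validated entirely at compile time, and a malformed literal fails the build (the throw branch is not a constant expression) instead of failing at run time. A minimal sketch (hypothetical usage, not a repository file):

    #include "date_format.hpp"

    using dbtoaster::DateFormat;
    using dbtoaster::DateType;

    // Evaluated by the compiler; nothing remains of this at run time.
    constexpr DateType d = DateFormat::parse("1997-03-15");
    static_assert(d.getYear() == 1997, "year mismatch");
    static_assert(d.getMonth() == 3 && d.getDay() == 15, "month/day mismatch");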
--------------------------------------------------------------------------------
/ddbtoaster/srccpp/lib/date_type.hpp:
--------------------------------------------------------------------------------
1 | #ifndef DBTOASTER_DATETYPE_HPP
2 | #define DBTOASTER_DATETYPE_HPP
3 |
4 | #include <cstdint>
5 |
6 | namespace dbtoaster {
7 |
8 | struct DateType {
9 | public:
10 | constexpr DateType(uint16_t t_year, uint8_t t_month, uint8_t t_day)
11 | : day(t_day), month(t_month), year(t_year) { }
12 |
13 | constexpr DateType() : day(0), month(0), year(0) { }
14 |
15 | constexpr uint16_t getYear() const { return year; }
16 |
17 | constexpr uint8_t getMonth() const { return month; }
18 |
19 | constexpr uint8_t getDay() const { return day; }
20 |
21 | constexpr uint32_t getNumeric() const { return numeric; }
22 |
23 | friend constexpr bool operator==(const DateType& d1, const DateType& d2);
24 | friend constexpr bool operator!=(const DateType& d1, const DateType& d2);
25 | friend constexpr bool operator< (const DateType& d1, const DateType& d2);
26 | friend constexpr bool operator<=(const DateType& d1, const DateType& d2);
27 | friend constexpr bool operator> (const DateType& d1, const DateType& d2);
28 | friend constexpr bool operator>=(const DateType& d1, const DateType& d2);
29 |
30 | private:
31 | union {
32 | struct {
33 | uint8_t day;
34 | uint8_t month;
35 | uint16_t year;
36 | };
37 | uint32_t numeric;
38 | };
39 | };
40 |
41 | inline constexpr bool operator==(const DateType& d1, const DateType& d2) {
42 | return d1.numeric == d2.numeric;
43 | }
44 |
45 | inline constexpr bool operator!=(const DateType& d1, const DateType& d2) {
46 | return d1.numeric != d2.numeric;
47 | }
48 |
49 | inline constexpr bool operator< (const DateType& d1, const DateType& d2) {
50 | return d1.numeric < d2.numeric;
51 | }
52 |
53 | inline constexpr bool operator<=(const DateType& d1, const DateType& d2) {
54 | return d1.numeric <= d2.numeric;
55 | }
56 |
57 | inline constexpr bool operator> (const DateType& d1, const DateType& d2) {
58 | return d1.numeric > d2.numeric;
59 | }
60 |
61 | inline constexpr bool operator>=(const DateType& d1, const DateType& d2) {
62 | return d1.numeric >= d2.numeric;
63 | }
64 |
65 | }
66 |
67 | #endif /* DBTOASTER_DATETYPE_HPP */
--------------------------------------------------------------------------------
/ddbtoaster/srccpp/lib/event.hpp:
--------------------------------------------------------------------------------
1 | #ifndef DBTOASTER_EVENT_HPP
2 | #define DBTOASTER_EVENT_HPP
3 |
4 | #include <cstdint>
5 | #include "relation.hpp"
6 | #include "message.hpp"
7 |
8 | namespace dbtoaster {
9 |
10 | enum class EventType : uint32_t {
11 | kNotDefined = 0,
12 | kInsertTuple,
13 | kDeleteTuple,
14 | kBatchUpdate
15 | };
16 |
17 | struct Event {
18 | Event() : relation_id(kInvalidRelationId), event_type(EventType::kNotDefined) { }
19 |
20 | Event(RelationId rid, EventType tp, MessageBasePtr msg)
21 | : relation_id(rid), event_type(tp), message(std::move(msg)) { }
22 |
23 | bool isEmpty() { return message == nullptr; }
24 |
25 | union {
26 | struct {
27 | RelationId relation_id;
28 | EventType event_type;
29 | };
30 | uint64_t id;
31 | };
32 | MessageBasePtr message;
33 |
34 | static constexpr uint64_t getId(RelationId r, EventType t) {
35 | return (static_cast<uint64_t>(t) << 32) | r;
36 | }
37 | };
38 |
39 | static_assert(sizeof(MessageBasePtr) + 8 == sizeof(Event), "Unexpected event type size");
40 |
41 | }
42 |
43 | #endif /* DBTOASTER_EVENT_HPP */
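The anonymous union gives each Event a packed 64-bit view (id) over its two 32-bit fields, and getId() computes the same packing as a constant expression, so dispatch code can compare a single integer instead of two fields. A small sketch of that invariant (hypothetical usage; it assumes the little-endian layout the packed-union trick relies on):

    #include <cassert>
    #include "event.hpp"

    using namespace dbtoaster;

    int main() {
        // Relation id 3 is an arbitrary example value; no payload attached.
        Event e(3, EventType::kInsertTuple, nullptr);
        // One 64-bit comparison replaces two field comparisons.
        assert(e.id == Event::getId(3, EventType::kInsertTuple));
        return 0;
    }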
--------------------------------------------------------------------------------
/ddbtoaster/srccpp/lib/macro.hpp:
--------------------------------------------------------------------------------
1 | #ifndef DBTOASTER_MACRO_HPP
2 | #define DBTOASTER_MACRO_HPP
3 |
4 | #define STRING(s) #s
5 |
6 | //-----------------------------------------------------------------------------
7 | // Microsoft Visual Studio
8 |
9 | #if defined(_MSC_VER)
10 |
11 | #define INLINE inline
12 | #define FORCE_INLINE __forceinline
13 | #define NEVER_INLINE __declspec(noinline)
14 |
15 | //-----------------------------------------------------------------------------
16 | // Other compilers
17 |
18 | #else // defined(_MSC_VER)
19 |
20 | #define INLINE inline
21 | #define FORCE_INLINE inline __attribute__((always_inline))
22 | #define NEVER_INLINE __attribute__((noinline))
23 |
24 | #endif // !defined(_MSC_VER)
25 |
26 | #endif /* DBTOASTER_MACRO_HPP */
--------------------------------------------------------------------------------
/ddbtoaster/srccpp/lib/map_type.hpp:
--------------------------------------------------------------------------------
1 | #ifndef DBTOASTER_MAP_HPP
2 | #define DBTOASTER_MAP_HPP
3 |
4 | #include <array>
5 | #include <stdexcept>
6 | #include <utility>
7 | #include "utils.hpp"
8 |
9 | namespace dbtoaster {
10 |
11 | template <size_t N> struct Index_{};
12 |
13 | template <class T>
14 | struct EqualTo {
15 | constexpr bool operator()(const T& x, const T& y) const {
16 | return x == y;
17 | }
18 | };
19 |
20 | template <>
21 | struct EqualTo<const char*> {
22 | constexpr bool operator()(const char* x, const char* y) const {
23 | return dbtoaster::utils::stringEqual(x, y);
24 | }
25 | };
26 |
27 | template <class Key, class Value, size_t N,
28 | class KeyEqual = EqualTo<Key>,
29 | class ValueEqual = EqualTo<Value>>
30 | struct Map {
31 | using Pair = std::pair<Key, Value>;
32 |
33 | const std::array<Pair, N> data;
34 |
35 | constexpr Value get(const Key& key) const {
36 | return (find(key) != N) ?
37 | data[find(key)].second :
38 | throw std::range_error("Key not found");
39 | }
40 |
41 | constexpr Value getOrElse(const Key& key, const Value& value) const {
42 | return (find(key) != N) ? data[find(key)].second : value;
43 | }
44 |
45 | constexpr bool exists(const Key& key) const {
46 | return find(key) != N;
47 | }
48 |
49 | constexpr bool exists(const Key& key, const Value& value) const {
50 | return find(key) != N && ValueEqual{}(data[find(key)].second, value);
51 | }
52 |
53 | private:
54 | constexpr size_t find(const Key& key) const {
55 | return find(key, Index_<N - 1>{});
56 | }
57 |
58 | template <size_t I>
59 | constexpr size_t find(const Key& key, Index_<I>) const {
60 | return KeyEqual{}(data[I].first, key) ? I : find(key, Index_<I - 1>{});
61 | }
62 |
63 | constexpr size_t find(const Key& key, Index_<0>) const {
64 | return KeyEqual{}(data[0].first, key) ? 0 : N;
65 | }
66 | };
67 |
68 | }
69 |
70 | #endif /* DBTOASTER_MAP_HPP */
--------------------------------------------------------------------------------
/ddbtoaster/srccpp/lib/message.hpp:
--------------------------------------------------------------------------------
1 | #ifndef DBTOASTER_MESSAGE_HPP
2 | #define DBTOASTER_MESSAGE_HPP
3 |
4 | #include <memory>
5 | #include <utility>
6 | #include <vector>
7 |
8 | namespace dbtoaster {
9 |
10 | struct MessageBase {
11 | virtual ~MessageBase() = default;
12 | };
13 |
14 | template <class T>
15 | struct Message : MessageBase {
16 | T content;
17 | };
18 |
19 | template <class Key, class Value>
20 | struct BatchMessage : MessageBase {
21 | using KVpair = std::pair<Key, Value>;
22 | std::vector<KVpair> content;
23 |
24 | void append(const Key& key, const Value& value) {
25 | content.push_back(std::make_pair(key, value));
26 | }
27 | };
28 |
29 | typedef std::unique_ptr<MessageBase> MessageBasePtr;
30 |
31 | }
32 |
33 | #endif /* DBTOASTER_MESSAGE_HPP */
--------------------------------------------------------------------------------
/ddbtoaster/srccpp/lib/pardis/sc_extra.hpp:
--------------------------------------------------------------------------------
1 | #ifndef DBTOASTER_PARDIS_SC_EXTRA_HPP
2 | #define DBTOASTER_PARDIS_SC_EXTRA_HPP
3 |
4 | #include
5 | #include
6 | #include
7 | #include "types.hpp"
8 | #include "generic_entry.hpp"
9 | #include "aggregator.hpp"
10 |
11 | using namespace std;
12 |
13 | namespace dbtoaster {
14 |
15 | namespace pardis {
16 |
17 | #define EXPAND(x) #x
18 | #define STRINGIFY(x) EXPAND(x)
19 | #define CHECK_STAT(x) cerr << STRINGIFY(x) << " -> "; x.getBucketStats()
20 | #define GET_RUN_STAT(x, f) f << "\"" << STRINGIFY(x) << "\" : ["; x.getSizeStats(f); f << "]";
21 | #define GET_RUN_STAT_P(x, f)\
22 | f << "\"" << STRINGIFY(x) << "\" : [";\
23 | partitions[0].x.getSizeStats(f);\
24 | for(int i=1; i
--------------------------------------------------------------------------------
/ddbtoaster/srccpp/lib/relation.hpp:
--------------------------------------------------------------------------------
1 | #ifndef DBTOASTER_RELATION_HPP
2 | #define DBTOASTER_RELATION_HPP
3 |
4 | #include <limits>
5 |
6 | namespace dbtoaster {
7 |
8 | typedef uint32_t RelationId;
9 |
10 | constexpr RelationId kInvalidRelationId = std::numeric_limits<RelationId>::max();
11 |
12 | enum class RelationType { kTable = 0, kStream };
13 |
14 | struct Relation {
15 | constexpr Relation(RelationId t_id, const char* t_name, RelationType t_type)
16 | : id(t_id), name(t_name), type(t_type) { }
17 |
18 | constexpr bool isTable() const { return type == RelationType::kTable; }
19 |
20 | RelationId id;
21 | const char* name;
22 | RelationType type;
23 | };
24 |
25 | }
26 |
27 | #endif /* DBTOASTER_RELATION_HPP */
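The Map type defined in map_type.hpp above is a fixed-size association list whose lookups unroll at compile time through the recursive Index_<I> overloads, terminating at the non-template Index_<0> overload. A minimal sketch (hypothetical values; not a repository file):

    #include "map_type.hpp"

    using dbtoaster::Map;

    // A two-entry compile-time map from relation id to arity; misses are
    // caught by static_assert instead of at run time.
    constexpr Map<int, int, 2> rel_arity = {{{ {1, 4}, {2, 8} }}};

    static_assert(rel_arity.get(2) == 8, "wrong arity");
    static_assert(rel_arity.getOrElse(5, -1) == -1, "default not taken");
    static_assert(rel_arity.exists(1), "key 1 should exist");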
--------------------------------------------------------------------------------
/ddbtoaster/srccpp/lib/singleton.hpp:
--------------------------------------------------------------------------------
1 | #ifndef DBTOASTER_SINGLETON_HPP
2 | #define DBTOASTER_SINGLETON_HPP
3 |
4 | namespace dbtoaster {
5 |
6 | template <class T>
7 | class Singleton {
8 | public:
9 | T* acquire() {
10 | ++counter_;
11 | if (instance_ == nullptr) {
12 | instance_ = new T();
13 | }
14 | return instance_;
15 | }
16 |
17 | void release(T* obj) {
18 | if (instance_ == obj && --counter_ == 0) {
19 | delete instance_;
20 | instance_ = nullptr;
21 | }
22 | }
23 |
24 | private:
25 | static T* instance_;
26 | static size_t counter_;
27 | };
28 |
29 | template <class T>
30 | T* Singleton<T>::instance_ = nullptr;
31 |
32 | template <class T>
33 | size_t Singleton<T>::counter_ = 0;
34 |
35 | }
36 |
37 | #endif /* DBTOASTER_SINGLETON_HPP */
38 |
--------------------------------------------------------------------------------
/ddbtoaster/srccpp/lib/source.hpp:
--------------------------------------------------------------------------------
1 | #ifndef DBTOASTER_SOURCE_HPP
2 | #define DBTOASTER_SOURCE_HPP
3 |
4 | #include <string>
5 |
6 | namespace dbtoaster {
7 |
8 | struct Source { };
9 |
10 | struct FileSource : Source {
11 | FileSource(std::string t_filename, bool t_binary = false)
12 | : filename(t_filename), binary(t_binary) { }
13 |
14 | std::string filename;
15 | bool binary;
16 | };
17 |
18 | }
19 |
20 | #endif /* DBTOASTER_SOURCE_HPP */
--------------------------------------------------------------------------------
/ddbtoaster/srccpp/lib/types.hpp:
--------------------------------------------------------------------------------
1 | #ifndef DBTOASTER_TYPES_HPP
2 | #define DBTOASTER_TYPES_HPP
3 |
4 | #include <string>
5 | #include <type_traits>
6 | #include "date_type.hpp"
7 | #include "map_type.hpp"
8 |
9 | namespace dbtoaster {
10 |
11 | typedef double DoubleType;
12 |
13 | typedef std::size_t HashType;
14 |
15 | typedef std::string StringType;
16 |
17 | template <class Value, size_t N>
18 | using CStringMap = Map<const char*, Value, N>;
19 |
20 | template <class T, class Enable = void>
21 | struct Value {
22 | static constexpr T zero = T();
23 | static constexpr bool isZero(const T& a) { return a.isZero(); }
24 | };
25 |
26 | template <class T, class Enable>
27 | constexpr T Value<T, Enable>::zero;
28 |
29 | template <class T>
30 | struct Value<T, typename std::enable_if<std::is_integral<T>::value>::type> {
31 | static constexpr T zero = 0;
32 | static constexpr bool isZero(const T& a) { return a == 0; }
33 | };
34 |
35 | template <class T>
36 | constexpr T Value<T, typename std::enable_if<std::is_integral<T>::value>::type>::zero;
37 |
38 | template <class T>
39 | struct Value<T, typename std::enable_if<std::is_floating_point<T>::value>::type> {
40 | static constexpr T zero = 0.0;
41 | static constexpr bool isZero(const T& a) { return a == 0.0; }
42 | };
43 |
44 | template <class T>
45 | constexpr T Value<T, typename std::enable_if<std::is_floating_point<T>::value>::type>::zero;
46 |
47 | }
48 | #endif /* DBTOASTER_TYPES_HPP */
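A short sketch of the reference-counted Singleton above (Catalog is a hypothetical component type; note the counter is not synchronized, so this pattern assumes single-threaded use or external locking):

    #include "singleton.hpp"

    struct Catalog { /* shared, lazily constructed state */ };

    void worker() {
        dbtoaster::Singleton<Catalog> handle;
        Catalog* cat = handle.acquire();   // constructed on first acquire()
        // ... use cat ...
        handle.release(cat);               // destroyed when count reaches zero
    }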
--------------------------------------------------------------------------------
/ddbtoaster/srccpp/lib/utils.hpp:
--------------------------------------------------------------------------------
1 | #ifndef DBTOASTER_UTILS_HPP
2 | #define DBTOASTER_UTILS_HPP
3 |
4 | namespace dbtoaster {
5 |
6 | namespace utils {
7 |
8 | constexpr size_t stringLength(const char* s) {
9 | return (*s == 0) ? 0 : 1 + stringLength(s + 1);
10 | }
11 |
12 | constexpr bool stringEqual(const char* x, const char* y) {
13 | return (*x == 0 && *y == 0) || (*x == *y && stringEqual(x + 1, y + 1));
14 | }
15 |
16 | }
17 |
18 | }
19 |
20 | #endif /* DBTOASTER_UTILS_HPP */
--------------------------------------------------------------------------------
/ddbtoaster/srccpp/old_driver/event.cpp:
--------------------------------------------------------------------------------
1 | #include "event.hpp"
2 |
3 | namespace dbtoaster {
4 |
5 | std::string event_name[] = {
6 | std::string("insert"),
7 | std::string("delete"),
8 | std::string("batch_update"),
9 | std::string("system_ready")
10 | };
11 | bool compare_event_timestamp_order (event_t const & p1, event_t const & p2)
12 | {
13 | return p1.event_order < p2.event_order;
14 | }
15 | }
--------------------------------------------------------------------------------
/ddbtoaster/srccpp/old_driver/event.hpp:
--------------------------------------------------------------------------------
1 | /*
2 | * event.hpp
3 | *
4 | * Created on: May 8, 2012
5 | * Author: daniel
6 | */
7 |
8 | #ifndef DBTOASTER_EVENT_H
9 | #define DBTOASTER_EVENT_H
10 |
11 | #include <memory>
12 | #include <string>
13 | #include <vector>
14 |
15 | namespace dbtoaster {
16 |
17 | /**
18 | * Type definitions of data-structures used for representing events.
19 | */
20 |
21 | enum event_type {
22 | delete_tuple = 0,
23 | insert_tuple,
24 | batch_update,
25 | system_ready_event
26 | };
27 |
28 | typedef int relation_id_t;
29 | typedef std::vector<std::shared_ptr<void>> event_args_t;
30 |
31 | extern std::string event_name[];
32 |
33 | /**
34 | * Data-structure used for representing an event consisting of: event type,
35 | * relation identifier corresponding to the stream/table it relates to and,
36 | * finally, the tuple associated with the event.
37 | */
38 | struct event_t
39 | {
40 | event_type type;
41 | relation_id_t id;
42 | unsigned int event_order;
43 | event_args_t data;
44 |
45 | event_t(const event_t& other)
46 | : type(other.type), id(other.id), event_order(other.event_order), data(other.data)
47 | {}
48 |
49 | event_t(event_type t, relation_id_t i, unsigned int ord, event_args_t& d)
50 | : type(t), id(i), event_order(ord), data(d)
51 | {}
52 | };
53 |
54 | bool compare_event_timestamp_order (event_t const & p1, event_t const & p2);
55 |
56 | struct event_timestamp_order
57 | {
58 | bool operator()(event_t const & p1, event_t const & p2) {
59 | return compare_event_timestamp_order(p1, p2);
60 | }
61 | };
62 |
63 |
64 | }
65 |
66 | #endif /* DBTOASTER_DBT_EVENT_H */
67 |
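event_timestamp_order wraps the comparison on event_order into a function object, so a multiplexed trace can be put back into timestamp order with a standard sort. A minimal sketch (hypothetical usage, not a repository file):

    #include <algorithm>
    #include <vector>
    #include "event.hpp"   // the old_driver event.hpp above

    using namespace dbtoaster;

    void replayInOrder(std::vector<event_t>& trace) {
        // stable_sort keeps the original arrival order of events that share
        // the same event_order value.
        std::stable_sort(trace.begin(), trace.end(), event_timestamp_order());
        // ... dispatch each event_t to the generated trigger functions ...
    }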
--------------------------------------------------------------------------------
/ddbtoaster/srccpp/old_driver/makefile:
--------------------------------------------------------------------------------
1 | G++ := g++
2 | LIB_OBJ := libdriver.a
3 | TARGET:=$(shell which $(G++) &>/dev/null && echo $(LIB_OBJ) || echo warn)
4 |
5 | SRC_FILES := event.cpp \
6 | iprogram.cpp \
7 | program_base.cpp \
8 | runtime.cpp \
9 | standard_adaptors.cpp \
10 | streams.cpp
11 |
12 | OBJ_FILES := $(patsubst %.cpp,bin/%.o,$(SRC_FILES))
13 |
14 | all: $(TARGET)
15 |
16 | warn: $(SRC_FILES)
17 | @echo
18 | @echo "------------------------------------------------------------"
19 | @echo "Warning: C++ library will not be built: $(G++) not found!"
20 | @echo "------------------------------------------------------------"
21 | @echo
22 |
23 |
24 | $(LIB_OBJ) : $(OBJ_FILES)
25 | @echo "Linking $@"
26 | @ar cr $@ $^
27 |
28 | $(OBJ_FILES) : bin/%.o : %.cpp $(HDR_FILES)
29 | @mkdir -p ./bin
30 | @echo Compiling $<
31 | @$(G++) -Wall -std=c++11 $(CPP_FLAGS) -I ../old_lib -O3 -o $(patsubst %.cpp,bin/%.o,$<) -c $<
32 |
33 | clean:
34 | rm -rf bin $(LIB_OBJ)
35 |
36 | .PHONY: all clean
37 |
--------------------------------------------------------------------------------
/ddbtoaster/srccpp/old_lib/date.hpp:
--------------------------------------------------------------------------------
1 | #ifndef DBTOASTER_DATE_HPP
2 | #define DBTOASTER_DATE_HPP
3 |
4 | namespace dbtoaster {
5 |
6 | struct DateType {
7 | public:
8 | DateType() : day(0), month(0), year(0) { }
9 | DateType(uint16_t y, uint8_t m, uint8_t d) : day(d), month(m), year(y) { }
10 |
11 | uint16_t getYear() const { return year; }
12 |
13 | uint8_t getMonth() const { return month; }
14 |
15 | uint8_t getDay() const { return day; }
16 |
17 | uint32_t getNumeric() const { return numeric; }
18 |
19 | friend bool operator==(const DateType& d1, const DateType& d2);
20 | friend bool operator!=(const DateType& d1, const DateType& d2);
21 | friend bool operator< (const DateType& d1, const DateType& d2);
22 | friend bool operator<=(const DateType& d1, const DateType& d2);
23 | friend bool operator> (const DateType& d1, const DateType& d2);
24 | friend bool operator>=(const DateType& d1, const DateType& d2);
25 |
26 | private:
27 | union {
28 | struct {
29 | uint8_t day;
30 | uint8_t month;
31 | uint16_t year;
32 | };
33 | uint32_t numeric;
34 | };
35 |
36 | };
37 |
38 | inline bool operator==(const DateType& d1, const DateType& d2) {
39 | return d1.numeric == d2.numeric;
40 | }
41 |
42 | inline bool operator!=(const DateType& d1, const DateType& d2) {
43 | return d1.numeric != d2.numeric;
44 | }
45 |
46 | inline bool operator< (const DateType& d1, const DateType& d2) {
47 | return d1.numeric < d2.numeric;
48 | }
49 |
50 | inline bool operator<=(const DateType& d1, const DateType& d2) {
51 | return d1.numeric <= d2.numeric;
52 | }
53 |
54 | inline bool operator> (const DateType& d1, const DateType& d2) {
55 | return d1.numeric > d2.numeric;
56 | }
57 |
58 | inline bool operator>=(const DateType& d1, const DateType& d2) {
59 | return d1.numeric >= d2.numeric;
60 | }
61 | }
62 |
63 | #endif /* DBTOASTER_DATE_HPP */
64 |
--------------------------------------------------------------------------------
/ddbtoaster/srccpp/old_lib/hash.cpp:
--------------------------------------------------------------------------------
1 | #include "hash.hpp"
2 |
3 | using namespace dbtoaster;
4 |
5 | namespace dbtoaster {
6 | volatile Cast c;
7 | volatile Cast cLLD;
8 | }
9 |
10 | std::hash<double> double_hasher;
11 | std::hash<std::string> string_hasher;
sublicense, and/or sell 10 | // copies of the Software, and to permit persons to whom the Software is 11 | // furnished to do so, subject to the following conditions: 12 | 13 | // The above copyright notice and this permission notice shall be included in all 14 | // copies or substantial portions of the Software. 15 | 16 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | // SOFTWARE. 23 | 24 | #include "KDouble.hpp" 25 | 26 | namespace dbtoaster { 27 | 28 | size_t precision = 7; // significative numbers (7 to pass r_sumdivgrp, 10 otherwise) 29 | double KDouble::diff_p = std::pow(0.1,precision); 30 | 31 | } 32 | -------------------------------------------------------------------------------- /ddbtoaster/srccpp/old_lib/hpds/pstring.cpp: -------------------------------------------------------------------------------- 1 | // The MIT License (MIT) 2 | 3 | // Copyright (c) 2014 Mohammad Dashti 4 | // (www.mdashti.com - mohammad.dashti [at] epfl [dot] ch - mdashti [at] gmail [dot] com) 5 | 6 | // Permission is hereby granted, free of charge, to any person obtaining a copy 7 | // of this software and associated documentation files (the "Software"), to deal 8 | // in the Software without restriction, including without limitation the rights 9 | // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | // copies of the Software, and to permit persons to whom the Software is 11 | // furnished to do so, subject to the following conditions: 12 | 13 | // The above copyright notice and this permission notice shall be included in all 14 | // copies or substantial portions of the Software. 15 | 16 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | // SOFTWARE. 
23 | 24 | #include "pstring.hpp" 25 | 26 | 27 | //global operators 28 | #ifdef USE_POOL 29 | CharPool<> PString::pool_; 30 | #endif //USE_POOL 31 | 32 | // to prevent 'no symbol' warning on MacOS 33 | char dummy; 34 | -------------------------------------------------------------------------------- /ddbtoaster/srccpp/old_lib/hpds/pstringops.cpp: -------------------------------------------------------------------------------- 1 | #include "pstringops.hpp" 2 | 3 | std::ostream& operator<< (std::ostream& o, PString const& str) 4 | { 5 | return o << "\"" << str.c_str() << "\""; 6 | } 7 | -------------------------------------------------------------------------------- /ddbtoaster/srccpp/old_lib/hpds/pstringops.hpp: -------------------------------------------------------------------------------- 1 | #include "pstring.hpp" 2 | 3 | 4 | inline bool operator==(const char *str1, const PString &str2) 5 | { 6 | return (strcmp(str1, str2.c_str()) == 0); 7 | } 8 | 9 | inline bool operator!=(const char *str1, const PString &str2) 10 | { 11 | return (strcmp(str1, str2.c_str()) != 0); 12 | } 13 | 14 | std::ostream& operator<< (std::ostream& o, PString const& str); 15 | 16 | FORCE_INLINE size_t hash_value(PString const& str) 17 | { 18 | return MurmurHash2(str.c_str(), str.length() * sizeof(char), 0); 19 | } -------------------------------------------------------------------------------- /ddbtoaster/srccpp/old_lib/macro.hpp: -------------------------------------------------------------------------------- 1 | #ifndef DBTOASTER_MACRO_HPP 2 | #define DBTOASTER_MACRO_HPP 3 | 4 | #define STRING(s) #s 5 | 6 | //----------------------------------------------------------------------------- 7 | // Microsoft Visual Studio 8 | 9 | #if defined(_MSC_VER) 10 | 11 | #define INLINE inline 12 | #define FORCE_INLINE __forceinline 13 | #define NEVER_INLINE __declspec(noinline) 14 | 15 | //----------------------------------------------------------------------------- 16 | // Other compilers 17 | 18 | #else // defined(_MSC_VER) 19 | 20 | #define INLINE inline 21 | #define FORCE_INLINE inline __attribute__((always_inline)) 22 | #define NEVER_INLINE __attribute__((noinline)) 23 | 24 | #endif // !defined(_MSC_VER) 25 | 26 | #endif /* DBTOASTER_MACRO_HPP */ -------------------------------------------------------------------------------- /ddbtoaster/srccpp/old_lib/makefile: -------------------------------------------------------------------------------- 1 | G++ := g++ 2 | LIB_OBJ := libdbtoaster.a 3 | TARGET:=$(shell which $(G++) &>/dev/null && echo $(LIB_OBJ) || echo warn) 4 | 5 | SRC_FILES := hash.cpp \ 6 | smhasher/MurmurHash2.cpp \ 7 | hpds/pstring.cpp \ 8 | hpds/pstringops.cpp \ 9 | hpds/KDouble.cpp \ 10 | standard_functions.cpp 11 | 12 | OBJ_FILES := $(patsubst %.cpp,bin/%.o,$(SRC_FILES)) 13 | 14 | all: $(TARGET) 15 | 16 | warn: $(SRC_FILES) 17 | @echo 18 | @echo "------------------------------------------------------------" 19 | @echo "Warning: C++ library will not be built: $(G++) not found!" 20 | @echo "------------------------------------------------------------" 21 | @echo 22 | 23 | 24 | $(LIB_OBJ) : $(OBJ_FILES) 25 | @echo "Linking $@" 26 | @ar cr $@ $^ 27 | 28 | $(OBJ_FILES) : bin/%.o : %.cpp 29 | @mkdir -p ./bin 30 | @mkdir -p ./bin/hpds 31 | @mkdir -p ./bin/smhasher 32 | @echo Compiling $< 33 | @$(G++) -Wall -std=c++11 $(CPP_FLAGS) -I . 
-O3 -o $(patsubst %.cpp,bin/%.o,$<) -c $< 34 | 35 | clean: 36 | rm -rf bin $(LIB_OBJ) 37 | 38 | .PHONY: all clean 39 | -------------------------------------------------------------------------------- /ddbtoaster/srccpp/old_lib/misc/hashmap/internal/sparseconfig.h: -------------------------------------------------------------------------------- 1 | /* 2 | * NOTE: This file is for internal use only. 3 | * Do not use these #defines in your own program! 4 | */ 5 | 6 | /* Namespace for Google classes */ 7 | #define GOOGLE_NAMESPACE ::dbtoaster 8 | 9 | /* the location of the header defining hash functions */ 10 | #define HASH_FUN_H <tr1/functional> 11 | 12 | /* the namespace of the hash<> function */ 13 | #define HASH_NAMESPACE std::tr1 14 | 15 | /* Define to 1 if you have the <inttypes.h> header file. */ 16 | #define HAVE_INTTYPES_H 1 17 | 18 | /* Define to 1 if the system has the type `long long'. */ 19 | #define HAVE_LONG_LONG 1 20 | 21 | /* Define to 1 if you have the `memcpy' function. */ 22 | #define HAVE_MEMCPY 1 23 | 24 | /* Define to 1 if you have the <stdint.h> header file. */ 25 | #define HAVE_STDINT_H 1 26 | 27 | /* Define to 1 if you have the <sys/types.h> header file. */ 28 | #define HAVE_SYS_TYPES_H 1 29 | 30 | /* Define to 1 if the system has the type `uint16_t'. */ 31 | #define HAVE_UINT16_T 1 32 | 33 | /* Define to 1 if the system has the type `u_int16_t'. */ 34 | #define HAVE_U_INT16_T 1 35 | 36 | /* Define to 1 if the system has the type `__uint16'. */ 37 | /* #undef HAVE___UINT16 */ 38 | 39 | /* The system-provided hash function including the namespace. */ 40 | #define SPARSEHASH_HASH HASH_NAMESPACE::hash 41 | 42 | /* Stops putting the code inside the Google namespace */ 43 | #define _END_GOOGLE_NAMESPACE_ } 44 | 45 | /* Puts following code inside the Google namespace */ 46 | #define _START_GOOGLE_NAMESPACE_ namespace dbtoaster { 47 | -------------------------------------------------------------------------------- /ddbtoaster/srccpp/old_lib/misc/run-benchHashCmp.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | rm -f ./benchHashCmp 4 | g++ -O3 -std=c++17 benchHashCmp.cpp ../hash.cpp -I .. -o benchHashCmp && ./benchHashCmp -------------------------------------------------------------------------------- /ddbtoaster/srccpp/old_lib/misc/run-mmap.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | rm -f ./mmap 4 | g++ -Wall -O3 -std=c++17 -I .. -I/usr/local/include -L/usr/local/lib mmap.cpp -o mmap && ./mmap -------------------------------------------------------------------------------- /ddbtoaster/srccpp/old_lib/misc/util.hpp: -------------------------------------------------------------------------------- 1 | #ifndef DBTOASTER_UTIL_H 2 | #define DBTOASTER_UTIL_H 3 | 4 | #include <list> 5 | 6 | namespace dbtoaster { 7 | namespace util { 8 | // Misc function object helpers.
9 | struct fold_hash { 10 | typedef std::size_t result_type; 11 | template <class T> 12 | std::size_t operator()(std::size_t current, const T& arg) { 13 | hash_combine(current, arg); 14 | return(current); 15 | } 16 | }; 17 | } 18 | 19 | template <class T> 20 | std::list<T> singleton(T elem) { 21 | std::list<T> sing; 22 | sing.push_back(elem); 23 | return sing; 24 | } 25 | } 26 | 27 | #endif 28 | -------------------------------------------------------------------------------- /ddbtoaster/srccpp/old_lib/sc/ScExtra.h: -------------------------------------------------------------------------------- 1 | 2 | #ifndef SCEXTRA_H 3 | #define SCEXTRA_H 4 | #include <iostream> 5 | #include <fstream> 6 | #include <string> 7 | #include "types.hpp" 8 | #include "GenericEntry.hpp" 9 | #include "Aggregator.hpp" 10 | #ifdef CONCURRENT 11 | #include "types.h" 12 | #include "Version.h" 13 | #endif 14 | 15 | using namespace std; 16 | using namespace dbtoaster; 17 | 18 | #define EXPAND(x) #x 19 | #define STRINGIFY(x) EXPAND(x) 20 | #define CHECK_STAT(x) cerr << STRINGIFY(x) << " -> "; x.getBucketStats() 21 | #define GET_RUN_STAT(x, f) f << "\"" << STRINGIFY(x) << "\" : ["; x.getSizeStats(f); f << "]"; 22 | #define GET_RUN_STAT_P(x, f)\ 23 | f << "\"" << STRINGIFY(x) << "\" : [";\ 24 | partitions[0].x.getSizeStats(f);\ 25 | for(int i=1; i<numPartitions; ++i) {\ 26 | f << ", ";\ 27 | partitions[i].x.getSizeStats(f);\ 28 | }\ 29 | f << "]"; 30 | 31 | #endif /* SCEXTRA_H */ 32 | -------------------------------------------------------------------------------- /ddbtoaster/srccpp/old_lib/sc/SpinLock.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | #ifndef __COMMON_SPIN_LOCK_H__ 3 | #define __COMMON_SPIN_LOCK_H__ 4 | #include <atomic> 5 | 6 | class __attribute__((aligned(64))) SpinLock { 7 | std::atomic_flag lock_; 8 | public: 9 | 10 | SpinLock() { 11 | lock_.clear(); 12 | } 13 | 14 | inline void lock() { 15 | while (lock_.test_and_set(std::memory_order_acquire)); 16 | } 17 | 18 | inline void unlock() { 19 | lock_.clear(std::memory_order_release); 20 | } 21 | 22 | inline bool try_lock() { 23 | return !lock_.test_and_set(std::memory_order_acquire); 24 | } 25 | 26 | }; 27 | 28 | #endif 29 | -------------------------------------------------------------------------------- /ddbtoaster/srccpp/old_lib/sc/Transaction.h: -------------------------------------------------------------------------------- 1 | #ifndef TRANSACTION_H 2 | #define TRANSACTION_H 3 | #include "types.h" 4 | 5 | struct ALIGN Transaction { 6 | VBase* undoBufferHead; 7 | PRED* predicateHead; 8 | static TransactionManager& tm; 9 | timestamp startTS; 10 | volatile timestamp commitTS; 11 | Transaction * prevCommitted; 12 | uint8_t threadId; 13 | 14 | uint8_t ptype; 15 | Transaction* failedBecauseOf; 16 | Transaction() { 17 | failedBecauseOf = nullptr; 18 | threadId = 0; 19 | commitTS = initCommitTS; 20 | undoBufferHead = nullptr; 21 | predicateHead = nullptr; 22 | prevCommitted = nullptr; 23 | } 24 | 25 | void reset() { 26 | threadId = 0; 27 | commitTS = initCommitTS; 28 | undoBufferHead = nullptr; 29 | predicateHead = nullptr; 30 | prevCommitted = nullptr; 31 | } 32 | }; 33 | 34 | #endif /* TRANSACTION_H */ 35 | 36 | -------------------------------------------------------------------------------- /ddbtoaster/srccpp/old_lib/sc/mmap.hpp: -------------------------------------------------------------------------------- 1 | #ifdef SC_GENERATED //using SC 2 | #include "ScExtra.h" 3 | 4 | #ifdef CONCURRENT 5 | #include "cmmap.hpp" // For SC concurrent CPP 6 | #else 7 | #include "mmap2.hpp" // For SC CPP 8 | #endif 9 | #endif 10 | -------------------------------------------------------------------------------- /ddbtoaster/srccpp/old_lib/smhasher/MurmurHash2.hpp: -------------------------------------------------------------------------------- 1 | //----------------------------------------------------------------------------- 2 | // MurmurHash2 was written by Austin Appleby, and is placed in the public 3 | //
domain. The author hereby disclaims copyright to this source code. 4 | 5 | #ifndef _MURMURHASH2_H_ 6 | #define _MURMURHASH2_H_ 7 | 8 | //----------------------------------------------------------------------------- 9 | // Platform-specific functions and macros 10 | 11 | // Microsoft Visual Studio 12 | 13 | #if defined(_MSC_VER) && (_MSC_VER < 1600) 14 | 15 | typedef unsigned char uint8_t; 16 | typedef unsigned int uint32_t; 17 | typedef unsigned __int64 uint64_t; 18 | 19 | // Other compilers 20 | 21 | #else // defined(_MSC_VER) 22 | 23 | #include <stdint.h> 24 | 25 | #endif // !defined(_MSC_VER) 26 | 27 | //----------------------------------------------------------------------------- 28 | 29 | uint32_t MurmurHash2 ( const void * key, int len, uint32_t seed ); 30 | uint64_t MurmurHash64A ( const void * key, int len, uint64_t seed ); 31 | uint64_t MurmurHash64B ( const void * key, int len, uint64_t seed ); 32 | uint32_t MurmurHash2A ( const void * key, int len, uint32_t seed ); 33 | uint32_t MurmurHashNeutral2 ( const void * key, int len, uint32_t seed ); 34 | uint32_t MurmurHashAligned2 ( const void * key, int len, uint32_t seed ); 35 | 36 | //----------------------------------------------------------------------------- 37 | 38 | #endif // _MURMURHASH2_H_ 39 | 40 | -------------------------------------------------------------------------------- /ddbtoaster/srccpp/old_lib/smhasher/MurmurHash3.hpp: -------------------------------------------------------------------------------- 1 | //----------------------------------------------------------------------------- 2 | // MurmurHash3 was written by Austin Appleby, and is placed in the public 3 | // domain. The author hereby disclaims copyright to this source code. 4 | 5 | #ifndef _MURMURHASH3_H_ 6 | #define _MURMURHASH3_H_ 7 | 8 | // Check windows 9 | #if _WIN32 || _WIN64 10 | #if _WIN64 11 | #define ENV64BIT 12 | #else 13 | #define ENV32BIT 14 | #endif 15 | #endif 16 | 17 | // Check GCC 18 | #if __GNUC__ 19 | #if __x86_64__ || __ppc64__ 20 | #define ENV64BIT 21 | #else 22 | #define ENV32BIT 23 | #endif 24 | #endif 25 | 26 | //----------------------------------------------------------------------------- 27 | // Platform-specific functions and macros 28 | 29 | // Microsoft Visual Studio 30 | 31 | #if defined(_MSC_VER) && (_MSC_VER < 1600) 32 | 33 | typedef unsigned char uint8_t; 34 | typedef unsigned int uint32_t; 35 | typedef unsigned __int64 uint64_t; 36 | 37 | // Other compilers 38 | 39 | #else // defined(_MSC_VER) 40 | 41 | #include <stdint.h> 42 | 43 | #endif // !defined(_MSC_VER) 44 | 45 | //----------------------------------------------------------------------------- 46 | void MurmurHash3_x86_32 ( const void * key, int len, uint32_t seed, void * out ); 47 | 48 | #ifndef ENV64BIT 49 | void MurmurHash3_128 ( const void * key, int len, uint32_t seed, void * out ); 50 | #else 51 | void MurmurHash3_128 ( const void * key, int len, uint32_t seed, void * out ); 52 | #endif 53 | //----------------------------------------------------------------------------- 54 | 55 | #endif // _MURMURHASH3_H_ 56 | -------------------------------------------------------------------------------- /ddbtoaster/srccpp/old_lib/smhasher/PMurHash.cpp: -------------------------------------------------------------------------------- 1 | #include "PMurHash.hpp" 2 | 3 | std::hash<long> long_hasher; 4 | std::hash<double> double_hasher; 5 | std::hash<std::string> string_hasher; 6 | 7 | FORCE_INLINE void hash_combine(std::size_t& seed, const long& v) 8 | { 9 | seed ^= long_hasher(v) + 0x9e3779b9 + (seed<<6) + (seed>>2); 10 | } 11
| FORCE_INLINE void hash_combine(std::size_t& seed, const double& v) 12 | { 13 | seed ^= double_hasher(v) + 0x9e3779b9 + (seed<<6) + (seed>>2); 14 | } 15 | FORCE_INLINE void hash_combine(std::size_t& seed, const std::string& v) 16 | { 17 | seed ^= string_hasher(v) + 0x9e3779b9 + (seed<<6) + (seed>>2); 18 | } 19 | -------------------------------------------------------------------------------- /ddbtoaster/srccpp/old_lib/source.hpp: -------------------------------------------------------------------------------- 1 | #ifndef DBTOASTER_SOURCE_HPP 2 | #define DBTOASTER_SOURCE_HPP 3 | 4 | namespace dbtoaster { 5 | template <typename... Ts> struct Schema { }; 6 | 7 | enum class SourceType { kTable = 0, kStream }; 8 | 9 | struct Source { 10 | const char* name; 11 | const SourceType type; 12 | constexpr Source(const char* _name, SourceType _type) : name(_name), type(_type) { } 13 | bool isTable() const { return type == SourceType::kTable; } 14 | }; 15 | 16 | struct FileSource : Source { 17 | const char* path; 18 | const bool binary; 19 | constexpr FileSource(const char* _name, SourceType _type, const char* _path, bool _binary) 20 | : Source(_name, _type), path(_path), binary(_binary) { } 21 | }; 22 | 23 | struct CSVFileSource : FileSource { 24 | const char delimiter; 25 | constexpr CSVFileSource(const char* _name, SourceType _type, const char* _path, char _delimiter) 26 | : FileSource(_name, _type, _path, false), delimiter(_delimiter) { } 27 | }; 28 | 29 | struct OrderbookFileSource : CSVFileSource { 30 | // TODO: unified stream 31 | }; 32 | } 33 | 34 | #endif /* DBTOASTER_SOURCE_HPP */ -------------------------------------------------------------------------------- /ddbtoaster/srccpp/old_lib/types.hpp: -------------------------------------------------------------------------------- 1 | #ifndef DBTOASTER_TYPES_H 2 | #define DBTOASTER_TYPES_H 3 | 4 | #include <cstddef> 5 | #include <string> 6 | 7 | // #include "hpds/pstring.hpp" 8 | // #include "hpds/pstringops.hpp" 9 | // #include "hpds/KDouble.hpp" 10 | 11 | #include "date.hpp" 12 | 13 | namespace dbtoaster { 14 | 15 | typedef double DoubleType; 16 | 17 | typedef size_t HashType; 18 | 19 | typedef std::string StringType; 20 | 21 | } 22 | 23 | #endif /* DBTOASTER_TYPES_H */ -------------------------------------------------------------------------------- /ddbtoaster/test/cpp/htest.cpp: -------------------------------------------------------------------------------- 1 | #include <iostream> 2 | #include <cstring> 3 | #include <cstdlib> 4 | #include <tr1/functional> 5 | #include "hashmap/dense_hash_map" 6 | 7 | using namespace std; 8 | using std::tr1::hash; 9 | using dbtoaster::dense_hash_map; // namespace where class lives by default 10 | 11 | struct eqstr 12 | { 13 | bool operator()(const char* s1, const char* s2) const 14 | { 15 | if (s1 && s2) 16 | return strcmp(s1, s2) == 0; 17 | else 18 | return s1 == NULL && s2 == NULL; 19 | } 20 | }; 21 | 22 | int main() 23 | { 24 | dense_hash_map<const char*, int, hash<const char*>, eqstr> months; 25 | 26 | months.set_empty_key(NULL); 27 | months["january"] = 31; 28 | months["february"] = 28; 29 | months["march"] = 31; 30 | months["april"] = 30; 31 | months["may"] = 31; 32 | months["june"] = 30; 33 | months["july"] = 31; 34 | months["august"] = 31; 35 | months["september"] = 30; 36 | months["october"] = 31; 37 | months["november"] = 30; 38 | months["december"] = 31; 39 | 40 | cout << "september -> " << months["september"] << endl; 41 | cout << "april -> " << months["april"] << endl; 42 | cout << "june -> " << months["june"] << endl; 43 | cout << "november -> " << months["november"] << endl; 44 | 45 | return 0; 46 | }
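Note: the test above only works because set_empty_key() is called before the first insert; dense_hash_map reserves that key value internally and aborts if an insert happens first. A minimal sketch of the same contract extended with erase support (the int sentinels 0 and -1 are illustrative assumptions, not values taken from this repository):

#include <cstdio>
#include "hashmap/dense_hash_map"

int main() {
    // dense_hash_map reserves one key as "empty" and, if erase() is used,
    // a second key as "deleted"; neither value may ever be inserted.
    dbtoaster::dense_hash_map<int, int> counts;
    counts.set_empty_key(0);     // required before any insert
    counts.set_deleted_key(-1);  // required before any erase
    counts[42] = 1;
    counts.erase(42);
    std::printf("size after erase: %zu\n", counts.size());
    return 0;
}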
-------------------------------------------------------------------------------- /ddbtoaster/test/cpp/run-htest.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | cd ../.. 4 | 5 | rm -f target/htest 6 | 7 | BOOST_LIB="/usr/local/Cellar/boost/1.55.0" 8 | 9 | g++ test/cpp/htest.cpp -o target/htest -O3 -lpthread -ldbtoaster -Isrccpp/lib -Lsrccpp/lib -lboost_program_options-mt -lboost_serialization-mt -lboost_system-mt -lboost_filesystem-mt -lboost_chrono-mt -lboost_thread-mt -I$BOOST_LIB/include -L$BOOST_LIB/lib 10 | 11 | target/htest -------------------------------------------------------------------------------- /ddbtoaster/test/cpp/run-sparse-htest.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | cd ../.. 4 | 5 | rm -f target/sparse_htest 6 | 7 | BOOST_LIB="/usr/local/Cellar/boost/1.55.0" 8 | 9 | g++ test/cpp/sparse_htest.cpp -o target/sparse_htest -O3 -lpthread -ldbtoaster -Isrccpp/lib -Lsrccpp/lib -lboost_program_options-mt -lboost_serialization-mt -lboost_system-mt -lboost_filesystem-mt -lboost_chrono-mt -lboost_thread-mt -I$BOOST_LIB/include -L$BOOST_LIB/lib 10 | 11 | target/sparse_htest -------------------------------------------------------------------------------- /ddbtoaster/test/cpp/run-time_hash_map.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | cd ../.. 4 | 5 | echo "Moved to project root folder." 6 | 7 | rm -f target/time_hash_map 8 | 9 | echo "Cleaned." 10 | 11 | BOOST_LIB="/usr/local/Cellar/boost/1.55.0" 12 | 13 | g++ test/cpp/time_hash_map.cc -o target/time_hash_map -O3 -lpthread -ldbtoaster -Isrccpp/lib -Lsrccpp/lib -lboost_program_options-mt -lboost_serialization-mt -lboost_system-mt -lboost_filesystem-mt -lboost_chrono-mt -lboost_thread-mt -I$BOOST_LIB/include -L$BOOST_LIB/lib 14 | 15 | echo "Compiled" 16 | 17 | target/time_hash_map 18 | 19 | echo "Finished exec." 
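The three scripts above only compile and launch the benchmarks; the measurement itself lives in the .cc/.cpp files they build. A hedged sketch of the usual shape of such a hash-map microbenchmark (the workload size N and the measured loop are illustrative, not code from time_hash_map.cc):

#include <chrono>
#include <cstdio>
#include "hashmap/dense_hash_map"

int main() {
    const int N = 1000000;                  // illustrative workload size
    dbtoaster::dense_hash_map<int, int> m;
    m.set_empty_key(-1);                    // keys inserted below are all >= 0
    const auto start = std::chrono::steady_clock::now();
    for (int i = 0; i < N; ++i) m[i] = i;   // measured section: N inserts
    const auto stop = std::chrono::steady_clock::now();
    const double ms =
        std::chrono::duration<double, std::milli>(stop - start).count();
    std::printf("%d inserts in %.2f ms\n", N, ms);
    return 0;
}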
-------------------------------------------------------------------------------- /ddbtoaster/test/cpp/sparse_htest.cpp: -------------------------------------------------------------------------------- 1 | #include <iostream> 2 | #include <cstring> 3 | #include <cstdlib> 4 | #include <tr1/functional> 5 | #include "hashmap/sparse_hash_map" 6 | 7 | using namespace std; 8 | using std::tr1::hash; 9 | using dbtoaster::sparse_hash_map; // namespace where class lives by default 10 | 11 | struct eqstr 12 | { 13 | bool operator()(const char* s1, const char* s2) const 14 | { 15 | if (s1 && s2) 16 | return strcmp(s1, s2) == 0; 17 | else 18 | return s1 == NULL && s2 == NULL; 19 | } 20 | }; 21 | 22 | int main() 23 | { 24 | sparse_hash_map<const char*, int, hash<const char*>, eqstr> months; 25 | 26 | // months.set_empty_key(NULL); 27 | months["january"] = 31; 28 | months["february"] = 28; 29 | months["march"] = 31; 30 | months["april"] = 30; 31 | months["may"] = 31; 32 | months["june"] = 30; 33 | months["july"] = 31; 34 | months["august"] = 31; 35 | months["september"] = 30; 36 | months["october"] = 31; 37 | months["november"] = 30; 38 | months["december"] = 31; 39 | 40 | cout << "february -> " << months["february"] << endl; 41 | cout << "april -> " << months["april"] << endl; 42 | cout << "july -> " << months["july"] << endl; 43 | cout << "november -> " << months["november"] << endl; 44 | 45 | return 0; 46 | } -------------------------------------------------------------------------------- /project/build.properties: -------------------------------------------------------------------------------- 1 | sbt.version=0.13.7 2 | -------------------------------------------------------------------------------- /project/plugins.sbt: -------------------------------------------------------------------------------- 1 | resolvers += Resolver.sonatypeRepo("snapshots") 2 | 3 | addSbtPlugin("org.xerial.sbt" % "sbt-pack" % "0.5.1") 4 | 5 | addSbtPlugin("ch.epfl.data" % "sc-purgatory-plugin" % "0.1.4-SNAPSHOT") 6 | -------------------------------------------------------------------------------- /runtime/build.sbt: -------------------------------------------------------------------------------- 1 | Seq( 2 | // --------- Project information 3 | name := "Runtime", 4 | 5 | // --------- Paths 6 | scalaSource in Compile <<= baseDirectory / "tpcc", 7 | javaSource in Compile <<= baseDirectory / "tpcc", 8 | sourceDirectory in Compile <<= baseDirectory / "tpcc", 9 | 10 | // --------- Execution options 11 | fork := true, // required to enable javaOptions 12 | javaOptions ++= Seq("-Xss128m","-XX:-DontCompileHugeMethods","-XX:+CMSClassUnloadingEnabled"), 13 | javaOptions ++= Seq("-Xmx4G","-Xms1G"/*,"-verbose:gc"*/), parallelExecution in Test := false, // for large benchmarks 14 | javaOptions <+= (fullClasspath in Runtime) map (cp => "-Dsbt.classpath=" + cp.files.absString) // propagate paths 15 | ) 16 | 17 | libraryDependencies ++= Seq( 18 | "org.slf4j" % "slf4j-api" % "1.7.30" 19 | ) -------------------------------------------------------------------------------- /runtime/conf/tpcc.properties: -------------------------------------------------------------------------------- 1 | # TPC-C configuration properties 2 | # 3 | # See the README.txt first! 4 | # 5 | 6 | ############################################################ 7 | ## Number of warehouses (used both for data loading and ## 8 | ## running the benchmark).
## 9 | ############################################################ 10 | 11 | WAREHOUSECOUNT=1 12 | 13 | ############################################################ 14 | ## Data generation / loading properties ## 15 | ############################################################ 16 | 17 | # Data can be generated to tab delimited text files 18 | # suitable for loading into MySQL with LOAD DATA INFILE 19 | # or can be loaded directly via JDBC 20 | #MODE=FILE 21 | MODE=JDBC 22 | 23 | # For FILE mode, specify the output directory for the files. 24 | # Typically it is easiest to generate directly to the MySQL 25 | # database directory 26 | OUTPUTDIR=output 27 | 28 | # Specify which shard to load data for and the total number 29 | # of shards. Data is sharded based on warehouse ID. 30 | SHARDID=1 31 | SHARDCOUNT=1 32 | 33 | ############################################################ 34 | ## Database connection details used for loading data in ## 35 | ## JDBC mode and also used for running the benchmarks. ## 36 | ############################################################ 37 | 38 | # MySQL 39 | DRIVER=com.mysql.jdbc.Driver 40 | JDBCURL=jdbc:mysql://localhost:3306/tpcctest 41 | JDBCFETCHSIZE=-2147483648 42 | 43 | # dbShards 44 | #DRIVER=com.dbshards.jdbc.Driver 45 | #JDBCURL=jdbc:dbshards://tpcc/client 46 | 47 | # Credentials 48 | USER=root 49 | PASSWORD=ROOT 50 | #ROOT 51 | 52 | ############################################################ 53 | # Settings used for Tpcc benchmark only 54 | ############################################################ 55 | CONNECTIONS=1 56 | RAMPUPTIME=4 57 | DURATION=13 58 | 59 | -------------------------------------------------------------------------------- /runtime/conf/tpcc.properties.example: -------------------------------------------------------------------------------- 1 | # TPC-C configuration properties 2 | # 3 | # See the README.txt first! 4 | # 5 | 6 | ############################################################ 7 | ## Number of warehouses (used both for data loading and ## 8 | ## running the benchmark). ## 9 | ############################################################ 10 | 11 | WAREHOUSECOUNT=1 12 | 13 | ############################################################ 14 | ## Data generation / loading properties ## 15 | ############################################################ 16 | 17 | # Data can be generated to tab delimited text files 18 | # suitable for loading into MySQL with LOAD DATA INFILE 19 | # or can be loaded directly via JDBC 20 | #MODE=FILE 21 | MODE=JDBC 22 | 23 | # For FILE mode, specify the output directory for the files. 24 | # Typically it is easiest to generate directly to the MySQL 25 | # database directory 26 | OUTPUTDIR=output 27 | 28 | # Specify which shard to load data for and the total number 29 | # of shards. Data is sharded based on warehouse ID. 30 | SHARDID=1 31 | SHARDCOUNT=1 32 | 33 | ############################################################ 34 | ## Database connection details used for loading data in ## 35 | ## JDBC mode and also used for running the benchmarks. 
## 36 | ############################################################ 37 | 38 | # MySQL 39 | DRIVER=com.mysql.jdbc.Driver 40 | JDBCURL=jdbc:mysql://localhost:3306/tpcctest 41 | JDBCFETCHSIZE=-2147483648 42 | 43 | # dbShards 44 | #DRIVER=com.dbshards.jdbc.Driver 45 | #JDBCURL=jdbc:dbshards://tpcc/client 46 | 47 | # Credentials 48 | USER=root 49 | PASSWORD= 50 | #ROOT 51 | 52 | ############################################################ 53 | # Settings used for Tpcc benchmark only 54 | ############################################################ 55 | CONNECTIONS=1 56 | RAMPUPTIME=4 57 | DURATION=4 58 | 59 | -------------------------------------------------------------------------------- /runtime/microbench/MicroBench.scala: -------------------------------------------------------------------------------- 1 | 2 | package ddbt.micro 3 | import ddbt.lib.store._ 4 | 5 | case class SEntry4_DDSD(var _1: Double, var _2: Double, var _3: String, var _4: Double) extends Entry(4) {def this() = this(-1.7976931348623157E308, -1.7976931348623157E308, null, -1.7976931348623157E308) ; def copy = SEntry4_DDSD(_1, _2, _3, _4); override def copyFrom(e: Entry) = { val that = e.asInstanceOf[SEntry4_DDSD]; _1 = that._1;_2 = that._2;_3 = that._3;_4 = that._4} } 6 | class SCExecutor { 7 | 8 | 9 | val x366 = Array[EntryIdx[SEntry4_DDSD]](SEntry4_DDSD_Idx1) 10 | val customerTbl = new Store[SEntry4_DDSD](1, x366); 11 | val customerTblIdx0 = customerTbl.index(0, IHash, true, -1) 12 | 13 | 14 | object SEntry4_DDSD_Idx1 extends EntryIdx[SEntry4_DDSD] { 15 | override def hash(x339 : SEntry4_DDSD) = { 16 | var x340: Int = 0; 17 | val x341 = x340; 18 | x340 = (x341.^((((((x339._1).hashCode()).+(-1640531527)).+((x341.<<(6)))).+((x341.>>(2)))))) 19 | val x351 = x340; 20 | x351 21 | } 22 | override def cmp(x353 : SEntry4_DDSD , x354 : SEntry4_DDSD) = { 23 | var x355: Int = 0; 24 | if(((x353._1).==((x354._1)))) { 25 | x355 = 0 26 | } else { 27 | x355 = 1 28 | } 29 | val x362 = x355; 30 | x362 31 | } 32 | } 33 | 34 | class fun1(customerTbl : Store[SEntry4_DDSD]) extends (() => Int ) { 35 | def apply() = { 36 | var x148: Int = 0; 37 | customerTbl.foreach(({ x149: SEntry4_DDSD => { 38 | val x152 = (x149._4).-((x149._3)); 39 | val x413 = SEntry2_DS(x152, (x149._2)); 40 | if((x152.<((1000.toDouble)))) { 41 | val x168 = x148; 42 | x148 = (x168.+(1)) 43 | } else { 44 | } 45 | () 46 | } 47 | })) 48 | val x174 = x148; 49 | var x29: Int = x174; 50 | 1 51 | } 52 | } 53 | 54 | } 55 | -------------------------------------------------------------------------------- /runtime/microbench/README.txt: -------------------------------------------------------------------------------- 1 | Compiling generated MicroBench.cpp 2 | 3 | g++ -O3 MicroBench.cpp -I ../tpcc/pardisgen/include -DPROJECT_ROOT=\"\" 4 | -------------------------------------------------------------------------------- /runtime/tpcc/README.txt: -------------------------------------------------------------------------------- 1 | ddbt.tpcc.tx1: contains base implementation for TPC-C 2 | ddbt.tpcc.tx2: implementation with indices on key (by adding slice operation on maps) 3 | ddbt.tpcc.tx3: removing duplicate accesses to an element in the HashMap (for reading and then updating a tuple) 4 | ddbt.tpcc.tx4: implementation with indices on the combination of key and value (for more efficient slice operations) 5 | ddbt.tpcc.tx5: specific data-structures for Min-Max queries (using SortedSet instead of HashSet) -------------------------------------------------------------------------------- 
/runtime/tpcc/database/add_fkey_idx.sql: -------------------------------------------------------------------------------- 1 | SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0; SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0; 2 | 3 | 4 | CREATE INDEX idx_customer ON customer (c_w_id,c_d_id,c_last,c_first); 5 | CREATE INDEX idx_orders ON orders (o_w_id,o_d_id,o_c_id,o_id); 6 | CREATE INDEX fkey_stock_2 ON stock (s_i_id); 7 | CREATE INDEX fkey_order_line_2 ON order_line (ol_supply_w_id,ol_i_id); 8 | 9 | ALTER TABLE district ADD CONSTRAINT fkey_district_1 FOREIGN KEY(d_w_id) REFERENCES warehouse(w_id); 10 | ALTER TABLE customer ADD CONSTRAINT fkey_customer_1 FOREIGN KEY(c_w_id,c_d_id) REFERENCES district(d_w_id,d_id); 11 | ALTER TABLE history ADD CONSTRAINT fkey_history_2 FOREIGN KEY(h_w_id,h_d_id) REFERENCES district(d_w_id,d_id); 12 | ALTER TABLE new_orders ADD CONSTRAINT fkey_new_orders_1 FOREIGN KEY(no_w_id,no_d_id,no_o_id) REFERENCES orders(o_w_id,o_d_id,o_id); 13 | ALTER TABLE orders ADD CONSTRAINT fkey_orders_1 FOREIGN KEY(o_w_id,o_d_id,o_c_id) REFERENCES customer(c_w_id,c_d_id,c_id); 14 | ALTER TABLE order_line ADD CONSTRAINT fkey_order_line_1 FOREIGN KEY(ol_w_id,ol_d_id,ol_o_id) REFERENCES orders(o_w_id,o_d_id,o_id); 15 | ALTER TABLE stock ADD CONSTRAINT fkey_stock_1 FOREIGN KEY(s_w_id) REFERENCES warehouse(w_id); 16 | ALTER TABLE stock ADD CONSTRAINT fkey_stock_2 FOREIGN KEY(s_i_id) REFERENCES item(i_id); 17 | 18 | 19 | #NOTE: the following FKs are not shard-safe since they can reference a warehouse in another shard 20 | #ALTER TABLE order_line ADD CONSTRAINT fkey_order_line_2 FOREIGN KEY(ol_supply_w_id,ol_i_id) REFERENCES stock(s_w_id,s_i_id); 21 | #ALTER TABLE history ADD CONSTRAINT fkey_history_1 FOREIGN KEY(h_c_w_id,h_c_d_id,h_c_id) REFERENCES customer(c_w_id,c_d_id,c_id); 22 | 23 | 24 | SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS; 25 | SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS; 26 | -------------------------------------------------------------------------------- /runtime/tpcc/database/load_data.sql: -------------------------------------------------------------------------------- 1 | LOAD DATA INFILE 'warehouse.txt' INTO TABLE warehouse; 2 | LOAD DATA INFILE 'district.txt' INTO TABLE district; 3 | LOAD DATA INFILE 'customer.txt' INTO TABLE customer; 4 | LOAD DATA INFILE 'history.txt' INTO TABLE history; 5 | LOAD DATA INFILE 'new_orders.txt' INTO TABLE new_orders; 6 | LOAD DATA INFILE 'orders.txt' INTO TABLE orders; 7 | LOAD DATA INFILE 'order_line.txt' INTO TABLE order_line; 8 | LOAD DATA INFILE 'item.txt' INTO TABLE item; 9 | LOAD DATA INFILE 'stock.txt' INTO TABLE stock; 10 | -------------------------------------------------------------------------------- /runtime/tpcc/itx/ITpccInMemTx.scala: -------------------------------------------------------------------------------- 1 | package ddbt.tpcc.itx 2 | 3 | import java.util.Date 4 | import ddbt.tpcc.tx.TpccTable 5 | 6 | /** 7 | * Shared infrastructure for the in-memory TPC-C transaction implementations 8 | * 9 | * @author Mohammad Dashti 10 | */ 11 | trait IInMemoryTx { self => 12 | def setSharedData(db:AnyRef): self.type 13 | } 14 | 15 | class InMemoryTxImpl extends IInMemoryTx { 16 | var SharedData:TpccTable = null 17 | 18 | override def setSharedData(db:AnyRef) = { 19 | SharedData = db.asInstanceOf[TpccTable] 20 | this 21 | } 22 | } 23 | 24 | trait INewOrderInMem extends INewOrder with IInMemoryTx 25 | trait IPaymentInMem extends IPayment with IInMemoryTx 26 | trait IOrderStatusInMem extends IOrderStatus with IInMemoryTx 27 | trait IDeliveryInMem extends IDelivery with IInMemoryTx 28 |
trait IStockLevelInMem extends IStockLevel with IInMemoryTx 29 | -------------------------------------------------------------------------------- /runtime/tpcc/itx/ITpccTx.scala: -------------------------------------------------------------------------------- 1 | package ddbt.tpcc.itx 2 | 3 | import java.util.Date 4 | 5 | /** 6 | * NewOrder Transaction for TPC-C Benchmark 7 | * 8 | * @author Mohammad Dashti 9 | */ 10 | trait INewOrder { 11 | def newOrderTx(datetime:Date, t_num: Int, w_id:Int, d_id:Int, c_id:Int, o_ol_count:Int, o_all_local:Int, itemid:Array[Int], supware:Array[Int], quantity:Array[Int], price:Array[Double], iname:Array[String], stock:Array[Int], bg:Array[String], amt:Array[Double]): Int 12 | } 13 | 14 | /** 15 | * Payment Transaction for TPC-C Benchmark 16 | * 17 | * @author Mohammad Dashti 18 | */ 19 | trait IPayment { 20 | def paymentTx(datetime:Date, t_num: Int, w_id: Int, d_id: Int, c_by_name: Int, c_w_id: Int, c_d_id: Int, c_id: Int, c_last: String, h_amount: Double):Int 21 | } 22 | 23 | /** 24 | * OrderStatus Transaction for TPC-C Benchmark 25 | * 26 | * @author Mohammad Dashti 27 | */ 28 | trait IOrderStatus { 29 | def orderStatusTx(datetime:Date, t_num: Int, w_id: Int, d_id: Int, c_by_name: Int, c_id: Int, c_last: String):Int 30 | } 31 | 32 | /** 33 | * Delivery Transaction for TPC-C Benchmark 34 | * 35 | * @author Mohammad Dashti 36 | */ 37 | trait IDelivery { 38 | def deliveryTx(datetime:Date, w_id: Int, o_carrier_id: Int): Int 39 | } 40 | 41 | /** 42 | * StockLevel Transaction for TPC-C Benchmark 43 | * 44 | * @author Mohammad Dashti 45 | */ 46 | trait IStockLevel { 47 | def stockLevelTx(t_num: Int, w_id: Int, d_id: Int, threshold: Int):Int 48 | } 49 | 50 | -------------------------------------------------------------------------------- /runtime/tpcc/lib/SuperIndex: -------------------------------------------------------------------------------- 1 | 2 | package ddbt.tpcc.lib 3 | 4 | import SHMap._ 5 | 6 | class SIndexEntry[K,V] { 7 | val s:SHSet[SEntry[K,V]] = new SHSet[SEntry[K,V]] 8 | 9 | def foreach(f: ((K, V)) => Unit): Unit = s.foreach(e => f(e.key, e.value)) 10 | 11 | def foreachEntry(f: SEntry[SEntry[K,V], Boolean] => Unit): Unit = s.foreachEntry(e => f(e)) 12 | } 13 | 14 | class SIndex[P,K,V](val proj:(K,V)=>P) { 15 | val idx = new SHMap[P,SIndexEntry[K,V]] 16 | 17 | def set(entry: SEntry[K,V]):Unit = { 18 | val p:P = proj(entry.key, entry.value) 19 | val s = idx.getNullOnNotFound(p) 20 | if (s==null) { 21 | val newIdx = new SIndexEntry[K,V] 22 | newIdx.s.add(entry) 23 | idx.put(p,newIdx) 24 | } else { 25 | s.s.add(entry) 26 | } 27 | } 28 | 29 | def del(entry: SEntry[K,V]):Unit = del(entry, entry.value) 30 | 31 | def del(entry: SEntry[K,V], v:V):Unit = { 32 | val p:P = proj(entry.key, v) 33 | val s=idx.getNullOnNotFound(p) 34 | if (s!=null) { 35 | s.s.remove(entry) 36 | if (s.s.size==0) idx.remove(p) 37 | } 38 | } 39 | 40 | def slice(part:P):SIndexEntry[K,V] = idx.getNullOnNotFound(part) match { 41 | case null => new SIndexEntry[K,V] 42 | case s=>s 43 | } 44 | 45 | def clear:Unit = idx.clear 46 | } 47 | -------------------------------------------------------------------------------- /runtime/tpcc/lib/SuperIndex.scala: -------------------------------------------------------------------------------- 1 | 2 | package ddbt.tpcc.lib 3 | 4 | import SIndex._ 5 | 6 | object SIndex { 7 | val EMPTY_INDEX_ENTRY = new SIndexEntry[Any,Any] 8 | } 9 | 10 | class SIndexEntry[K,V] { 11 | val s:SHSet[SEntry[K,V]] = new SHSet[SEntry[K,V]] 12 | 13 | def foreach(f: ((K, V)) 
=> Unit): Unit = s.foreach(e => f(e.key, e.value)) 14 | 15 | def foreachEntry(f: SEntry[SEntry[K,V], Boolean] => Unit): Unit = s.foreachEntry(e => f(e)) 16 | } 17 | 18 | class SIndex[P,K,V](val proj:(K,V)=>P, loadFactor: Float, initialCapacity: Int) { 19 | 20 | val idx = new SHMap[P,SIndexEntry[K,V]](loadFactor, initialCapacity) 21 | 22 | def set(entry: SEntry[K,V]):Unit = { 23 | val p:P = proj(entry.key, entry.value) 24 | val s = idx.getNullOnNotFound(p) 25 | if (s==null) { 26 | val newIdx = new SIndexEntry[K,V] 27 | newIdx.s.add(entry) 28 | idx.put(p,newIdx) 29 | } else { 30 | s.s.add(entry) 31 | } 32 | } 33 | 34 | def del(entry: SEntry[K,V]):Unit = del(entry, entry.value) 35 | 36 | def del(entry: SEntry[K,V], v:V):Unit = { 37 | val p:P = proj(entry.key, v) 38 | val s=idx.getNullOnNotFound(p) 39 | if (s!=null) { 40 | s.s.remove(entry) 41 | if (s.s.size==0) idx.remove(p) 42 | } 43 | } 44 | 45 | def slice(part:P):SIndexEntry[K,V] = idx.getNullOnNotFound(part) match { 46 | case null => EMPTY_INDEX_ENTRY.asInstanceOf[SIndexEntry[K,V]] 47 | case s=>s 48 | } 49 | 50 | def clear:Unit = idx.clear 51 | } 52 | -------------------------------------------------------------------------------- /runtime/tpcc/loadtest/AbortedTransactionException.scala: -------------------------------------------------------------------------------- 1 | package ddbt.tpcc.loadtest 2 | 3 | class AbortedTransactionException(val message: String) extends Exception { 4 | def this() = this("") 5 | } 6 | -------------------------------------------------------------------------------- /runtime/tpcc/loadtest/Counter.scala: -------------------------------------------------------------------------------- 1 | package ddbt.tpcc.loadtest 2 | 3 | 4 | class Counter { 5 | 6 | private var count: Long = 0 7 | 8 | def increment(): Long = synchronized { 9 | count += 1; count 10 | } 11 | 12 | def get(): Long = synchronized { 13 | count 14 | } 15 | 16 | def reset(): Long = { 17 | synchronized { 18 | val ret = count 19 | count = 0 20 | ret 21 | } 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /runtime/tpcc/loadtest/DatabaseConnector.scala: -------------------------------------------------------------------------------- 1 | package ddbt.tpcc.loadtest 2 | 3 | import java.io.File 4 | import java.io.FileInputStream 5 | import java.io.IOException 6 | import java.sql.Connection 7 | import java.sql.DriverManager 8 | import java.sql.SQLException 9 | // import java.text.DecimalFormat 10 | // import java.util.Map 11 | import java.util.Properties 12 | // import java.util.Set 13 | // import TpccThread._ 14 | import scala.collection.JavaConversions._ 15 | 16 | import org.slf4j.LoggerFactory 17 | import org.slf4j.Logger 18 | 19 | object DatabaseConnector { 20 | 21 | private val dbcLogger = LoggerFactory.getLogger(classOf[Driver]) 22 | 23 | def connectToDB(driverClassName:String, jdbcUrl:String, db_user:String, db_password:String): Connection = { 24 | DatabaseConnector.dbcLogger.info("Connection to database: driver: " + driverClassName + 25 | " url: " + 26 | jdbcUrl) 27 | Class.forName(driverClassName) 28 | val prop = new Properties() 29 | val connPropFile = new File("conf/jdbc-connection.properties") 30 | if (connPropFile.exists()) { 31 | DatabaseConnector.dbcLogger.info("Loading JDBC connection properties from " + connPropFile.getAbsolutePath) 32 | try { 33 | val is = new FileInputStream(connPropFile) 34 | prop.load(is) 35 | is.close() 36 | if (DatabaseConnector.dbcLogger.isDebugEnabled) { 37 |
DatabaseConnector.dbcLogger.debug("Connection properties: {") 38 | val entries = prop.entrySet() 39 | for (entry <- entries) { 40 | DatabaseConnector.dbcLogger.debug(entry.getKey + " = " + entry.getValue) 41 | } 42 | DatabaseConnector.dbcLogger.debug("}") 43 | } 44 | } catch { 45 | case e: IOException => DatabaseConnector.dbcLogger.error("", e) 46 | } 47 | } else { 48 | DatabaseConnector.dbcLogger.warn(connPropFile.getAbsolutePath + 49 | " does not exist! Using default connection properties") 50 | } 51 | prop.put("user", db_user) 52 | prop.put("password", db_password) 53 | val conn = DriverManager.getConnection(jdbcUrl, prop) 54 | conn.setTransactionIsolation(Connection.TRANSACTION_REPEATABLE_READ) 55 | conn.setAutoCommit(false) 56 | conn 57 | } 58 | } -------------------------------------------------------------------------------- /runtime/tpcc/loadtest/NamedThreadFactory.scala: -------------------------------------------------------------------------------- 1 | package ddbt.tpcc.loadtest 2 | 3 | import java.util.concurrent.ThreadFactory 4 | 5 | class NamedThreadFactory(var namePrefix: String) extends ThreadFactory { 6 | 7 | private var nextID: Int = 1 8 | 9 | def newThread(runnable: Runnable): Thread = { 10 | var id: Int = 0 11 | this.synchronized { 12 | id = nextID 13 | nextID += 1 14 | } 15 | new Thread(runnable, namePrefix + "-" + id) 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /runtime/tpcc/loadtest/TpccConstants.scala: -------------------------------------------------------------------------------- 1 | package ddbt.tpcc.loadtest 2 | 3 | 4 | object TpccConstants { 5 | 6 | var TRANSACTION_COUNT: Int = 5 7 | 8 | var MAXITEMS: Int = 100000 9 | 10 | var CUST_PER_DIST: Int = 3000 11 | 12 | var DIST_PER_WARE: Int = 10 13 | 14 | var ORD_PER_DIST: Int = 3000 15 | 16 | var nums: Array[Int] = new Array[Int](CUST_PER_DIST) 17 | 18 | var MAX_NUM_ITEMS: Int = 15 19 | 20 | var MAX_ITEM_LEN: Int = 24 21 | 22 | val SHOW_OUTPUT = false 23 | 24 | var IN_MEMORY_IMPL_VERSION_UNDER_TEST = -1 25 | } 26 | -------------------------------------------------------------------------------- /runtime/tpcc/loadtest/TpccLoadConfig.scala: -------------------------------------------------------------------------------- 1 | package ddbt.tpcc.loadtest 2 | 3 | import ddbt.tpcc.loadtest.load.FileLoader 4 | import ddbt.tpcc.loadtest.load.JdbcPreparedStatementLoader 5 | import ddbt.tpcc.loadtest.load.JdbcStatementLoader 6 | import ddbt.tpcc.loadtest.load.RecordLoader 7 | import java.io.File 8 | import java.io.IOException 9 | import java.sql.Connection 10 | import scala.beans.{BeanProperty, BooleanBeanProperty} 11 | 12 | object LoadType extends Enumeration { 13 | 14 | type LoadType = Value 15 | 16 | val JDBC_STATEMENT, JDBC_PREPARED_STATEMENT, CSV = Value 17 | 18 | //implicit def convertValue(v: Value): LoadType = v.asInstanceOf[LoadType] 19 | } 20 | 21 | /** 22 | * Copyright (C) 2011 CodeFutures Corporation. All rights reserved. 
23 | */ 24 | class TpccLoadConfig { 25 | 26 | @BeanProperty 27 | var loadType: LoadType.LoadType = LoadType.JDBC_PREPARED_STATEMENT 28 | 29 | @BeanProperty 30 | var conn: Connection = _ 31 | 32 | private var outputDir: File = _ 33 | 34 | private var jdbcInsertIgnore: Boolean = true 35 | 36 | private var jdbcBatchSize: Int = 100 37 | 38 | def createLoader(tableName: String, columnName: Array[String]): RecordLoader = loadType match { 39 | case LoadType.JDBC_STATEMENT => new JdbcStatementLoader(conn, tableName, columnName, jdbcInsertIgnore, jdbcBatchSize) 40 | case LoadType.JDBC_PREPARED_STATEMENT => new JdbcPreparedStatementLoader(conn, tableName, columnName, jdbcInsertIgnore, 41 | jdbcBatchSize) 42 | case LoadType.CSV => new FileLoader(new File(outputDir, tableName + ".txt")) 43 | case _ => throw new IllegalStateException() 44 | } 45 | 46 | def setOutputDir(outputDir: File) { 47 | this.outputDir = outputDir 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /runtime/tpcc/loadtest/load/FileLoader.scala: -------------------------------------------------------------------------------- 1 | package ddbt.tpcc.loadtest.load 2 | 3 | import java.io.BufferedOutputStream 4 | import java.io.File 5 | import java.io.FileOutputStream 6 | import java.io.IOException 7 | import java.text.DateFormat 8 | import java.text.SimpleDateFormat 9 | import java.util.Date 10 | import FileLoader._ 11 | 12 | object FileLoader { 13 | 14 | protected val dateFormat = new SimpleDateFormat("yyyy-MM-dd") 15 | 16 | protected val dateTimeFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss") 17 | } 18 | 19 | /** 20 | * Copyright (C) 2011 CodeFutures Corporation. All rights reserved. 21 | */ 22 | class FileLoader(file: File) extends RecordLoader { 23 | 24 | protected var os: BufferedOutputStream = new BufferedOutputStream(new FileOutputStream(file, true)) 25 | 26 | protected val b = new StringBuilder() 27 | 28 | def load(r: Record) { 29 | b.setLength(0) 30 | val field = r.getField 31 | for (i <- 0 until field.length) { 32 | if (i > 0) { 33 | b.append('\t') 34 | } 35 | if (field(i) == null) { 36 | b.append("\\N") 37 | } else if (field(i).isInstanceOf[Date]) { 38 | b.append(dateTimeFormat.format(field(i).asInstanceOf[Date])) 39 | } else { 40 | b.append(field(i)) 41 | } 42 | } 43 | os.write(b.toString.getBytes) 44 | os.write("\n".getBytes) 45 | } 46 | 47 | def commit() { 48 | } 49 | 50 | def close() { 51 | os.close() 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /runtime/tpcc/loadtest/load/JdbcPreparedStatementLoader.scala: -------------------------------------------------------------------------------- 1 | package ddbt.tpcc.loadtest.load 2 | 3 | import java.sql.Connection 4 | import java.sql.PreparedStatement 5 | import java.sql.SQLException 6 | 7 | /** 8 | * Data loader using prepared statements and batches. This is slower than the JdbcStatementLoader which uses 9 | * bulk inserts. 
10 | */ 11 | class JdbcPreparedStatementLoader(var conn: Connection, 12 | var tableName: String, 13 | var columnName: Array[String], 14 | var ignore: Boolean, 15 | var maxBatchSize: Int) extends RecordLoader { 16 | 17 | var pstmt: PreparedStatement = _ 18 | 19 | var currentBatchSize: Int = _ 20 | 21 | val b = new StringBuilder() 22 | 23 | b.append("INSERT ") 24 | 25 | if (ignore) { 26 | b.append("IGNORE ") 27 | } 28 | 29 | b.append("INTO `").append(tableName).append("` (") 30 | 31 | for (i <- 0 until columnName.length) { 32 | if (i > 0) { 33 | b.append(',') 34 | } 35 | b.append(columnName(i).trim()) 36 | } 37 | 38 | b.append(") VALUES (") 39 | 40 | for (i <- 0 until columnName.length) { 41 | if (i > 0) { 42 | b.append(',') 43 | } 44 | b.append('?') 45 | } 46 | 47 | b.append(')') 48 | 49 | val sql = b.toString 50 | 51 | { 52 | this.conn.setAutoCommit(false) 53 | this.pstmt = conn.prepareStatement(sql) 54 | } 55 | 56 | def load(r: Record) { 57 | for (i <- 0 until columnName.length) { 58 | pstmt.setObject(i + 1, r.getField(i)) 59 | } 60 | pstmt.addBatch() 61 | currentBatchSize += 1 62 | if (currentBatchSize == maxBatchSize) { 63 | executeCurrentBatch() 64 | } 65 | } 66 | 67 | private def executeCurrentBatch() { 68 | pstmt.executeBatch() 69 | currentBatchSize = 0 70 | } 71 | 72 | def commit() { 73 | conn.commit() 74 | } 75 | 76 | def close() { 77 | executeCurrentBatch() 78 | pstmt.close() 79 | conn.commit() 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /runtime/tpcc/loadtest/load/Record.scala: -------------------------------------------------------------------------------- 1 | package ddbt.tpcc.loadtest.load 2 | 3 | import java.util.Arrays 4 | import java.util.Date 5 | import scala.beans.{BeanProperty, BooleanBeanProperty} 6 | 7 | /** 8 | * Simple object to represent a single row of data being loaded to the database (or written to a CSV file). 9 | */ 10 | class Record(columnCount: Int) { 11 | 12 | /** 13 | * Column values. 14 | */ 15 | @BeanProperty 16 | val field = new Array[Any](columnCount) 17 | 18 | /** 19 | * Index of next column to write value to. 20 | */ 21 | private var index: Int = 0 22 | 23 | /** 24 | * Re-usable buffer for building string representations of the row. 25 | */ 26 | private val toStringBuilder = { 27 | val str = new StringBuilder("[") 28 | var i = 0; 29 | while( i < index ) { 30 | str.append(field(i)) 31 | i += 1 32 | } 33 | str.append("]") 34 | } 35 | 36 | def reset() { 37 | index = 0 38 | } 39 | 40 | def add(value: Any) { 41 | field(index) = value 42 | index += 1 43 | } 44 | 45 | def getField(i: Int): Any = field(i) 46 | 47 | def getColumnCount(): Int = field.length 48 | 49 | override def toString(): String = Arrays.toString(field.asInstanceOf[Array[Object]]) 50 | } 51 | -------------------------------------------------------------------------------- /runtime/tpcc/loadtest/load/RecordLoader.scala: -------------------------------------------------------------------------------- 1 | package ddbt.tpcc.loadtest.load 2 | 3 | 4 | /** 5 | * Copyright (C) 2011 CodeFutures Corporation. All rights reserved.
6 | */ 7 | trait RecordLoader { 8 | 9 | def load(r: Record): Unit 10 | 11 | def commit(): Unit 12 | 13 | def close(): Unit 14 | } 15 | -------------------------------------------------------------------------------- /runtime/tpcc/pardisgen/README.txt: -------------------------------------------------------------------------------- 1 | Compiling generated TpccGenSC.cpp 2 | 3 | g++ -std=c++17 -O3 TpccGenSC.cpp -I include -DPROJECT_ROOT=\"\" 4 | -------------------------------------------------------------------------------- /runtime/tpcc/pardisgen/include/hpds/KDouble.cpp: -------------------------------------------------------------------------------- 1 | // The MIT License (MIT) 2 | 3 | // Copyright (c) 2014 Mohammad Dashti 4 | // (www.mdashti.com - mohammad.dashti [at] epfl [dot] ch - mdashti [at] gmail [dot] com) 5 | 6 | // Permission is hereby granted, free of charge, to any person obtaining a copy 7 | // of this software and associated documentation files (the "Software"), to deal 8 | // in the Software without restriction, including without limitation the rights 9 | // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | // copies of the Software, and to permit persons to whom the Software is 11 | // furnished to do so, subject to the following conditions: 12 | 13 | // The above copyright notice and this permission notice shall be included in all 14 | // copies or substantial portions of the Software. 15 | 16 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | // SOFTWARE. 23 | 24 | #include "KDouble.hpp" 25 | 26 | namespace dbtoaster { 27 | 28 | size_t precision = 7; // significative numbers (7 to pass r_sumdivgrp, 10 otherwise) 29 | double KDouble::diff_p = std::pow(0.1,precision); 30 | 31 | } 32 | -------------------------------------------------------------------------------- /runtime/tpcc/pardisgen/include/hpds/pstring.cpp: -------------------------------------------------------------------------------- 1 | // The MIT License (MIT) 2 | 3 | // Copyright (c) 2014 Mohammad Dashti 4 | // (www.mdashti.com - mohammad.dashti [at] epfl [dot] ch - mdashti [at] gmail [dot] com) 5 | 6 | // Permission is hereby granted, free of charge, to any person obtaining a copy 7 | // of this software and associated documentation files (the "Software"), to deal 8 | // in the Software without restriction, including without limitation the rights 9 | // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | // copies of the Software, and to permit persons to whom the Software is 11 | // furnished to do so, subject to the following conditions: 12 | 13 | // The above copyright notice and this permission notice shall be included in all 14 | // copies or substantial portions of the Software. 15 | 16 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE 19 | // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | // SOFTWARE. 23 | 24 | #include "pstring.hpp" 25 | 26 | 27 | //global operators 28 | #ifdef USE_POOL 29 | CharPool<> PString::pool_; 30 | #endif //USE_POOL -------------------------------------------------------------------------------- /runtime/tpcc/pardisgen/include/hpds/pstringops.hpp: -------------------------------------------------------------------------------- 1 | #include "pstring.hpp" 2 | 3 | 4 | inline bool operator==(const char *str1, const PString &str2) 5 | { 6 | return (strcmp(str1, str2.c_str()) == 0); 7 | } 8 | 9 | inline bool operator!=(const char *str1, const PString &str2) 10 | { 11 | return (strcmp(str1, str2.c_str()) != 0); 12 | } 13 | 14 | std::ostream& operator<< (std::ostream& o, PString const& str) 15 | { 16 | return o << "\"" << str.c_str() << "\""; 17 | } 18 | 19 | FORCE_INLINE size_t hash_value(PString const& str) 20 | { 21 | return MurmurHash2(str.c_str(), str.length() * sizeof(char), 0); 22 | } -------------------------------------------------------------------------------- /runtime/tpcc/pardisgen/include/macro.hpp: -------------------------------------------------------------------------------- 1 | #ifndef DBTOASTER_MACRO_HPP 2 | #define DBTOASTER_MACRO_HPP 3 | 4 | #define STRING(s) #s 5 | 6 | //----------------------------------------------------------------------------- 7 | // Microsoft Visual Studio 8 | 9 | #if defined(_MSC_VER) 10 | 11 | #define INLINE inline 12 | #define FORCE_INLINE __forceinline 13 | #define NEVER_INLINE __declspec(noinline) 14 | 15 | //----------------------------------------------------------------------------- 16 | // Other compilers 17 | 18 | #else // defined(_MSC_VER) 19 | 20 | #define INLINE inline 21 | #define FORCE_INLINE inline __attribute__((always_inline)) 22 | #define NEVER_INLINE __attribute__((noinline)) 23 | 24 | #endif // !defined(_MSC_VER) 25 | 26 | #endif /* DBTOASTER_MACRO_HPP */ -------------------------------------------------------------------------------- /runtime/tpcc/pardisgen/include/sc/SpinLock.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | #ifndef __COMMON_SPIN_LOCK_H__ 3 | #define __COMMON_SPIN_LOCK_H__ 4 | #include <atomic> 5 | 6 | class __attribute__((aligned(64))) SpinLock { 7 | std::atomic_flag lock_; 8 | public: 9 | 10 | SpinLock() { 11 | lock_.clear(); 12 | } 13 | 14 | inline void lock() { 15 | while (lock_.test_and_set(std::memory_order_acquire)); 16 | } 17 | 18 | inline void unlock() { 19 | lock_.clear(std::memory_order_release); 20 | } 21 | 22 | inline bool try_lock() { 23 | return !lock_.test_and_set(std::memory_order_acquire); 24 | } 25 | 26 | }; 27 | 28 | #endif 29 | -------------------------------------------------------------------------------- /runtime/tpcc/pardisgen/include/sc/Transaction.h: -------------------------------------------------------------------------------- 1 | #ifndef TRANSACTION_H 2 | #define TRANSACTION_H 3 | #include "types.h" 4 | 5 | struct ALIGN Transaction { 6 | VBase* undoBufferHead; 7 | PRED* predicateHead; 8 | static TransactionManager& tm; 9 | timestamp startTS; 10 | volatile timestamp commitTS; 11 | Transaction * prevCommitted; 12 | uint8_t threadId; 13 | 14 | uint8_t ptype; 15 | Transaction* failedBecauseOf; 16 | Transaction() { 17 |
-------------------------------------------------------------------------------- /runtime/tpcc/pardisgen/include/sc/Transaction.h: --------------------------------------------------------------------------------

#ifndef TRANSACTION_H
#define TRANSACTION_H
#include "types.h"

struct ALIGN Transaction {
    VBase* undoBufferHead;   // head of this transaction's undo-buffer chain
    PRED* predicateHead;     // head of this transaction's predicate list
    static TransactionManager& tm;
    timestamp startTS;
    volatile timestamp commitTS;
    Transaction* prevCommitted;
    uint8_t threadId;

    uint8_t ptype;
    Transaction* failedBecauseOf;

    Transaction() {
        failedBecauseOf = nullptr;
        threadId = 0;
        commitTS = initCommitTS;
        undoBufferHead = nullptr;
        predicateHead = nullptr;
        prevCommitted = nullptr;
    }

    void reset() {
        threadId = 0;
        commitTS = initCommitTS;
        undoBufferHead = nullptr;
        predicateHead = nullptr;
        prevCommitted = nullptr;
    }
};

#endif /* TRANSACTION_H */

-------------------------------------------------------------------------------- /runtime/tpcc/pardisgen/include/sc/mmap.hpp: --------------------------------------------------------------------------------

#ifdef SC_GENERATED // using SC
#include "ScExtra.h"

#ifdef CONCURRENT
#include "cmmap.hpp" // For SC concurrent CPP
#else
#include "mmap2.hpp" // For SC CPP
#endif
#endif

-------------------------------------------------------------------------------- /runtime/tpcc/pardisgen/include/smhasher/MurmurHash2.hpp: --------------------------------------------------------------------------------

//-----------------------------------------------------------------------------
// MurmurHash2 was written by Austin Appleby, and is placed in the public
// domain. The author hereby disclaims copyright to this source code.

#ifndef _MURMURHASH2_H_
#define _MURMURHASH2_H_

//-----------------------------------------------------------------------------
// Platform-specific functions and macros

// Microsoft Visual Studio

#if defined(_MSC_VER) && (_MSC_VER < 1600)

typedef unsigned char uint8_t;
typedef unsigned int uint32_t;
typedef unsigned __int64 uint64_t;

// Other compilers

#else // defined(_MSC_VER)

#include <stdint.h>

#endif // !defined(_MSC_VER)

//-----------------------------------------------------------------------------

uint32_t MurmurHash2        ( const void * key, int len, uint32_t seed );
uint64_t MurmurHash64A      ( const void * key, int len, uint64_t seed );
uint64_t MurmurHash64B      ( const void * key, int len, uint64_t seed );
uint32_t MurmurHash2A       ( const void * key, int len, uint32_t seed );
uint32_t MurmurHashNeutral2 ( const void * key, int len, uint32_t seed );
uint32_t MurmurHashAligned2 ( const void * key, int len, uint32_t seed );

//-----------------------------------------------------------------------------

#endif // _MURMURHASH2_H_
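A minimal sketch of calling the 64-bit variant declared above on a raw byte key; not from the repository, and it assumes the program is linked against the MurmurHash2 implementation:

#include <cstdint>
#include <cstdio>
#include <cstring>
#include "smhasher/MurmurHash2.hpp"

int main() {
    const char* key = "WAREHOUSE_1";
    // Hash the key bytes with a fixed seed; the same bytes and seed
    // always yield the same 64-bit value, so it is usable as a map hash.
    uint64_t h = MurmurHash64A(key, (int)std::strlen(key), 0);
    std::printf("%llu\n", (unsigned long long)h);
    return 0;
}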
-------------------------------------------------------------------------------- /runtime/tpcc/pardisgen/include/smhasher/MurmurHash3.hpp: --------------------------------------------------------------------------------

//-----------------------------------------------------------------------------
// MurmurHash3 was written by Austin Appleby, and is placed in the public
// domain. The author hereby disclaims copyright to this source code.

#ifndef _MURMURHASH3_H_
#define _MURMURHASH3_H_

// Check Windows
#if _WIN32 || _WIN64
#if _WIN64
#define ENV64BIT
#else
#define ENV32BIT
#endif
#endif

// Check GCC
#if __GNUC__
#if __x86_64__ || __ppc64__
#define ENV64BIT
#else
#define ENV32BIT
#endif
#endif

//-----------------------------------------------------------------------------
// Platform-specific functions and macros

// Microsoft Visual Studio

#if defined(_MSC_VER) && (_MSC_VER < 1600)

typedef unsigned char uint8_t;
typedef unsigned int uint32_t;
typedef unsigned __int64 uint64_t;

// Other compilers

#else // defined(_MSC_VER)

#include <stdint.h>

#endif // !defined(_MSC_VER)

//-----------------------------------------------------------------------------
void MurmurHash3_x86_32 ( const void * key, int len, uint32_t seed, void * out );

// Both branches declare the same signature; the 32-/64-bit split is resolved
// in the implementation file.
#ifndef ENV64BIT
void MurmurHash3_128 ( const void * key, int len, uint32_t seed, void * out );
#else
void MurmurHash3_128 ( const void * key, int len, uint32_t seed, void * out );
#endif
//-----------------------------------------------------------------------------

#endif // _MURMURHASH3_H_

-------------------------------------------------------------------------------- /runtime/tpcc/pardisgen/include/smhasher/PMurHash.cpp: --------------------------------------------------------------------------------

#include <functional> // std::hash
#include <string>

#include "PMurHash.hpp"

std::hash<long> long_hasher;
std::hash<double> double_hasher;
std::hash<std::string> string_hasher;

FORCE_INLINE void hash_combine(std::size_t& seed, const long& v)
{
    seed ^= long_hasher(v) + 0x9e3779b9 + (seed << 6) + (seed >> 2);
}
FORCE_INLINE void hash_combine(std::size_t& seed, const double& v)
{
    seed ^= double_hasher(v) + 0x9e3779b9 + (seed << 6) + (seed >> 2);
}
FORCE_INLINE void hash_combine(std::size_t& seed, const std::string& v)
{
    seed ^= string_hasher(v) + 0x9e3779b9 + (seed << 6) + (seed >> 2);
}
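The hash_combine overloads above fold per-field hashes into one seed, boost-style, which is how a composite key collapses to a single hash value. A standalone sketch of the same pattern, restated locally so it runs without the repository headers (the key fields are illustrative):

#include <cstdio>
#include <functional>
#include <string>

// Same boost-style mixing step as hash_combine above.
static void combine(std::size_t& seed, std::size_t h) {
    seed ^= h + 0x9e3779b9 + (seed << 6) + (seed >> 2);
}

int main() {
    // Hash a composite (warehouse, district, customer) key field by field.
    long w = 1, d = 5;
    std::string c = "CUSTOMER_42";
    std::size_t seed = 0;
    combine(seed, std::hash<long>()(w));
    combine(seed, std::hash<long>()(d));
    combine(seed, std::hash<std::string>()(c));
    std::printf("%zu\n", seed);
    return 0;
}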
-------------------------------------------------------------------------------- /runtime/tpcc/pardisgen/include/types.hpp: --------------------------------------------------------------------------------

#ifndef DBTOASTER_TYPES_H
#define DBTOASTER_TYPES_H

#include <cstddef> // size_t; best-guess reconstruction -- the original angle-bracketed header names were lost
#include <string>  // best-guess reconstruction, see above

#include "hpds/pstring.hpp"
#include "hpds/KDouble.hpp"

namespace dbtoaster {
    typedef int date;

    typedef double DOUBLE_TYPE;

    typedef size_t HASH_RES_t;

    typedef PString STRING_TYPE;
}

#endif /* DBTOASTER_TYPES_H */

-------------------------------------------------------------------------------- /storelib/build.sbt: --------------------------------------------------------------------------------

Seq(
  // --------- Project information
  name := "dbtoaster-sstore",

  // --------- Paths
  scalaSource in Compile <<= baseDirectory / "src",
  javaSource in Compile <<= baseDirectory / "src",
  sourceDirectory in Compile <<= baseDirectory / "src"
)

libraryDependencies ++= Seq(
  "org.scala-lang.modules" % "scala-xml_2.11" % "1.0.5",
  "com.typesafe.akka" %% "akka-actor" % "2.5.32",
  "com.typesafe.akka" %% "akka-remote" % "2.5.32",
  "org.scala-lang" % "scala-compiler" % scalaVersion.value
)

-------------------------------------------------------------------------------- /storelib/src/lib/ExecutionProfiler.scala: --------------------------------------------------------------------------------

package ddbt.lib

import collection.mutable.HashMap

object ExP {
  val startTimes = HashMap[String, Long]()
  val durations = HashMap[String, Long]()
  val counters = HashMap[String, Long]()

  def start(n: String): Unit = {
    startTimes(n) = System.nanoTime()
    ()
  }

  def end(n: String): Unit = {
    val e = System.nanoTime()
    val s = startTimes(n)
    val d = e - s
    if (durations.contains(n)) {
      durations(n) += d
      counters(n) += 1
    } else {
      durations += ((n, d))
      counters += ((n, 1))
    }
    ()
  }

  def print() = {
    System.err.println(durations.keySet.map(k => s"$k count = ${counters(k)} time = ${durations(k) / 1000000.0} ms avg time = ${durations(k) / counters(k)} ns").mkString("\n"))
  }
}

-------------------------------------------------------------------------------- /storelib/src/lib/IQuery.scala: --------------------------------------------------------------------------------

package ddbt.lib

import ddbt.lib.Messages.StreamEvent

trait IQuery {
  def handleEvent(e: StreamEvent): Any
}

-------------------------------------------------------------------------------- /storelib/src/lib/Stopwatch.scala: --------------------------------------------------------------------------------

package ddbt.lib

import scala.collection.mutable.ArrayBuffer

class Stopwatch {

  private var startTime = -1L
  private var elapsedTime = 0L
  private var running = false
  private val checkpoints = new ArrayBuffer[Long]()

  def start(): Stopwatch = {
    if (!running) {
      startTime = System.nanoTime
      running = true
    }
    this
  }

  def stop(checkpoint: Boolean = true): Stopwatch = {
    if (running) {
      val lastRun = System.nanoTime - startTime
      elapsedTime += lastRun
      if (checkpoint) checkpoints += lastRun
      running = false
    }
    this
  }

  def getElapsedTime: Long =
    if (!running) elapsedTime / 1000000L
    else throw new RuntimeException("Reading elapsed time of a running stopwatch")

  def reset() = {
    startTime = -1L
    elapsedTime = 0L
    checkpoints.clear
    running = false
  }

  def checkpoint() = {
    if (running) {
      val currentTime = System.nanoTime
      val lastRun = currentTime - startTime
      elapsedTime += lastRun
      checkpoints += lastRun
      startTime = currentTime
    }
  }

  def getCheckpoints(): ArrayBuffer[Long] = checkpoints map (_ / 1000000L)

  @inline
  def time[R](block: => R, checkpoint: Boolean = true): R = {
    this.start()
    val result = block
    this.stop(checkpoint)
    result
  }
}


object Stopwatch {

  def time[R](message: String, block: => R): R = {
    val startTime = System.nanoTime
    val result = block
    val elapsedTime = (System.nanoTime - startTime) / 1000000L
    println(message + elapsedTime + " ms")
    result
  }
}
-------------------------------------------------------------------------------- /storelib/src/lib/store/Entry.java: --------------------------------------------------------------------------------

package ddbt.lib.store;

/**
 * Abstract entry that is specialized for each map.
 * There is no key/value distinction as it is encoded by the indices.
 * Functions cmp() and hash() operate over the projection #i of the tuple;
 * this projection is actually never materialized.
 *
 * @author TCK
 */
public abstract class Entry {

  final Object[] data;

  public Entry(int n) { data = new Object[n]; }

  abstract public Entry copy(); // returns a copy of the entry, for B-Trees only

  // copies the contents of "e" into this entry
  public void copyFrom(Entry e) {
    throw new UnsupportedOperationException();
  }

  Entry next, prev;

  //abstract public boolean zero(); // the tuple can safely be deleted from the map
  //abstract public void merge(Entry e); // combine e into this (some kind of aggregation)

  // backward compatibility
  //public boolean zero() { return false; }
  //public void merge(Entry e) {} // again we create typing issues here
}

-------------------------------------------------------------------------------- /storelib/src/lib/store/MultiRes.scala: --------------------------------------------------------------------------------

package ddbt.lib.store

abstract class MultiRes {
  def isEmpty(): Boolean
}

class SliceRes[E <: Entry](val sliceHead: IdxHashEntry[E]) extends MultiRes {
  override def isEmpty(): Boolean = sliceHead == null
}

class ForEachRes[E <: Entry](val head: E) extends MultiRes {
  override def isEmpty(): Boolean = false // TODO: Fix later
}

-------------------------------------------------------------------------------- /storelib/src/lib/storeScala/Entry.scala: --------------------------------------------------------------------------------

package ddbt.lib.storeScala

abstract class Entry(n: Int) {
  val data = new Array[Any](n)

  def copy(): Entry // returns a copy of the entry, for B-Trees only

  // copies the contents of "e" into this entry
  def copyFrom(e: Entry): Unit = {
    throw new UnsupportedOperationException
  }

  var next: Entry = null

  var prev: Entry = null
}

-------------------------------------------------------------------------------- /storelib/src/lib/storeScala/MultiRes.scala: --------------------------------------------------------------------------------

package ddbt.lib.storeScala

abstract class MultiRes {
  def isEmpty(): Boolean
}

class SliceRes[E <: Entry](val sliceHead: IdxHashEntry[E]) extends MultiRes {
  override def isEmpty(): Boolean = sliceHead == null
}

class ForEachRes[E <: Entry](val head: E) extends MultiRes {
  override def isEmpty(): Boolean = false // TODO: Fix later
}

--------------------------------------------------------------------------------