├── .gitignore
├── PAK.Rproj
├── README.md
├── analysis_module
│   ├── appinfo
│   │   ├── analysis.sh
│   │   ├── appinfo.R
│   │   └── featureinfo.xml
│   ├── envinfo
│   │   ├── EnvGather.R
│   │   ├── analysis.sh
│   │   └── featureinfo.xml
│   ├── paktimer
│   │   ├── Makefile
│   │   ├── analysis.sh
│   │   ├── featureinfo.xml
│   │   ├── paktimer
│   │   └── paktimer.c
│   └── tau
│       ├── analysis.sh
│       ├── featureinfo.xml
│       ├── featureofPAPI.r
│       └── outputformat.R
├── applications
│   ├── multiplyexample.c
│   ├── optimized.cpp
│   └── result.xml
├── framework
│   ├── DBModule
│   │   └── functions.R
│   ├── EvaluatorModule
│   │   └── Evaluator.R
│   ├── ExtractorModule
│   │   ├── Analyze.old
│   │   └── Extractor.R
│   ├── Interface
│   │   ├── Analyser.R
│   │   └── Generator.R
│   ├── LearnerModule
│   │   └── Learner.R
│   ├── OptimizerModule
│   │   └── Optimizer.R
│   ├── ProducerModule
│   │   ├── .RData
│   │   └── Producer.R
│   ├── Tuning
│   │   └── Tuner.R
│   ├── dependencies.R
│   └── lib
│       ├── OptimizationSpace.R
│       ├── learners.R
│       └── producers.R
├── generator_module
│   └── optimizeCompilerFlag
│       ├── transform.sh
│       └── variantinfo.xml
├── pak.R
└── tutorial
    └── autotuning_compilerflag.R
/.gitignore:
--------------------------------------------------------------------------------
1 | *.svn*
2 | .Rproj.user
3 | .Rhistory
--------------------------------------------------------------------------------
/PAK.Rproj:
--------------------------------------------------------------------------------
1 | Version: 1.0
2 |
3 | RestoreWorkspace: Default
4 | SaveWorkspace: Default
5 | AlwaysSaveHistory: Default
6 |
7 | EnableCodeIndexing: Yes
8 | UseSpacesForTab: Yes
9 | NumSpacesForTab: 2
10 | Encoding: UTF-8
11 |
12 | RnwWeave: Sweave
13 | LaTeX: pdfLaTeX
14 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # PAK
2 | A performance tuning and knowledge management suite
3 | # Introduction
4 | PAK is a general autotuning framework for scientific applications that significantly reduces the programmer's workload
5 | and speeds up code optimisation.
6 | We believe optimising code should be an enjoyable, creative experience. PAK takes the pain out of the process by
7 | providing models for the different roles in an optimisation project, such as the feature extractor model and the optimiser model.
8 | PAK is accessible yet powerful, providing the tools needed for large, robust applications.
9 | # PAK models
10 |
11 |
12 | ## Analyser
13 | ### functional description of analyser
14 |
15 | Analyses the features of an application instance and its performance metrics.
16 |
17 | ### Customizing
18 |
19 | First define the configuration files, featureinfo.xml and analysis.sh.
20 |
21 | #### featureinfo.xml
22 |
23 | ##### feature definition
24 | A feature definition includes the static/dynamic type, the feature name, the feature description, the enable environment variable (when the variable is TRUE,
25 | the corresponding feature is analysed) and the data type.
26 | Supported data types are numerical, category, boolean and combination (nesting is supported).
27 |
28 | ##### examples
29 | ```
30 | <features>
31 | <feature>
32 | <type>static</type>
33 | <name>arrayshape</name>
34 | <description>the shape of array in target program</description>
35 | <enable_variable>Enable_arrayshape</enable_variable>
36 | <datatype>
37 | <datatype>numerical</datatype>
38 | <datatype>numerical</datatype>
39 | <datatype>numerical</datatype>
40 | </datatype>
41 | </feature>
42 | </features>
43 | ```
44 |
45 | #### analysis.sh
46 |
47 | - input: the target application, environment variables.
48 | - output: the result file and the name of the result file
49 | - example of the output format:
50 |
51 | ```
52 | <features>
53 | <feature>
54 | <name>arrayshape</name>
55 | <value>
56 | <value>128</value>
57 | <value>128</value>
58 | <value>256</value>
59 | </value>
60 | </feature>
61 | </features>
62 | ```
63 |
64 | ### Initializing object
65 |
66 | `C.Analyser(Name,Path,Features)`
67 |
68 | - Name: the name of the analyser (the directory name of its configuration files)
69 | - Path: the path of the configuration files
70 | - Features: the features to analyse
71 |
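A minimal sketch of constructing an analyser by hand, using the bundled `paktimer` module and its `time` feature; calling the constructor directly with exactly this signature is an assumption (the framework normally creates analysers internally):

```
# hypothetical usage of the constructor described above:
# wrap the bundled paktimer analysis module and request its "time" feature
myanalyser<-C.Analyser$new(Name="paktimer",
                           Path="analysis_module/",
                           Features=c("time"))
```
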
72 | ## Generator
73 | ### functional description of generator
74 |
75 | Based on the input parameters, optimises and transforms the application instance.
76 |
77 | ### Customizing
78 |
79 | First define the configuration files, variantinfo.xml and transform.sh.
80 |
81 | #### variantinfo.xml
82 |
83 | ##### variant parameter definition
84 | A variant parameter definition includes the parameter name, its description, the enable environment variable (used to pass
85 | the variant parameter value) and the data type (the same data types as for features).
86 |
87 | ##### examples
88 |
89 | ```
90 | <variants>
91 | <variant>
92 | <name>Unrolling</name>
93 | <description>the unrolling factor</description>
94 | <enable_variable>ENABLE_Unrolling</enable_variable>
95 | <datatype>numerical</datatype>
96 | </variant>
97 | </variants>
98 | ```
99 |
100 | #### transform.sh
101 | - input: the name of the target application, the name of the output file, environment variables.
102 | - output: the optimised instance and the name of the output file
103 |
104 | ### Initializing object
105 |
106 | `C.Generator(Name,Path,Parameters)`
107 |
108 | - Name: the name of the generator (the directory name of its configuration files)
109 | - Path: the path of the configuration files
110 | - Parameters: the received variant parameters
111 |
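A matching sketch for the generator side, using the `optimizeCompilerFlag` generator shipped in this repository and the `Unrolling` parameter from the example above; the exact constructor call is an assumption based on the signature listed here:

```
# hypothetical usage of C.Generator: pass the variant parameters declared in variantinfo.xml
mygenerator<-C.Generator$new(Name="optimizeCompilerFlag",
                             Path="generator_module/",
                             Parameters=list(Unrolling=4))
```
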
112 | ## Extractor
113 | ### functional description of extractor
114 |
115 | The instance analyser, in charge of the static, environment and input analysis of an instance. Every
116 | extractor analyses the instance by combining one or multiple analyser objects. The analysis results
117 | can be used for parameter production, prediction and knowledge mining.
118 |
119 | ### Customizing
120 |
121 | Instantiate a customised object with the input parameters.
122 |
123 | ### Initializing object
124 |
125 | `C.Extractor$new(analysers)`
126 | - analysers: a list mapping each analyser name to the features it should extract.
127 |
128 | ### example
129 |
130 | ```
131 | # create a list that maps the hpsFrontend analyser to the flop_intensity feature
132 | analyser.hpsFrontend<-list(hpsFrontend=c("flop_intensity"))
133 | # init an extractor object using the previous list
134 | myextractor<-C.Extractor$new(analysers=analyser.hpsFrontend)
135 | ```
136 |
137 | ## Producer
138 | ### functional description of producer
139 | The base class of producers, defining the interface method that concrete producers implement. A producer drives the
140 | parameter production process: using the instance analysis results and the evaluation score of the previous step, it can
141 | implement various algorithms such as heuristic search, exhaustive search and model
142 | prediction.
143 |
144 | ### Customizing
145 |
146 | Customise a producer by implementing the interface method getParameter.
147 |
148 | ### base class: `C.Producer()`:
149 |
150 | `getParameter(step,extractor.result,score)`:
151 | - step: the current iteration step
152 | - extractor.result: the analysed features of the running instance
153 | - score: the score of the previous parameter set
154 |
155 | ### example:
156 | ```
157 | # an exhaustion search producer
158 | C.Producer.Exhaustion<-setRefClass(
159 | "C.Producer.Exhaustion",
160 | contains="C.Producer",
161 | fields = list(parameter.space="data.frame"),
162 | methods = list(
163 | #Init function
164 | initialize=function(parameter.space){
165 | parameter.space<<-parameter.space
166 | },
167 | #Implemente the interface method
168 | getParameter=function(step,extractor.result,score)
169 | {
170 | if(stepparameter.number)
208 | return (data.frame())
209 | v.score<<-c()
210 | v.pos<<-0
211 | }
212 | new.parameter<-local.optimal
213 | new.parameter[[v.idx]]<-parameter.range[[v.idx]][v.pos+1]
214 | return (new.parameter)
215 | }
216 | )
217 | )
218 | ```
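A smaller self-contained sketch of the same `getParameter` interface, assuming (as the exhaustive example suggests) that returning an empty data.frame signals that the search is finished:

```
# a minimal producer sketch: replay a fixed table of candidate parameters, one row per step
C.Producer.Fixed<-setRefClass(
  "C.Producer.Fixed",
  contains="C.Producer",
  fields=list(parameter.space="data.frame"),
  methods=list(
    initialize=function(parameter.space=data.frame()){
      parameter.space<<-parameter.space
    },
    getParameter=function(step,extractor.result,score){
      # an empty data.frame tells the tuner there is nothing left to try (assumed convention)
      if(step>nrow(parameter.space))
        return(data.frame())
      return(parameter.space[step,,drop=FALSE])
    }
  )
)
```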
219 |
220 | ## Optimizer
221 | ### functional description of optimiser
222 |
223 | Optimises the instance, including code transformation, generation of optimised variants and environment setup. Currently every
224 | optimiser contains a single generator that produces the optimised variant.
225 |
226 | ### Customizing
227 |
228 | Instantiate a customised object with the input parameters.
229 |
230 | ### Initializing object
231 |
232 | `C.Optimizer(generator.name,output.name)`
233 |
234 | - generator.name: the generator that produces the optimised variant
235 | - output.name: the file name of the optimised variant, default: "optimized.cpp"
236 |
237 | ### example
238 |
239 | ```
240 | # init an optimizer using hpsGen, a code generator for stencils
241 | myoptimizer<-C.Optimizer$new(generator.name="hpsGen")
242 | ```
243 |
244 | ## Evaluator
245 | ### functional description of evaluator
246 |
247 | Evaluates the running instance during autotuning, including collecting performance metrics and computing
248 | scores. Every evaluator includes one or multiple analyser objects that measure the metrics of the optimised
249 | variant. Every metric is associated with one evaluation function: when the metric meets the requirement the function
250 | returns 0, otherwise it returns a negative number whose absolute value is the distance to the requirement. The evaluator
251 | finally returns a total score, the sum over all metrics; when the total score is 0, autotuning has converged.
252 |
253 | ### Customizing
254 |
255 | Instantiate a customised object with the input parameters.
256 |
257 | ### Initializing object
258 |
259 | `C.Evaluator(sub.evaluators)`
260 | - sub.evaluators: a list of sub-evaluators; the index is the analyser name and the value is a list mapping metrics to evaluation
261 | functions.
262 |
263 | ### example
264 | ```
265 | # create a sub.evaluator, which is a list mapping features to evaluation functions
266 | sub.evaluator.tau<-list(P_WALL_CLOCK_TIME=function(x){if(x>100) return (100-x) else return(0)})
267 | # init a C.Evaluator object
268 | myevaluator<-C.Evaluator$new(sub.evaluators=list(tau=sub.evaluator.tau))
269 | ```
270 |
271 | ## example: implementing a full tuning
272 |
273 | ```
274 | # create a tuner
275 | mytuner<-C.Tuner$new(app=app,optimizer=myoptimizer,evaluator=myevaluator,producer=myproducer,need.store=TRUE)
276 | # perform tuning
277 | mytuner$tune()
278 | # output best parameters
279 | print(mytuner$best.parameters)
280 | ```
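The tuner call above assumes that `app`, `myproducer`, `myoptimizer` and `myevaluator` already exist; a hypothetical way to fill in the two that the earlier examples do not construct, reusing the exhaustive producer defined above:

```
# hypothetical setup for the tuning example: the target application path and a small search space
app<-"applications/multiplyexample.c"
parameter.space<-data.frame(Unrolling=c(1,2,4,8))
myproducer<-C.Producer.Exhaustion$new(parameter.space=parameter.space)
```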
281 |
282 |
283 |
--------------------------------------------------------------------------------
/analysis_module/appinfo/analysis.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | source /home/lyl/.bashrc
4 | Rscript $(cd "$(dirname "$0")"; pwd)/appinfo.R $1
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/analysis_module/appinfo/appinfo.R:
--------------------------------------------------------------------------------
1 | library("XML")
2 |
3 | bashrc<-"/home/lyl/.bashrc"
4 | analysername<-"appinfo"
5 | appinfo.list<-list()
6 |
7 | args<-commandArgs(T)
8 | appname<-args[1]
9 | appinfo.list$MD5<-gsub(" .*","",system(paste0("md5sum ",appname),intern = TRUE))
10 |
11 |
12 |
13 | doc = newXMLDoc()
14 | fsnode<-newXMLNode(name="features",doc=doc)
15 | for(i in 1:length(appinfo.list))
16 | {
17 | fnode<-newXMLNode(name = "feature",parent = fsnode)
18 | addChildren(fnode,
19 | newXMLNode(name="name",names(appinfo.list[i])),
20 | newXMLNode(name="value",appinfo.list[[i]])
21 | )
22 |
23 |
24 | }
25 | rfilename<-paste0(Sys.time(),analysername,"anaylsisresult.xml")
26 | rfilename<-sub(":","",rfilename)
27 | rfilename<-sub("-","",rfilename)
28 | rfilename<-sub(" ","",rfilename)
29 | output<-saveXML(doc,file=rfilename,prefix = sprintf(" ",analysername))
30 | cat(output)
31 |
32 |
--------------------------------------------------------------------------------
/analysis_module/appinfo/featureinfo.xml:
--------------------------------------------------------------------------------
1 |
2 | <features>
3 | <feature>
4 | <name>MD5</name>
5 | <datatype>category</datatype>
6 | <type>static</type>
7 | <description>calculates and verifies 128-bit MD5 hashes, as described in RFC 1321</description>
8 | <avail>TRUE</avail>
9 | <enable_variable>ENABLE_MD5</enable_variable>
10 | </feature>
11 | </features>
12 |
--------------------------------------------------------------------------------
/analysis_module/envinfo/EnvGather.R:
--------------------------------------------------------------------------------
1 | library("XML")
2 |
3 | bashrc<-"/home/lyl/.bashrc"
4 |
5 | analysername<-"EnvGather"
6 | envlist<-list()
7 |
8 | cpuinfo<-system("lscpu",intern = TRUE)
9 | meminfo<-system("cat /proc/meminfo",intern = TRUE)
10 | info<-c(cpuinfo,meminfo)
11 | for(i in info)
12 | {
13 | tmp<-unlist(strsplit(i,": *"))
14 |
15 | if(tmp[1]=="Architecture")
16 | envlist[tmp[1]]<-as.character(tmp[2])
17 |
18 | if(tmp[1]=="CPUs")
19 | envlist[tmp[1]]<-as.numeric(tmp[2])
20 |
21 | if(tmp[1]=="CPU MHz")
22 | envlist[sub(" ","_",tmp[1])]<-as.numeric(tmp[2])
23 |
24 | if(tmp[1]=="Threads per core")
25 | envlist[sub(" ","_",tmp[1])]<-as.numeric(tmp[2])
26 |
27 | if(tmp[1]=="Cores per socket")
28 | envlist[sub(" ","_",tmp[1])]<-as.numeric(tmp[2])
29 |
30 | if(tmp[1]=="Byte Order")
31 | envlist[sub(" ","_",tmp[1])]<-as.character(tmp[2])
32 |
33 | if(tmp[1]=="Sockets")
34 | envlist[sub(" ","_",tmp[1])]<-as.numeric(tmp[2])
35 |
36 | if(tmp[1]=="NUMA nodes")
37 | envlist[sub(" ","_",tmp[1])]<-as.numeric(tmp[2])
38 |
39 | if(tmp[1]=="L1d cache")
40 | envlist["L1d_cache_K"]<-as.numeric(sub("K","",tmp[2]))
41 |
42 | if(tmp[1]=="L1i cache")
43 | envlist["L1i_cache_K"]<-as.numeric(sub("K","",tmp[2]))
44 |
45 | if(tmp[1]=="L2 cache")
46 | envlist["L2_cache_K"]<-as.numeric(sub("K","",tmp[2]))
47 |
48 | if(tmp[1]=="L3 cache")
49 | envlist["L3_cache_K"]<-as.numeric(sub("K","",tmp[2]))
50 |
51 | if(tmp[1]=="MemTotal")
52 | envlist["MemTotal_K"]<-as.numeric(sub("kB","",tmp[2]))
53 |
54 |
55 | }
56 | envlist["OS_version"]<-system("head -n 1 /etc/issue",intern = TRUE)
57 |
58 | envlist["gcc_version"]<-system(sprintf("source %s; gcc -dumpversion;",bashrc),intern = TRUE)
59 |
60 | envlist["icc_version"]<-system(sprintf("source %s; icc -dumpversion;",bashrc),intern = TRUE)
61 |
62 | envlist["nvcc_version"]<-gsub(".*release *|,.*","",system(sprintf("source %s; nvcc --version | grep release",bashrc),intern = TRUE))
63 |
64 |
65 |
66 |
67 | doc = newXMLDoc()
68 | fsnode<-newXMLNode(name="features",doc=doc)
69 | for(i in 1:length(envlist))
70 | {
71 | fnode<-newXMLNode(name = "feature",parent = fsnode)
72 | addChildren(fnode,
73 | newXMLNode(name="name",names(envlist[i])),
74 | newXMLNode(name="value",envlist[[i]])
75 | )
76 |
77 |
78 | }
79 | rfilename<-paste0(Sys.time(),analysername,"anaylsisresult.xml")
80 | rfilename<-sub(":","",rfilename)
81 | rfilename<-sub("-","",rfilename)
82 | rfilename<-sub(" ","",rfilename)
83 | output<-saveXML(doc,file=rfilename,prefix = sprintf(" ",analysername))
84 | cat(output)
85 |
86 |
87 | #
88 | # featureinfo_doc = newXMLDoc()
89 | # fsnode<-newXMLNode(name="features",doc=featureinfo_doc)
90 | # for(i in 1:length(envlist))
91 | # {
92 | # fnode<-newXMLNode(name = "feature",parent = fsnode)
93 | # if(is.numeric(envlist[[i]]))
94 | # type<-"numerical"
95 | # else
96 | # type<-"category"
97 | #
98 | # addChildren(fnode,
99 | # newXMLNode(name="name",names(envlist[i])),
100 | # newXMLNode(name="datatype",type),
101 | # newXMLNode(name="type","static"),
102 | # newXMLNode(name="description","As name shows"),
103 | # newXMLNode(name="avail","TRUE"),
104 | # newXMLNode(name="enable_variable",paste0("ENABLE_",names(envlist[i])))
105 | # )
106 | #
107 | #
108 | # }
109 | #
110 | # output<-saveXML(featureinfo_doc,file="featureinfo.xml",prefix = sprintf(" ",analysername))
111 |
112 |
--------------------------------------------------------------------------------
/analysis_module/envinfo/analysis.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | source /home/lyl/.bashrc
4 | Rscript $(cd "$(dirname "$0")"; pwd)/EnvGather.R
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/analysis_module/envinfo/featureinfo.xml:
--------------------------------------------------------------------------------
1 | <features><feature><name>Architecture</name><datatype>category</datatype><type>static</type><description>As name shows</description><avail>TRUE</avail><enable_variable>ENABLE_Architecture</enable_variable></feature><feature><name>Byte_Order</name><datatype>category</datatype><type>static</type><description>As name shows</description><avail>TRUE</avail><enable_variable>ENABLE_Byte_Order</enable_variable></feature><feature><name>CPU_MHz</name><datatype>numerical</datatype><type>static</type><description>As name shows</description><avail>TRUE</avail><enable_variable>ENABLE_CPU_MHz</enable_variable></feature><feature><name>L1d_cache_K</name><datatype>numerical</datatype><type>static</type><description>As name shows</description><avail>TRUE</avail><enable_variable>ENABLE_L1d_cache_K</enable_variable></feature><feature><name>L1i_cache_K</name><datatype>numerical</datatype><type>static</type><description>As name shows</description><avail>TRUE</avail><enable_variable>ENABLE_L1i_cache_K</enable_variable></feature><feature><name>L2_cache_K</name><datatype>numerical</datatype><type>static</type><description>As name shows</description><avail>TRUE</avail><enable_variable>ENABLE_L2_cache_K</enable_variable></feature><feature><name>L3_cache_K</name><datatype>numerical</datatype><type>static</type><description>As name shows</description><avail>TRUE</avail><enable_variable>ENABLE_L3_cache_K</enable_variable></feature><feature><name>MemTotal_K</name><datatype>numerical</datatype><type>static</type><description>As name shows</description><avail>TRUE</avail><enable_variable>ENABLE_MemTotal_K</enable_variable></feature><feature><name>OS_version</name><datatype>category</datatype><type>static</type><description>As name shows</description><avail>TRUE</avail><enable_variable>ENABLE_OS_version</enable_variable></feature><feature><name>gcc_version</name><datatype>category</datatype><type>static</type><description>As name shows</description><avail>TRUE</avail><enable_variable>ENABLE_gcc_version</enable_variable></feature><feature><name>icc_version</name><datatype>category</datatype><type>static</type><description>As name shows</description><avail>TRUE</avail><enable_variable>ENABLE_icc_version</enable_variable></feature><feature><name>nvcc_version</name><datatype>category</datatype><type>static</type><description>As name shows</description><avail>TRUE</avail><enable_variable>ENABLE_nvcc_version</enable_variable></feature></features>
--------------------------------------------------------------------------------
/analysis_module/paktimer/Makefile:
--------------------------------------------------------------------------------
1 | CXX=gcc
2 | INCLUDES=-I.
3 | CXXFLAGS=$(INCLUDES)
4 |
5 | default:
6 | $(CXX) paktimer.c -o paktimer
7 | clean::
8 | -rm -f paktimer
9 |
--------------------------------------------------------------------------------
/analysis_module/paktimer/analysis.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | #export ENABLE_PAKTIME=TRUE
3 | TIMEENABLE=`env |grep ENABLE_PAKTIME | grep TRUE`
4 | if [ "$TIMEENABLE" = "ENABLE_PAKTIME=TRUE" ];then
5 | $(cd "$(dirname "$0")"; pwd)/paktimer $1 1>/dev/null 2>/dev/null
6 | time=`cat temp.time`
7 | rm temp.time result.xml
8 | echo "<features>
9 | <feature>
10 | <name>time</name>
11 | <value>"$time"</value>
12 | </feature>
13 | </features>">>result.xml
14 | echo "result.xml"
15 | fi
16 |
17 |
18 |
19 |
20 |
--------------------------------------------------------------------------------
/analysis_module/paktimer/featureinfo.xml:
--------------------------------------------------------------------------------
1 | <features>
2 | <feature>
3 | <type>dynamic</type>
4 | <name>time</name>
5 | <description>the execution time of a given application</description>
6 | <enable_variable>ENABLE_PAKTIME</enable_variable>
7 | <datatype>numerical</datatype>
8 | </feature>
9 | </features>
10 |
--------------------------------------------------------------------------------
/analysis_module/paktimer/paktimer:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCIC-PARALLEL/Graphine-SDK/89a5a562228d263ca1e16db3c3764435c57a6898/analysis_module/paktimer/paktimer
--------------------------------------------------------------------------------
/analysis_module/paktimer/paktimer.c:
--------------------------------------------------------------------------------
1 | #include <stdio.h>
2 | #include <sys/time.h>
3 | struct timeval t1;
4 | struct timeval t2;
5 |
6 | int main(int argc,char** argv)
7 | {
8 | FILE *fp;
9 | double time;
10 | //printf("the target file is %s \n", argv[1]);
11 | gettimeofday(&t1,0);
12 | system(argv[1]);
13 | gettimeofday(&t2,0);
14 | time = ((((1000000.0 * (t2.tv_sec - t1.tv_sec)) + t2.tv_usec) - t1.tv_usec) / 1000000.0);
15 |
16 | if(fp=fopen("temp.time","wb"))
17 | fprintf(fp,"%.4f",time);
18 |
19 | return 0;
20 | }
21 |
--------------------------------------------------------------------------------
/analysis_module/tau/analysis.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | source /home/lyl/.bashrc
3 | export PATH=/home/lyl/tools/tau2.23_icpc_pdt_papi/x86_64/bin:$PATH
4 |
5 |
6 | export TAU_MAKEFILE=/home/lyl/tools/tau2.23_icpc_pdt_papi/x86_64/lib/Makefile.tau-icpc-papi-pdt
7 | export TAU_THROTTLE=0
8 |
9 | i=1
10 | for f in `env |grep ENABLE_ | grep TRUE`
11 | do
12 | FNAME=${f#ENABLE_}
13 | FNAME=${FNAME%%=*}
14 | export COUNTER${i}=$FNAME
15 | let i+=1
16 | done
17 |
18 |
19 |
20 |
21 | icpc_flag=$icpc_flag
22 | icpc_flag=$icpc_flag
23 | icpc_flag=$icpc_flag
24 | icpc_flag=$icpc_flag
25 | icpc_flag=$icpc_flag
26 |
27 |
28 |
29 | #CC=icc
30 | CC=tau_cxx.sh
31 | rm MULTI__P* -rf
32 | $CC $icpc_flag -c -vec-report2 $1 -o mid.o 2>/dev/null 1>/dev/null
33 | $CC $icpc_flag mid.o main.cpp -o myexe 2>/dev/null 1>/dev/null
34 |
35 | ./myexe 2>/dev/null 1>/dev/null
36 |
37 |
38 | Rscript $(cd "$(dirname "$0")"; pwd)/outputformat.R
39 |
40 | rm *.o
41 | rm myexe
42 |
43 |
44 |
45 |
--------------------------------------------------------------------------------
/analysis_module/tau/featureinfo.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | P_WALL_CLOCK_TIME Time cost of target application
4 | dynamic TRUE ENABLE_P_WALL_CLOCK_TIME numerical
5 |
6 | PAPI_REF_CYC Reference clock cycles
7 | dynamic TRUE ENABLE_PAPI_REF_CYC numerical
8 |
9 | PAPI_VEC_DP Double precision vector/SIMD instructions
10 | dynamic TRUE ENABLE_PAPI_VEC_DP numerical
11 |
12 | PAPI_VEC_SP Single precision vector/SIMD instructions dynamic TRUE ENABLE_PAPI_VEC_SP numerical PAPI_DP_OPS Floating point operations; optimized to count scaled double precision vector operations dynamic TRUE ENABLE_PAPI_DP_OPS numerical PAPI_SP_OPS Floating point operations; optimized to count scaled single precision vector operations dynamic TRUE ENABLE_PAPI_SP_OPS numerical PAPI_FP_OPS Floating point operations dynamic TRUE ENABLE_PAPI_FP_OPS numerical PAPI_FNV_INS Floating point inverse instructions dynamic FALSE ENABLE_PAPI_FNV_INS numerical PAPI_FSQ_INS Floating point square root instructions dynamic FALSE ENABLE_PAPI_FSQ_INS numerical PAPI_FDV_INS Floating point divide instructions dynamic TRUE ENABLE_PAPI_FDV_INS numerical PAPI_FAD_INS Floating point add instructions dynamic FALSE ENABLE_PAPI_FAD_INS numerical PAPI_FML_INS Floating point multiply instructions dynamic FALSE ENABLE_PAPI_FML_INS numerical PAPI_L3_TCW Level 3 total cache writes dynamic TRUE ENABLE_PAPI_L3_TCW numerical PAPI_L2_TCW Level 2 total cache writes dynamic TRUE ENABLE_PAPI_L2_TCW numerical PAPI_L1_TCW Level 1 total cache writes dynamic FALSE ENABLE_PAPI_L1_TCW numerical PAPI_L3_TCR Level 3 total cache reads dynamic TRUE ENABLE_PAPI_L3_TCR numerical PAPI_L2_TCR Level 2 total cache reads dynamic TRUE ENABLE_PAPI_L2_TCR numerical PAPI_L1_TCR Level 1 total cache reads dynamic FALSE ENABLE_PAPI_L1_TCR numerical PAPI_L3_TCA Level 3 total cache accesses dynamic TRUE ENABLE_PAPI_L3_TCA numerical PAPI_L2_TCA Level 2 total cache accesses dynamic TRUE ENABLE_PAPI_L2_TCA numerical PAPI_L1_TCA Level 1 total cache accesses dynamic FALSE ENABLE_PAPI_L1_TCA numerical PAPI_L3_TCH Level 3 total cache hits dynamic FALSE ENABLE_PAPI_L3_TCH numerical PAPI_L2_TCH Level 2 total cache hits dynamic FALSE ENABLE_PAPI_L2_TCH numerical PAPI_L1_TCH Level 1 total cache hits dynamic FALSE ENABLE_PAPI_L1_TCH numerical PAPI_L3_ICW Level 3 instruction cache writes dynamic FALSE ENABLE_PAPI_L3_ICW numerical PAPI_L2_ICW Level 2 instruction cache writes dynamic FALSE ENABLE_PAPI_L2_ICW numerical PAPI_L1_ICW Level 1 instruction cache writes dynamic FALSE ENABLE_PAPI_L1_ICW numerical PAPI_L3_ICR Level 3 instruction cache reads dynamic TRUE ENABLE_PAPI_L3_ICR numerical PAPI_L2_ICR Level 2 instruction cache reads dynamic TRUE ENABLE_PAPI_L2_ICR numerical PAPI_L1_ICR Level 1 instruction cache reads dynamic FALSE ENABLE_PAPI_L1_ICR numerical PAPI_L3_ICA Level 3 instruction cache accesses dynamic TRUE ENABLE_PAPI_L3_ICA numerical PAPI_L2_ICA Level 2 instruction cache accesses dynamic TRUE ENABLE_PAPI_L2_ICA numerical PAPI_L1_ICA Level 1 instruction cache accesses dynamic FALSE ENABLE_PAPI_L1_ICA numerical PAPI_L3_ICH Level 3 instruction cache hits dynamic FALSE ENABLE_PAPI_L3_ICH numerical PAPI_L2_ICH Level 2 instruction cache hits dynamic TRUE ENABLE_PAPI_L2_ICH numerical PAPI_L1_ICH Level 1 instruction cache hits dynamic FALSE ENABLE_PAPI_L1_ICH numerical PAPI_L3_DCW Level 3 data cache writes dynamic TRUE ENABLE_PAPI_L3_DCW numerical PAPI_L2_DCW Level 2 data cache writes dynamic TRUE ENABLE_PAPI_L2_DCW numerical PAPI_L1_DCW Level 1 data cache writes dynamic FALSE ENABLE_PAPI_L1_DCW numerical PAPI_L3_DCR Level 3 data cache reads dynamic TRUE ENABLE_PAPI_L3_DCR numerical PAPI_L2_DCR Level 2 data cache reads dynamic TRUE ENABLE_PAPI_L2_DCR numerical PAPI_L1_DCR Level 1 data cache reads dynamic FALSE ENABLE_PAPI_L1_DCR numerical PAPI_L3_DCA Level 3 data cache accesses dynamic TRUE ENABLE_PAPI_L3_DCA numerical PAPI_L2_DCA Level 2 data cache accesses 
dynamic TRUE ENABLE_PAPI_L2_DCA numerical PAPI_L1_DCA Level 1 data cache accesses dynamic FALSE ENABLE_PAPI_L1_DCA numerical PAPI_L2_DCH Level 2 data cache hits dynamic TRUE ENABLE_PAPI_L2_DCH numerical PAPI_L1_DCH Level 1 data cache hits dynamic FALSE ENABLE_PAPI_L1_DCH numerical PAPI_SYC_INS Synchronization instructions completed dynamic FALSE ENABLE_PAPI_SYC_INS numerical PAPI_LST_INS Load/store instructions completed dynamic FALSE ENABLE_PAPI_LST_INS numerical PAPI_TOT_CYC Total cycles dynamic TRUE ENABLE_PAPI_TOT_CYC numerical PAPI_FP_STAL Cycles the FP unit(s) are stalled dynamic FALSE ENABLE_PAPI_FP_STAL numerical PAPI_RES_STL Cycles stalled on any resource dynamic FALSE ENABLE_PAPI_RES_STL numerical PAPI_VEC_INS Vector/SIMD instructions (could include integer) dynamic FALSE ENABLE_PAPI_VEC_INS numerical PAPI_BR_INS Branch instructions dynamic TRUE ENABLE_PAPI_BR_INS numerical PAPI_SR_INS Store instructions dynamic TRUE ENABLE_PAPI_SR_INS numerical PAPI_LD_INS Load instructions dynamic TRUE ENABLE_PAPI_LD_INS numerical PAPI_FP_INS Floating point instructions dynamic TRUE ENABLE_PAPI_FP_INS numerical PAPI_INT_INS Integer instructions dynamic FALSE ENABLE_PAPI_INT_INS numerical PAPI_TOT_INS Instructions completed dynamic TRUE ENABLE_PAPI_TOT_INS numerical PAPI_TOT_IIS Instructions issued dynamic FALSE ENABLE_PAPI_TOT_IIS numerical PAPI_FMA_INS FMA instructions completed dynamic FALSE ENABLE_PAPI_FMA_INS numerical PAPI_BR_PRC Conditional branch instructions correctly predicted dynamic TRUE ENABLE_PAPI_BR_PRC numerical PAPI_BR_MSP Conditional branch instructions mispredicted dynamic TRUE ENABLE_PAPI_BR_MSP numerical PAPI_BR_NTK Conditional branch instructions not taken dynamic TRUE ENABLE_PAPI_BR_NTK numerical PAPI_BR_TKN Conditional branch instructions taken dynamic TRUE ENABLE_PAPI_BR_TKN numerical PAPI_BR_CN Conditional branch instructions dynamic TRUE ENABLE_PAPI_BR_CN numerical PAPI_BR_UCN Unconditional branch instructions dynamic TRUE ENABLE_PAPI_BR_UCN numerical PAPI_HW_INT Hardware interrupts dynamic FALSE ENABLE_PAPI_HW_INT numerical PAPI_FUL_CCY Cycles with maximum instructions completed dynamic FALSE ENABLE_PAPI_FUL_CCY numerical PAPI_STL_CCY Cycles with no instructions completed dynamic FALSE ENABLE_PAPI_STL_CCY numerical PAPI_FUL_ICY Cycles with maximum instruction issue dynamic FALSE ENABLE_PAPI_FUL_ICY numerical PAPI_STL_ICY Cycles with no instruction issue dynamic TRUE ENABLE_PAPI_STL_ICY numerical PAPI_MEM_WCY Cycles Stalled Waiting for memory writes dynamic FALSE ENABLE_PAPI_MEM_WCY numerical PAPI_MEM_RCY Cycles Stalled Waiting for memory Reads dynamic FALSE ENABLE_PAPI_MEM_RCY numerical PAPI_MEM_SCY Cycles Stalled Waiting for memory accesses dynamic FALSE ENABLE_PAPI_MEM_SCY numerical PAPI_CSR_TOT Total store conditional instructions dynamic FALSE ENABLE_PAPI_CSR_TOT numerical PAPI_CSR_SUC Successful store conditional instructions dynamic FALSE ENABLE_PAPI_CSR_SUC numerical PAPI_CSR_FAL Failed store conditional instructions dynamic FALSE ENABLE_PAPI_CSR_FAL numerical PAPI_TLB_SD Translation lookaside buffer shootdowns dynamic FALSE ENABLE_PAPI_TLB_SD numerical PAPI_L3_DCH Level 3 data cache hits dynamic FALSE ENABLE_PAPI_L3_DCH numerical PAPI_PRF_DM Data prefetch cache misses dynamic FALSE ENABLE_PAPI_PRF_DM numerical PAPI_BTAC_M Branch target address cache misses dynamic FALSE ENABLE_PAPI_BTAC_M numerical PAPI_L2_STM Level 2 store misses dynamic TRUE ENABLE_PAPI_L2_STM numerical PAPI_L2_LDM Level 2 load misses dynamic FALSE ENABLE_PAPI_L2_LDM numerical PAPI_L1_STM 
Level 1 store misses dynamic TRUE ENABLE_PAPI_L1_STM numerical PAPI_L1_LDM Level 1 load misses dynamic TRUE ENABLE_PAPI_L1_LDM numerical PAPI_TLB_TL Total translation lookaside buffer misses dynamic FALSE ENABLE_PAPI_TLB_TL numerical PAPI_TLB_IM Instruction translation lookaside buffer misses dynamic TRUE ENABLE_PAPI_TLB_IM numerical PAPI_TLB_DM Data translation lookaside buffer misses dynamic TRUE ENABLE_PAPI_TLB_DM numerical PAPI_LSU_IDL Cycles load/store units are idle dynamic FALSE ENABLE_PAPI_LSU_IDL numerical PAPI_FPU_IDL Cycles floating point units are idle dynamic FALSE ENABLE_PAPI_FPU_IDL numerical PAPI_FXU_IDL Cycles integer units are idle dynamic FALSE ENABLE_PAPI_FXU_IDL numerical PAPI_BRU_IDL Cycles branch units are idle dynamic FALSE ENABLE_PAPI_BRU_IDL numerical PAPI_L3_STM Level 3 store misses dynamic FALSE ENABLE_PAPI_L3_STM numerical PAPI_L3_LDM Level 3 load misses dynamic FALSE ENABLE_PAPI_L3_LDM numerical PAPI_CA_ITV Requests for cache line intervention dynamic FALSE ENABLE_PAPI_CA_ITV numerical PAPI_CA_INV Requests for cache line invalidation dynamic FALSE ENABLE_PAPI_CA_INV numerical PAPI_CA_CLN Requests for exclusive access to clean cache line dynamic FALSE ENABLE_PAPI_CA_CLN numerical PAPI_CA_SHR Requests for exclusive access to shared cache line dynamic FALSE ENABLE_PAPI_CA_SHR numerical PAPI_CA_SNP Requests for a snoop dynamic FALSE ENABLE_PAPI_CA_SNP numerical PAPI_L3_TCM Level 3 cache misses dynamic TRUE ENABLE_PAPI_L3_TCM numerical PAPI_L2_TCM Level 2 cache misses dynamic TRUE ENABLE_PAPI_L2_TCM numerical PAPI_L1_TCM Level 1 cache misses dynamic TRUE ENABLE_PAPI_L1_TCM numerical PAPI_L3_ICM Level 3 instruction cache misses dynamic FALSE ENABLE_PAPI_L3_ICM numerical PAPI_L3_DCM Level 3 data cache misses dynamic FALSE ENABLE_PAPI_L3_DCM numerical PAPI_L2_ICM Level 2 instruction cache misses dynamic TRUE ENABLE_PAPI_L2_ICM numerical PAPI_L2_DCM Level 2 data cache misses dynamic TRUE ENABLE_PAPI_L2_DCM numerical PAPI_L1_ICM Level 1 instruction cache misses dynamic TRUE ENABLE_PAPI_L1_ICM numerical PAPI_L1_DCM Level 1 data cache misses dynamic TRUE ENABLE_PAPI_L1_DCM numerical
13 |
--------------------------------------------------------------------------------
/analysis_module/tau/featureofPAPI.r:
--------------------------------------------------------------------------------
1 | library(XML)
2 |
3 | papi_str<-system("source ~/.bashrc; papi_avail",intern=TRUE)
4 | str(papi_str)
5 | bg<-FALSE
6 | for(oneline in papi_str)
7 | {
8 | tmp<-unlist(strsplit(oneline,"[ ][ ][ ]*"))
9 |
10 | if(length(tmp)>2 && tmp[2]=="Name")
11 | {
12 | featuredata<<-data.frame(name=character(),avail=logical(),description=character(),enable_variable=character(),stringsAsFactors=FALSE)
13 | bg<-TRUE
14 | }else if(bg){
15 | if(length(tmp)<2)
16 | break;
17 |
18 | part1<-unlist(strsplit(oneline,"0x"))
19 | namestr<-part1[1]
20 |
21 | part2<-unlist(strsplit(part1[2],"[ ][ ][ ]*"))
22 |
23 | Avail<-part2[2]
24 | if(Avail=="Yes")
25 | Avail<- TRUE
26 | else
27 | Avail<-FALSE
28 |
29 | featuredata<<-rbind(data.frame(name=namestr,description=part2[4],avail=Avail,enable_variable=sprintf("ENABLE_%s",namestr),stringsAsFactors=FALSE),featuredata)
30 | }
31 | }
32 |
33 | featuredata$type<-"dynamic"
34 | featuredata$datatype<-"numerical"
35 |
36 |
37 | doc = newXMLDoc()
38 | fsnode<-newXMLNode(name="features",doc=doc)
39 | for(i in 1:nrow(featuredata))
40 | {
41 | feature<-featuredata[i,]
42 | fnode<-newXMLNode(name = "feature",parent = fsnode)
43 | addChildren(fnode,
44 | newXMLNode(name="name",feature$name),
45 | newXMLNode(name="description",feature$description),
46 | newXMLNode(name="type",feature$type),
47 | newXMLNode(name="avail",feature$avail),
48 | newXMLNode(name="enable_variable",feature$enable_variable),
49 | newXMLNode(name="datatype",feature$datatype)
50 | )
51 |
52 | }
53 | saveXML(doc,file="./featureinfo.xml",prefix = " ")
54 |
--------------------------------------------------------------------------------
/analysis_module/tau/outputformat.R:
--------------------------------------------------------------------------------
1 | library("XML")
2 | analysername<-"tau"
3 | metrics.vec<- system("env |grep ENABLE",intern = TRUE)
4 |
5 | metrics.list<-list()
6 | for(m in metrics.vec)
7 | {
8 | m.name<-gsub("ENABLE_|=TRUE","",m)
9 | if(length(metrics.vec)>1)
10 | r<-system(paste0("tail MULTI__",m.name,"/profile.0.0.0 |grep main;"),intern = TRUE)
11 | else
12 | r<-system(paste0("tail profile.0.0.0 |grep main;"),intern = TRUE)
13 | r<-unlist(strsplit(r,'"'))[3]
14 | r<-unlist(strsplit(r,' '))[5]
15 | metrics.list[m.name]<-r
16 | }
17 |
18 |
19 |
20 | doc = newXMLDoc()
21 | fsnode<-newXMLNode(name="features",doc=doc)
22 | for(i in 1:length(metrics.list))
23 | {
24 | fnode<-newXMLNode(name = "feature",parent = fsnode)
25 | addChildren(fnode,
26 | newXMLNode(name="name",names(metrics.list[i])),
27 | newXMLNode(name="value",metrics.list[[i]])
28 | )
29 |
30 |
31 | }
32 | rfilename<-paste0(Sys.time(),analysername,"anaylsisresult.xml")
33 | rfilename<-sub(":","",rfilename)
34 | rfilename<-sub("-","",rfilename)
35 | rfilename<-sub(" ","",rfilename)
36 | output<-saveXML(doc,file=rfilename,prefix = sprintf(" \n",analysername))
37 | cat(output)
--------------------------------------------------------------------------------
/applications/multiplyexample.c:
--------------------------------------------------------------------------------
1 | #include
2 | #include
3 | #include
4 | #define DATATYPE float
5 | #define NX 3000
6 | #define NY 3000
7 | #define ITER 30
8 | DATATYPE a[NX][NY];
9 | DATATYPE b[NX][NY];
10 | DATATYPE c[NX][NY];
11 | int main()
12 | {
13 | int i,j,t;
14 |
15 | for(i=0;i
--------------------------------------------------------------------------------
/applications/result.xml:
--------------------------------------------------------------------------------
1 | <features>
2 | <feature>
3 | <name>time</name>
4 | <value>1.7945</value>
5 | </feature>
6 | </features>
7 |
--------------------------------------------------------------------------------
/framework/DBModule/functions.R:
--------------------------------------------------------------------------------
1 | OpenDB <- function() {
2 | # Open a SQL conn if the global.conn has not been initialized,
3 | # and save the conn to global.conn, or just return global.conn
4 | #
5 | # Returns:
6 | # The connection opened or retrieved from the global environment
7 | if(!exists("global.conn"))
8 | global.conn <<- odbcConnect(datasource,database.user,database.pwd)
9 | conn <- global.conn
10 | return(conn)
11 | }
12 |
13 |
14 | CloseDB <- function() {
15 | # Close the connection
16 | #
17 | # Args:
18 | # conn: The conn opened by performanceDB.SQL.dbopen
19 | if(exists("global.conn")) {
20 | close(global.conn)
21 | rm(global.conn)
22 | }
23 | }
24 |
25 | CheckTableExistence<-function(dbname,tbname){
26 | # check whether a table 'tbname' exists in database 'dbname'
27 | #
28 | #
29 | # Returns:
30 | # TRUE if exist, or FALSE if not exist
31 | cmd.str <- sprintf('show tables in %s like "%s";',
32 | dbname, tbname)
33 | conn <- OpenDB()
34 | result <- sqlQuery(conn,cmd.str)
35 | if(!is.data.frame(result))
36 | stop(paste0("error when executing sql command in CheckTableExistence: ",result))
37 | if(nrow(result)==0)
38 | return (FALSE)
39 | else
40 | return (TRUE)
41 | }
42 |
43 | CreateTable<-function(format,dbname,tbname){
44 | # create a table 'tbname' in database 'dbname'. The structure of the table
45 | # is specified by the dataframe 'format'
46 | #
47 | # Returns:
48 | # TRUE if success, or FALSE if fail
49 |
50 | cmd.str <- sprintf('show tables in %s like "%s";',
51 | dbname, tbname)
52 | conn <- OpenDB()
53 | result <- sqlQuery(conn,cmd.str)
54 | if(!is.data.frame(result))
55 | stop(paste0("error when execute sql command in CreateTable: ",result))
56 | if(nrow(result)==1)
57 | {
58 | print(sprintf("table %s already exists in database %s",tbname,dbname))
59 | return (FALSE)
60 | }
61 |
62 | FormatTable<-function(format)
63 | {
64 | fmt.str<-"id int(10) primary key not null auto_increment,"
65 | for(i in 1:nrow(format))
66 | {
67 | name<-format[i,]$name
68 | datatype<-format[i,]$datatype
69 | if(datatype=="numerical")
70 | fmt.str<-paste0(fmt.str,name," DOUBLE")
71 | if(datatype=="category")
72 | fmt.str<-paste0(fmt.str,name," VARCHAR(255)")
73 | if(datatype=="boolen")
74 | fmt.str<-paste0(fmt.str,name," TINYINT")
75 |
76 | if(i!=nrow(format))
77 | fmt.str<-paste0(fmt.str,",")
78 | }
79 | return (fmt.str)
80 | }
81 | cmd.str<-sprintf("create table %s.%s (%s);",dbname,tbname,FormatTable(format))
82 | result <- sqlQuery(conn,cmd.str)
83 | return(TRUE)
84 | }
85 |
86 | CheckAndUpdateMainTableCol<-function(subtable.names,dbname="hpts"){
87 | # check whether the main table contains a column that links to each subtable.
88 | # If not, alter the main table.
89 | # Args:
90 | # subtable.names: the names of the subtables that will be linked to the main table
91 | # dbname: the name of database
92 | #
93 |
94 | cmd.str<-sprintf("select COLUMN_NAME from information_schema.COLUMNS where table_name = 'main' and table_schema = '%s';",dbname)
95 | conn<-OpenDB()
96 | result<-sqlQuery(conn,cmd.str)
97 |
98 | maintable.names<-as.character(result[[1]])
99 |
100 | notin.names<-subtable.names[!(subtable.names %in% maintable.names)]
101 | if(length(notin.names)>0)
102 | for(i in 1:length(notin.names))
103 | {
104 | result<-sqlQuery(conn,sprintf("alter table %s.main add %s int(10);",dbname,notin.names[i]))
105 | }
106 | }
107 |
108 | CheckAndUpdateTableStructure<-function(data.names,dbname="hpts",tbname="main"){
109 | # check if the table 'tbname' contains column that named as data.names.
110 | # If not, alter the table.
111 | # Args:
112 | # data.names: the names of data that will be inserted to the table
113 | # dbname: the name of database
114 | # tbname: the name of table
115 |
116 | cmd.str<-sprintf("select COLUMN_NAME from information_schema.COLUMNS where table_name = '%s' and table_schema = '%s';",tbname,dbname)
117 | conn<-OpenDB()
118 | result<-sqlQuery(conn,cmd.str)
119 |
120 | table.names<-as.character(result[[1]])
121 |
122 | not.in.names<-data.names[!(data.names %in% table.names)]
123 | if(length(not.in.names)>0)
124 | for(i in 1:length(not.in.names))
125 | {
126 | result<-sqlQuery(conn,sprintf("alter table %s.%s add %s int(10);",dbname,tbname,not.in.names[i]))
127 | }
128 | }
129 |
130 | StoreAnalysis<-function(analysis.results,override=TRUE,
131 | analysis_module.path=path.analysis_tools)
132 | {
133 | # store the analysis result to DB.
134 | # Args:
135 | # analysis.results: a list that contain analysis result of mutiple analyzers
136 | # analysis_module.path: the directory path of analyzers
137 | #
138 | # Returns:
139 | # the id in main table if success, or 0 if fail
140 | key.analyser.names<-c("appinfo","envinfo")
141 | for(ka in key.analyser.names)
142 | {
143 | if(length(analysis.results[[ka]])==0)
144 | stop(sprintf("%s module can not be NULL!",ka))
145 | }
146 | analyser.names<-names(analysis.results)
147 | nonkey.analyser.names<-analyser.names[!analyser.names %in% key.analyser.names]
148 |
149 | keytable.id<-data.frame(name=character(),value=integer(),stringsAsFactors = FALSE)
150 | keytable.id.format<-data.frame(name=character(),datatype=character(),stringsAsFactors = FALSE)
151 | nonkeytable.id<-data.frame(name=character(),value=integer(),stringsAsFactors = FALSE)
152 | nonkeytable.id.format<-data.frame(name=character(),datatype=character(),stringsAsFactors = FALSE)
153 |
154 | # key
155 | for(analyser in key.analyser.names)
156 | {
157 | result<-analysis.results[[analyser]]
158 | format<-SerializeXmlDoc(paste0(analysis_module.path,analyser,"/featureinfo.xml"),"datatype")
159 | if(CheckTableExistence("hpts",analyser)==FALSE)
160 | CreateTable(format,"hpts",analyser)
161 |
162 | if(override)
163 | {
164 | df<-SelectFromDB(result,format,"hpts",analyser)
165 | if(nrow(df)>0)
166 | sub.id<-df[1,]$id
167 | else
168 | sub.id<-InsertToDB(result,format,"hpts",analyser)
169 | }
170 | else
171 | sub.id<-InsertToDB(result,format,"hpts",analyser)
172 | subtable.id<-data.frame(name=analyser,value=sub.id,stringsAsFactors = FALSE)
173 | subtable.id.format<-data.frame(name=analyser,datatype="numerical",stringsAsFactors = FALSE)
174 | keytable.id<-rbind(keytable.id,subtable.id)
175 | keytable.id.format<-rbind(keytable.id.format,subtable.id.format)
176 | }
177 |
178 |
179 | # non key
180 | for(analyser in nonkey.analyser.names)
181 | {
182 | result<-analysis.results[[analyser]]
183 | format<-SerializeXmlDoc(paste0(analysis_module.path,analyser,"/featureinfo.xml"),"datatype")
184 | if(CheckTableExistence("hpts",analyser)==FALSE)
185 | CreateTable(format,"hpts",analyser)
186 |
187 | sub.id<-InsertToDB(result,format,"hpts",analyser)
188 | subtable.id<-data.frame(name=analyser,value=sub.id,stringsAsFactors = FALSE)
189 | subtable.id.format<-data.frame(name=analyser,datatype="numerical",stringsAsFactors = FALSE)
190 | nonkeytable.id<-rbind(nonkeytable.id,subtable.id)
191 | nonkeytable.id.format<-rbind(nonkeytable.id.format,subtable.id.format)
192 | }
193 |
194 |
195 |
196 | if(CheckTableExistence("hpts","main")==FALSE)
197 | {
198 | print("main table does not exist, please create a main table!")
199 | return(0)
200 | }else{
201 | subtable<-rbind(keytable.id,nonkeytable.id)
202 | subtable.format<-rbind(keytable.id.format,nonkeytable.id.format)
203 | #check if the table structure is same to subtable.format. if not, alter table structure in database
204 | CheckAndUpdateMainTableCol(subtable.format$name,"hpts")
205 |
206 |
207 | if(override)
208 | {
209 | target.row<-SelectFromDB(keytable.id,keytable.id.format,"hpts","main")
210 | if(nrow(target.row)>0)
211 | UpdateForDB(keytable.id,keytable.id.format,nonkeytable.id,nonkeytable.id.format,"hpts","main")
212 | else
213 | InsertToDB(subtable,subtable.format,"hpts","main")
214 |
215 | newdata<-SelectFromDB(keytable.id,keytable.id.format,"hpts","main")
216 | mid<-newdata[1,]$id
217 | }
218 | else
219 | {
220 |
221 | mid<-InsertToDB(subtable,subtable.format,"hpts","main")
222 | }
223 | return (mid)
224 | }
225 | }
226 |
227 |
228 | StoreTransformation<-function(main.id, generator.results,analysis.results,override=TRUE,
229 | generator_module.path=path.generator_tools,
230 | analysis_module.path=path.analysis_tools)
231 | {
232 | # store the main table id, generator parameters, analysis result to DB.
233 | # Args:
234 | # generator.results: a list that contain data of a generator. The size of list is 1
235 | # generator_module.path: the directory path of generators
236 | # override: if override record that have same generator parameter and main.id
237 | # analysis.results: a list that contain analysis result of mutiple analyzers
238 | # analysis_module.path: the directory path of analyzers
239 | #
240 | # Returns:
241 | # the id in main table if success, or 0 if fail
242 |
243 | #check if the generator.result size =1
244 | if(length(generator.results)!=1)
245 | stop(sprintf("Error length of generator.results in StoreResultForGeneratorToDB,should be 1 but actual be %d",length(generator.results)))
246 |
247 |
248 | # format analysis.results to subtable
249 | subtable<-data.frame(name=character(),value=character(),stringsAsFactors = FALSE)
250 | subtable.format<-data.frame(name=character(),value=character(),stringsAsFactors = FALSE)
251 | for(i in 1:length(analysis.results))
252 | {
253 | analyser<-names(analysis.results[i])
254 | result<-analysis.results[[i]]
255 | format<-SerializeXmlDoc(paste0(analysis_module.path,analyser,"/featureinfo.xml"),"datatype")
256 |
257 | if(CheckTableExistence("hpts",analyser)==FALSE)
258 | CreateTable(format,"hpts",analyser)
259 |
260 | sub.id<-InsertToDB(result,format,"hpts",analyser)
261 |
262 | subtable<-rbind(subtable,data.frame(name=analyser,value=as.character(sub.id),stringsAsFactors = FALSE))
263 | subtable.format<-rbind(subtable.format,data.frame(name=analyser,datatype="numerical",stringsAsFactors = FALSE))
264 | }
265 |
266 | #combine generator.parameter and subtable
267 | generator.name<-names(generator.results[1])
268 | generator.parameters<-generator.results[[1]]
269 | generator.format<-SerializeXmlDoc(paste0(generator_module.path,generator.name,"/variantinfo.xml"),"datatype")
270 |
271 | generator.parameters<-rbind(generator.parameters,data.frame(name="instanceId",value=as.character(main.id),stringsAsFactors = FALSE))
272 | generator.format<-rbind(generator.format,data.frame(name="instanceId",datatype="numerical",stringsAsFactors = FALSE))
273 |
274 | cond.parameters<-generator.parameters
275 | cond.format<-generator.format
276 |
277 | generator.parameters<-rbind(generator.parameters,subtable)
278 | generator.format<-rbind(generator.format,subtable.format)
279 |
280 | if(CheckTableExistence("hpts",generator.name)==FALSE)
281 | CreateTable(generator.format,"hpts",generator.name)
282 | else
283 | CheckAndUpdateTableStructure(generator.format$name,"hpts",generator.name)
284 |
285 | if(override)
286 | {
287 | target.row<-SelectFromDB(cond.parameters,cond.format,"hpts",generator.name)
288 | if(nrow(target.row)>0)
289 | {
290 | UpdateForDB(cond.parameters,cond.format,subtable,subtable.format,"hpts",generator.name)
291 | newdata<-SelectFromDB(generator.parameters,generator.format,"hpts",generator.name)
292 | generator.table.id<-newdata[1,]$id
293 | }
294 | else
295 | generator.table.id<-InsertToDB(generator.parameters,generator.format,"hpts",generator.name)
296 | }
297 | else
298 | generator.table.id<-InsertToDB(generator.parameters,generator.format,"hpts",generator.name)
299 |
300 | return(generator.table.id)
301 | }
302 |
303 | parseCombinedData<-function(name,data,datastr){
304 | name<-sub(" ","",name)
305 | r<-eval(parse(text=paste0("data.frame(name=character(),",datastr,"=character(),stringsAsFactors = FALSE)")))
306 | if(length(data)==0)
307 | stop("data is NULL in function parseCombinedData")
308 | if(is.list(data))
309 | {
310 | for(i in 1:length(data))
311 | {
312 | subname<-paste0(name,"_",i)
313 | r<-rbind(r,parseCombinedData(subname,data[i][[datastr]],datastr))
314 | }
315 | }else{
316 | #filter blank for features defination in xml
317 | name<-gsub(" ","",name)
318 | data<-gsub(" ","",data)
319 | r<-eval(parse(text=paste0("data.frame(name=name,",datastr,"=data,stringsAsFactors = FALSE)")))
320 | }
321 | return (r)
322 | }
323 |
324 |
325 | # serialize xml feature/variant file to a dataframe
326 | SerializeXmlDoc<-function(doc.xml,datastr)
327 | {
328 | doc.list<-xmlToList(doc.xml)
329 | doc.seril<-eval(parse(text=paste0("data.frame(name=character(),",datastr,"=character())")))
330 | for(i in 1:length(doc.list))
331 | {
332 | f<-doc.list[i]
333 | if(names(f)!="feature"&&names(f)!="variant")
334 | stop(sprintf("error format in xml with %s!\n",datastr))
335 |
336 | doc.seril<-eval(parse(text=paste0('rbind(doc.seril,parseCombinedData(f$',names(f),'$name,f$',names(f),'$',
337 | datastr,',"',datastr,'"))')))
338 | }
339 | return(doc.seril)
340 | }
341 |
342 |
343 |
344 | GetEnableList<-function(doc.xml,Nameaskey=TRUE)
345 | {
346 | doc.list<-xmlToList(doc.xml)
347 | doc.seril<-data.frame(name=character(),datatype=character())
348 | for(i in 1:length(doc.list))
349 | {
350 | f<-doc.list[i]
351 | if(names(f)!="feature"&&names(f)!="variant")
352 | stop("error format in xml with datatype!\n")
353 |
354 | tmp<-eval(parse(text=paste0('parseCombinedData(f$',names(f),
355 | '$name,f$',names(f),'$datatype,"datatype")')))
356 | tmp$enable_variable<-eval(parse(text=paste0('f$',names(f),'$enable_variable')))
357 | tmp$oldname<-gsub(" ","",eval(parse(text=paste0('f$',names(f),'$name'))))
358 | doc.seril<-rbind(doc.seril,tmp)
359 | }
360 | enable.list<-list()
361 | for(i in 1:nrow(doc.seril))
362 | {
363 | tmp<-doc.seril[i,]
364 | enable_str<-sub(tmp$oldname,tmp$name,tmp$enable_variable)
365 | if(Nameaskey)
366 | enable.list[[tmp$name]]<-enable_str
367 | else
368 | enable.list[[enable_str]]<-tmp$name
369 | }
370 | return(enable.list)
371 | }
372 |
373 | GetDatatypeList<-function(doc.xml)
374 | {
375 | doc.seril<-SerializeXmlDoc(doc.xml,"datatype")
376 | datatype.list<-list()
377 | for(i in 1:nrow(doc.seril))
378 | {
379 | tmp<-doc.seril[i,]
380 | datatype.list[[tmp$name]]<-tmp$datatype
381 | }
382 | return(datatype.list)
383 | }
384 |
385 |
386 |
387 | # unserialize a dataframe to a xml format file, which need a xmlformat file
388 | UnSerializeXmlDoc<-function(doc.seril,xmlformat)
389 | {
390 | # subformat
391 | FillXMLNode<-function(subformat,data,name)
392 | {
393 | if(is.list(subformat))
394 | {
395 | fnode<-newXMLNode(name = "value",parent = fsnode);
396 | for(i in 1:length(subformat))
397 | {
398 | subname<-paste0(name,"_",i);
399 | addChildren(fnode,
400 | FillXMLNode(subformat[i],data,name)
401 | );
402 | }
403 | }else{
404 | value<-data[which(data$name==name),];
405 | if(nrow(value)==0)
406 | return (NA);
407 | if(nrow(value)==1)
408 | {
409 | return (newXMLNode(name="value",value));
410 | }else{
411 | stop("stop in Fill FillXMLNode, because there are two atrribute have same name");
412 | }
413 | }
414 | }
415 | doc.xml = newXMLDoc();
416 | fsnode<-newXMLNode(name="features",doc=doc.xml);
417 | xmlformat.list<-xmlToList(xmlformat);
418 | for(i in 1:length(xmlformat.list))
419 | {
420 | feature<-xmlformat.list[i]$feature;
421 | subformat<-feature$value;
422 |
423 | xmlnode<-FillXMLNode(subformat,doc.seril,feature$name);
424 | if(is.na(xmlnode))
425 | {
426 | next;
427 | } else{
428 | fnode<-newXMLNode(name = "feature",parent = fsnode);
429 | addChildren(fnode,
430 | newXMLNode(name="name",feature$name),
431 | xmlnode
432 | );
433 | }
434 |
435 | }
436 | return (doc.xml);
437 | }
438 |
439 |
440 | InsertToDB<-function(data,format,
441 | dbname = "hpts", tbname = "ttable"){
442 | # Perform an insert operation using 'data' in 'format' on table 'tbname' of database 'dbname'
443 | #
444 | # Args:
445 | # data: The data need to insert to database, need be a dataframe
446 | # format: The format that specifics data structure and type
447 | # dbname: The database name
448 | # tbname: The table name
449 | #
450 | # Returns:
451 | # The id of the last insert
452 | stopifnot(is.data.frame(data)==TRUE)
453 | stopifnot(is.vector(data$name)==TRUE)
454 | stopifnot(is.vector(data$value)==TRUE)
455 |
456 | formatvalue<-function(data,format)
457 | {
458 | values_str<-""
459 | for(i in 1:nrow(data))
460 | {
461 | fname<-data[i,]$name
462 | value<-data[i,]$value
463 | if(i!=1)
464 | values_str<-paste0(values_str,",")
465 | for(j in 1:nrow(format))
466 | {
467 | if(fname==format[j,]$name)
468 | {
469 | datatype<-format[j,]$datatype
470 | if(datatype=="numerical")
471 | values_str<-paste0(values_str,value)
472 | if(datatype=="category")
473 | values_str<-paste0(values_str,'"',value,'"')
474 | if(datatype=="boolen")
475 | values_str<-paste0(values_str,value)
476 | break
477 | }
478 | }
479 | }
480 | return (values_str)
481 | }
482 | s1 <- paste(data$name, collapse = ",")
483 |
484 | s2 <- formatvalue(data,format)
485 |
486 | cmd.str <- sprintf('insert into %s.%s(%s) values(%s);', dbname, tbname, s1, s2)
487 | conn <- OpenDB()
488 | result<-sqlQuery(conn, cmd.str)
489 |
490 | if(length(result)!=0)
491 | stop(result)
492 |
493 | tableid <- sqlQuery(conn, "select last_insert_id()")
494 | return(tableid[[1]])
495 | }
496 |
497 | SelectFromDB<-function(condition,format,
498 | dbname = "hpts", tbname = "ttable"){
499 | # Perform a select operation using 'condition' in 'format' to table 'tbname' of database 'dbname'
500 | #
501 | # Args:
502 | # condition: The condition of select, need be a dataframe
503 | # format: The format that specifics condition structure and type
504 | # dbname: The database name
505 | # tbname: The table name
506 | #
507 | # Returns:
508 | # The rows matching the condition, as a data.frame
509 | stopifnot(is.data.frame(condition)==TRUE)
510 | stopifnot(is.vector(condition$name)==TRUE)
511 | stopifnot(is.vector(condition$value)==TRUE)
512 |
513 | FormatCondition<-function(condition,format)
514 | {
515 | condition.str<-""
516 | for(i in 1:nrow(condition))
517 | {
518 | fname<-condition[i,]$name
519 | value<-condition[i,]$value
520 | for(j in 1:nrow(format))
521 | {
522 | if(fname==format[j,]$name)
523 | {
524 | datatype<-format[j,]$datatype
525 | if(datatype=="numerical")
526 | condition.str<-sprintf(" %s and %s=%s",condition.str,fname,value)
527 | if(datatype=="category")
528 | condition.str<-sprintf(" %s and %s='%s'",condition.str,fname,value)
529 | if(datatype=="boolen")
530 | condition.str<-sprintf(" %s and %s=%s",condition.str,fname,value)
531 | break
532 | }
533 | }
534 | }
535 | return (condition.str)
536 | }
537 |
538 | condition.str <- FormatCondition(condition,format)
539 | cmd.str <- sprintf('select * from %s.%s where TRUE %s ;', dbname, tbname, condition.str)
540 | conn <- OpenDB()
541 | result<-sqlQuery(conn, cmd.str)
542 |
543 | return (result)
544 | }
545 |
546 |
547 |
548 | RemoveFromDB<-function(condition,format,
549 | dbname = "hpts", tbname = "ttable"){
550 | # Perform a delete operation using 'condition' in 'format' on table 'tbname' of database 'dbname'
551 | #
552 | # Args:
553 | # condition: The condition of select, need be a dataframe
554 | # format: The format that specifics condition structure and type
555 | # dbname: The database name
556 | # tbname: The table name
557 | #
558 | # Returns:
559 | #
560 | stopifnot(is.data.frame(condition)==TRUE)
561 | stopifnot(is.vector(condition$name)==TRUE)
562 | stopifnot(is.vector(condition$value)==TRUE)
563 |
564 | FormatCondition<-function(condition,format)
565 | {
566 | condition.str<-""
567 | for(i in 1:nrow(condition))
568 | {
569 | fname<-condition[i,]$name
570 | value<-condition[i,]$value
571 | for(j in 1:nrow(format))
572 | {
573 | if(fname==format[j,]$name)
574 | {
575 | datatype<-format[j,]$datatype
576 | if(datatype=="numerical")
577 | condition.str<-sprintf(" %s and %s=%s",condition.str,fname,value)
578 | if(datatype=="category")
579 | condition.str<-sprintf(" %s and %s='%s'",condition.str,fname,value)
580 | if(datatype=="boolen")
581 | condition.str<-sprintf(" %s and %s=%s",condition.str,fname,value)
582 | break
583 | }
584 | }
585 | }
586 | return (condition.str)
587 | }
588 |
589 | condition.str <- FormatCondition(condition,format)
590 | if(condition.str=="")
591 | stop("error ! you will clean up the table")
592 | cmd.str <- sprintf('delete from %s.%s where TRUE %s ;', dbname, tbname, condition.str)
593 | print(cmd.str)
594 | conn <- OpenDB()
595 | result<-sqlQuery(conn, cmd.str)
596 |
597 | return (result)
598 | }
599 |
600 |
601 | UpdateForDB<-function(condition,condition.format,
602 | newdata,newdata.format,
603 | dbname = "hpts", tbname = "ttable"){
604 | # Perform an update operation using 'condition' and 'newdata' on table 'tbname' of database 'dbname'
605 | #
606 | # Args:
607 | # condition: The condition of select, need be a dataframe
608 | # format: The format that specifics condition structure and type
609 | # dbname: The database name
610 | # tbname: The table name
611 | #
612 | # Returns:
613 | # 0: there is no need to update
614 | #
615 | stopifnot(is.data.frame(condition)==TRUE)
616 | stopifnot(is.vector(condition$name)==TRUE)
617 | stopifnot(is.vector(condition$value)==TRUE)
618 |
619 | stopifnot(is.data.frame(newdata)==TRUE)
620 | stopifnot(is.vector(newdata$name)==TRUE)
621 | stopifnot(is.vector(newdata$value)==TRUE)
622 |
623 | if(nrow(newdata)==0)
624 | return(0)
625 |
626 | Format<-function(data,format,linker=",")
627 | {
628 | data.str<-""
629 | for(i in 1:nrow(data))
630 | {
631 |
632 | fname<-data[i,]$name
633 | value<-data[i,]$value
634 | for(j in 1:nrow(format))
635 | {
636 | if(fname==format[j,]$name)
637 | {
638 | if(i>1)
639 | data.str<-paste(data.str,linker)
640 |
641 | datatype<-format[j,]$datatype
642 | if(datatype=="numerical")
643 | data.str<-sprintf(" %s %s=%s",data.str,fname,value)
644 | if(datatype=="category")
645 | data.str<-sprintf(" %s %s='%s'",data.str,fname,value)
646 | if(datatype=="boolen")
647 | data.str<-sprintf(" %s %s=%s",data.str,fname,value)
648 | break
649 | }
650 | }
651 | }
652 | return (data.str)
653 | }
654 |
655 | update.str<- Format(newdata,newdata.format)
656 | condition.str <- Format(condition,condition.format,"and")
657 |
658 | if(condition.str=="")
659 | stop("error ! you will update the whole table")
660 | cmd.str <- sprintf('update %s.%s set %s where TRUE and %s ;', dbname, tbname, update.str,condition.str)
661 |
662 | conn <- OpenDB()
663 | result<-sqlQuery(conn, cmd.str)
664 | return (result)
665 | }
666 |
--------------------------------------------------------------------------------
/framework/EvaluatorModule/Evaluator.R:
--------------------------------------------------------------------------------
1 | PAK.Evaluator<-setRefClass(
2 | "PAK.Evaluator",
3 | fields = list(sub.evaluators = "list",
4 | analysers.results.store ="list",
5 | analysers.results.evaluate="list"),
6 | methods = list(
7 | #init function
8 | initialize=function(sub.evaluators=list()){
9 | sub.evaluators<<-sub.evaluators
10 | },
11 | # the max score is 0
12 | getScore=function(analysers.results){
13 | score<-0
14 | for(analyser in names(sub.evaluators))
15 | {
16 | analyser.evaluator<-sub.evaluators[[analyser]]
17 | analyser.results<-analysers.results[[analyser]]
18 |
19 | #for each measurement
20 | for(m.name in names(analyser.results))
21 | {
22 | m.result<-analyser.results[[m.name]]
23 | score<-score+analyser.evaluator[[m.name]](m.result)
24 | }
25 | }
26 | return (score)
27 | },
28 | evaluate=function(app){
29 | for(aly in names(sub.evaluators))
30 | {
31 | one.analyser<-PAK.Analyser$new(name=aly,features=names(sub.evaluators[[aly]]),app=app)
32 | one.analyser$anaylze()
33 | analysers.results.store[[aly]]<<-one.analyser$getResultForDB()
34 | analysers.results.evaluate[[aly]]<<-one.analyser$getResult()
35 | }
36 | score<-getScore(analysers.results.evaluate)
37 | return (score)
38 | },
39 | getResults2store=function(){
40 | return (analysers.results.store)
41 | }
42 | )
43 | )
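44 |
45 | # A minimal usage sketch, assuming the "paktimer" analysis module (which
46 | # reports a "time" measurement) is installed under path.analysis_tools:
47 | #
48 | #   ev <- PAK.Evaluator$new(sub.evaluators=list(
49 | #           paktimer=list(time=function(x){ if(x>0) return(0-x) else return(0) })))
50 | #   score <- ev$evaluate("/path/to/optimized.cpp")   # closer to 0 is better
51 | #   raw   <- ev$getResults2store()                   # per-analyser data frames for the DB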
--------------------------------------------------------------------------------
/framework/ExtractorModule/Analyze.old:
--------------------------------------------------------------------------------
1 | library(XML)
2 | InstanceAnaylze<-function(analyser,analyser.features,app,
3 | analysis_module.path="/home/lyl/program/hpts/web/script/ExtractorModule/anaylsis_module/"){
4 | # Perform analysis on the target running instance
5 | #
6 | # Args:
7 | #   analyser: a string giving the analysis module name
8 | #   analyser.features: a character vector of enable variables for the features to analyse
9 | #   app: a string giving the target program name (with path)
10 |
11 | envstr<-""
12 | for(i in 1:length(analyser.features))
13 | {
14 | envvar<-gsub(" ","",analyser.features[i])
15 | envstr<-paste(envstr,sprintf("export %s=TRUE;",envvar))
16 | }
17 |
18 | app.name<-basename(app)
19 | app.path<-dirname(app)
20 |
21 | #change to the application's directory
22 | envstr<-paste0(envstr,'cd ',app.path,';')
23 | envstr<-paste0(envstr,'sh ',analysis_module.path,analyser,"/analysis.sh ",app.name)
24 |
25 | r<-system(envstr,intern = TRUE)
26 | resultfile<-paste0(app.path,"/",r)
27 | anaylsis.result<-xmlToDataFrame(paste0(app.path,"/",r),stringsAsFactors=FALSE)
28 |
29 | return(anaylsis.result)
30 | }
31 |
32 | #anaylsis.result<-InstanceAnaylze("tau",c("ENABLE_P_WALL_CLOCK_TIME"),"/home/lyl/program/hpts/applications/optimized_1.cpp")
--------------------------------------------------------------------------------
/framework/ExtractorModule/Extractor.R:
--------------------------------------------------------------------------------
1 | PAK.Extractor<-setRefClass(
2 | "PAK.Extractor",
3 | fields = list(analysers = "list",
4 | analysers.results.store="list",
5 | analysers.results.produce="list"
6 | ),
7 | methods = list(
8 | #init function
9 | initialize=function(analysers=list()){
10 | analysers<<-analysers
11 | },
12 |
13 | # run every registered analyser on the application and cache its results
14 | extractFeatures=function(app){
15 | for(aly in names(analysers))
16 | {
17 | one.analyser<-PAK.Analyser$new(name=aly,features=analysers[[aly]],app=app)
18 | one.analyser$anaylze()
19 | analysers.results.store[[aly]]<<-one.analyser$getResultForDB()
20 | analysers.results.produce[[aly]]<<-one.analyser$getResult()
21 | }
22 | return (analysers.results.produce)
23 | },
24 | getFeatures2produce=function(){
25 | return (analysers.results.produce)
26 | },
27 | getFeatures2store=function(){
28 | return (analysers.results.store)
29 | }
30 | )
31 | )
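32 |
33 | # A minimal usage sketch, assuming an "appinfo" analysis module exposing an
34 | # "arrayshape" feature is installed under path.analysis_tools:
35 | #
36 | #   ex <- PAK.Extractor$new(analysers=list(appinfo=c("arrayshape")))
37 | #   features <- ex$extractFeatures("/path/to/multiplyexample.c")
38 | #   ex$getFeatures2store()    # raw data frames, suitable for the DB module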
--------------------------------------------------------------------------------
/framework/Interface/Analyser.R:
--------------------------------------------------------------------------------
1 | PAK.Analyser<-setRefClass(
2 | "PAK.Analyser",
3 | fields = list(app = "character",
4 | name = "character",
5 | features = "character",
6 | result = "data.frame",
7 | path = "character",
8 | enable.list ="list",
9 | datatype.list="list"),
10 | methods = list(
11 | #init function
12 | initialize=function(name="",app="",features=character()){
13 | name<<-name
14 | app<<-app
15 | features<<-features
16 | path<<-path.analysis_tools
17 | enable.list<<-GetEnableList(paste0(path,"/",name,"/featureinfo.xml"),Nameaskey = TRUE)
18 | datatype.list<<-GetDatatypeList(paste0(path,"/",name,"/featureinfo.xml"))
19 | },
20 | enableAllfeatures=function(){
21 | features<<-names(enable.list)
22 | },
23 | anaylze=function(){
24 | # Perform analysis on the target running instance
25 | #
26 | # Uses the object's fields:
27 | #   name: the analysis module name
28 | #   features: a character vector of feature names to analyse
29 | #   app: the target program name (with path)
30 | envstr<-""
31 | if(length(features)==0)
32 | stop(sprintf("error! features of analyser %s is NULL!",name))
33 |
34 | for(i in 1:length(features))
35 | {
36 | envvar<-gsub(" ","",enable.list[[features[i]]])
37 | envstr<-paste(envstr,sprintf("export %s=TRUE;",envvar))
38 | }
39 | app.name<-basename(app)
40 | app.path<-dirname(app)
41 | #change to the application's directory
42 | envstr<-paste0(envstr,'cd ',app.path,';')
43 | envstr<-paste0(envstr,'sh ',path,name,"/analysis.sh ","./",app.name)
44 | r<-system(envstr,intern = TRUE)
45 | resultfile<-paste0(app.path,"/",r)
46 | result<<-xmlToDataFrame(paste0(app.path,"/",r),stringsAsFactors=FALSE)
47 | },
48 | getResultForDB=function()
49 | {
50 | return (result)
51 | },
52 | getResult=function()
53 | {
54 | result.evaluate<-list()
55 | for(i in 1:nrow(result))
56 | {
57 | tmp<-result[i,]
58 |
59 | if(datatype.list[[tmp$name]]!="category")
60 | result.evaluate[[tmp$name]]<-as.numeric(tmp$value)
61 | else
62 | result.evaluate[[tmp$name]]<-tmp$value
63 | }
64 | return (result.evaluate)
65 | }
66 | )
67 | )
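68 |
69 | # A minimal usage sketch, assuming a "tau" analysis module whose
70 | # featureinfo.xml defines a P_WALL_CLOCK_TIME feature:
71 | #
72 | #   a <- PAK.Analyser$new(name="tau", app="/path/to/optimized.cpp",
73 | #                         features=c("P_WALL_CLOCK_TIME"))
74 | #   a$anaylze()          # runs the module's analysis.sh in the application directory
75 | #   a$getResult()        # named list, numeric except for "category" features
76 | #   a$getResultForDB()   # raw name/value data frame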
--------------------------------------------------------------------------------
/framework/Interface/Generator.R:
--------------------------------------------------------------------------------
1 | library("methods")
2 | PAK.Generator<-setRefClass(
3 | "PAK.Generator",
4 | fields = list(app = "character",
5 | name = "character",
6 | parameters = "data.frame",
7 | output = "character",
8 | result = "character",
9 | path="character"),
10 | methods = list(
11 | #init function
12 | initialize=function(name="",app="",output="",p.os=NA){
13 | name<<-name
14 | app<<-app
15 | output<<-output
16 | path<<-path.generator_tools
17 |
18 | if(is.data.frame(p.os))
19 | setPUsingSpace(p.os)
20 | },
21 | transform=function(){
22 | # perform the code transformation on the target program
23 | envstr<-""
24 | for(i in 1:nrow(parameters))
25 | {
26 | envvar.name<-gsub(" ","",parameters[i,]$enable_variable)
27 | envvar.value<-gsub(" ","",parameters[i,]$parameter)
28 | envstr<-paste(
29 | envstr,sprintf("export %s='%s';",
30 | envvar.name,
31 | envvar.value))
32 | }
33 | #change to the application's directory
34 | app.name<-basename(app)
35 | app.path<-dirname(app)
36 | envstr<-paste0(envstr,'cd ',app.path,';')
37 | envstr<-paste0(envstr,'sh ',path,name,
38 | "/transform.sh ",app.name," ",output)
39 | variant.file<-system(envstr,intern = TRUE)
40 | result<<-paste0(app.path,"/",variant.file)
41 | },
42 | setPUsingSpace=function(p.in.space){
43 | enable.list<-GetEnableList(paste0(path,"/",name,"/variantinfo.xml"))
44 | parameters<<-data.frame(enable_variable=character(),
45 | parameter=character())
46 | for(p in names(p.in.space) )
47 | parameters<<-
48 | rbind(parameters,data.frame(enable_variable=enable.list[[p]],
49 | parameter=p.in.space[[p]]))
50 | },
51 | getParameterForDB=function(){
52 | enable.list<-GetEnableList(paste0(path,"/",name,"/variantinfo.xml"),FALSE)
53 | df<-as.data.frame(do.call(rbind,mapply(SIMPLIFY = FALSE,function(e,p)
54 | {data.frame(name=enable.list[[e]],value=p,stringsAsFactors = FALSE)},
55 | parameters$enable_variable,parameters$parameter)))
56 | return(df)
57 | }
58 | )
59 | )
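60 |
61 | # A minimal usage sketch using the optimizeCompilerFlag generator module
62 | # (p.os is one row of a parameter space, as produced by GenerationParameterSpace):
63 | #
64 | #   g <- PAK.Generator$new(name="optimizeCompilerFlag",
65 | #                          app="/path/to/multiplyexample.c",
66 | #                          output="optimized",
67 | #                          p.os=data.frame(compilerflag=" -O2", stringsAsFactors=FALSE))
68 | #   g$transform()        # runs the module's transform.sh and fills g$result
69 | #   g$result             # path of the generated variant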
--------------------------------------------------------------------------------
/framework/LearnerModule/Learner.R:
--------------------------------------------------------------------------------
1 | # the Learner module
2 | PAK.Learner<-setRefClass(
3 | "PAK.Learner",
4 | fields = list(model="list",dv.name="character",
5 | idv.name="character"),
6 | methods = list(
7 | #init function
8 | initialize=function(){
9 | },
10 | learnModel=function(training.data,idv,dv){
11 |
12 | }
13 | )
14 | )
15 |
16 |
17 |
18 |
--------------------------------------------------------------------------------
/framework/OptimizerModule/Optimizer.R:
--------------------------------------------------------------------------------
1 |
2 | PAK.Optimizer<-setRefClass(
3 | "PAK.Optimizer",
4 | fields = list(generator.name ="character",
5 | generator= "PAK.Generator",
6 | output.name="character"),
7 | methods = list(
8 | #init function
9 | initialize=function(generator.name=NA,output.name="optimized.cpp"){
10 |
11 | output.name<<-output.name
12 | if(is.character(generator.name))
13 | {
14 | generator.name<<-generator.name
15 | generator<<-PAK.Generator$new(name=generator.name,output=output.name)
16 | }
17 | },
18 | optimize=function(app,parameters){
19 | generator$setPUsingSpace(parameters)
20 | generator$app<<-app
21 | r<-generator$transform()
22 | return(r)
23 | },
24 | getParameters2store=function(){
25 | parameters.list<-list()
26 | print(generator$getParameterForDB())
27 | print(generator.name)
28 | parameters.list[[generator.name]]<-generator$getParameterForDB()
29 | return(parameters.list)
30 | }
31 | )
32 | )
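33 |
34 | # A minimal usage sketch (parameters are one row of a parameter space, as
35 | # produced by a PAK.Producer):
36 | #
37 | #   opt <- PAK.Optimizer$new(generator.name="optimizeCompilerFlag",
38 | #                            output.name="optimized.cpp")
39 | #   variant <- opt$optimize("/path/to/multiplyexample.c",
40 | #                           data.frame(compilerflag=" -O3", stringsAsFactors=FALSE))
41 | #   opt$getParameters2store()   # list keyed by generator name, holding name/value data frames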
--------------------------------------------------------------------------------
/framework/ProducerModule/.RData:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCIC-PARALLEL/Graphine-SDK/89a5a562228d263ca1e16db3c3764435c57a6898/framework/ProducerModule/.RData
--------------------------------------------------------------------------------
/framework/ProducerModule/Producer.R:
--------------------------------------------------------------------------------
1 | # the base class of parameter producers
2 | PAK.Producer<-setRefClass(
3 | "PAK.Producer",
4 | fields = list(),
5 | methods = list(
6 | # init function
7 | initialize=function(){
8 | },
9 | getParameter=function(step,extractor.result,last.score)
10 | {
11 |
12 | }
13 | )
14 | )
15 |
16 |
--------------------------------------------------------------------------------
/framework/Tuning/Tuner.R:
--------------------------------------------------------------------------------
1 | library("methods")
2 | PAK.Tuner<-setRefClass(
3 | "PAK.Tuner",
4 | fields = list(app = "character",
5 | extractor ="PAK.Extractor",
6 | evaluator = "PAK.Evaluator",# list(analyser=list(name=eval.fun)): name is measurement name, and eval.fun is its evaluate function
7 | producer ="PAK.Producer",#parameter producer
8 | optimizer ="PAK.Optimizer",#optimizer
9 | need.store ="logical",
10 | best.score="numeric",
11 | best.parameters="data.frame",
12 | best.results="list"),
13 | methods = list(
14 | #init function
15 | initialize=function(app,extractor=PAK.Extractor$new(),evaluator,producer,optimizer,need.store=FALSE){
16 | app<<-app
17 | extractor<<-extractor
18 | evaluator<<-evaluator
19 | producer<<-producer
20 | optimizer<<-optimizer
21 | need.store<<-need.store
22 | },
23 | checkpoint=function(step,score,r.producer,r.best.score,r.best.parameters,r.best.result){
24 | if(length(dir(pattern="stoptuning"))!=0)
25 | {
26 | print("stop tuning!")
27 | save(step,score,r.producer,r.best.score,r.best.parameters,r.best.result,file="tuningRecord")
28 | return (TRUE)
29 | }
30 | return (FALSE)
31 | },
32 | tune=function(resume.file=NA){
33 | step<-0
34 | score<-numeric()
35 | extractor$extractFeatures(app)
36 | if(!is.na(resume.file))
37 | {
38 | print("load file")
39 | load(resume.file)
40 | producer<<-r.producer
41 | best.score<<-r.best.score
42 | best.parameters<<-r.best.parameters
43 | best.results<<-r.best.result
44 | }
45 | while(TRUE)
46 | {
47 | step<-step+1
48 | cat(sprintf("step : %d",step))
49 |
50 | #produce parameters
51 | parameters<-producer$getParameter(step=step,
52 | extractor.result=extractor$getFeatures2produce(),
53 | last.score=score)
54 |
55 | #the search space is exhausted
56 | if(length(parameters)==0)
57 | break
58 |
59 | optimized.instance<-optimizer$optimize(app,parameters)
60 | print(parameters)
61 | score<-evaluator$evaluate(optimized.instance)
62 |
63 | if(need.store)
64 | store2DB()
65 |
66 | if(step==1||score>best.score)
67 | {
68 | best.score<<-score
69 | best.parameters<<-parameters
70 | best.results<<-evaluator$analysers.results.evaluate
71 | }
72 | print(score)
73 | if(score==0||checkpoint(step,score,producer,best.score,best.parameters,best.results))
74 | break
75 | cat("end a step \n")
76 | }
77 | },
78 |
79 | store2DB=function(){
80 | keyAnalyser.result<-PerformKeyAnalysis(app)
81 | unkeyAnalyser.result<-extractor$getFeatures2store()
82 | Analyser.result<-rbind(keyAnalyser.result,unkeyAnalyser.result)
83 | main.id<-StoreAnalysis(keyAnalyser.result,override = TRUE)
84 | StoreTransformation(main.id,optimizer$getParameters2store(),evaluator$getResults2store())
85 | }
86 | )
87 | )
88 |
89 |
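90 | # Checkpointing: creating a file whose name contains "stoptuning" in the
91 | # working directory stops the loop and saves the search state to a file
92 | # named "tuningRecord"; a later run can continue from it with
93 | #
94 | #   tuning$tune(resume.file="tuningRecord")
95 | #
96 | # (see tutorial/autotuning_compilerflag.R for how the tuner itself is built)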
--------------------------------------------------------------------------------
/framework/dependencies.R:
--------------------------------------------------------------------------------
1 | library("XML")
2 | library("jsonlite")
3 | library("RODBC")
4 | library("methods")
5 | print("load dependencies")
--------------------------------------------------------------------------------
/framework/lib/OptimizationSpace.R:
--------------------------------------------------------------------------------
1 | GenerationParameterSpace<-function(parameter.list)
2 | {
3 | loop.str<-""
4 | df.str<-""
5 | for(i in 1:length(parameter.list))
6 | {
7 | range<-unlist(strsplit(parameter.list[[i]],";"))[1]
8 | condition<-unlist(strsplit(parameter.list[[i]],";"))[2]
9 | parameter.name<-names(parameter.list[i])
10 | if(is.na(condition))
11 | tmp.str<-sprintf("for(%s in c(%s) )\n",names(parameter.list[i]),range)
12 | else
13 | tmp.str<-sprintf("for(%s in c(%s) )\n if(%s)\n",names(parameter.list[i]),range,condition)
14 | loop.str<-paste0(loop.str,tmp.str)
15 | df.str<-paste0(df.str,sprintf("%s=%s",
16 | parameter.name,parameter.name))
17 | if(i!=length(parameter.list))
18 | df.str<-paste0(df.str,",")
19 | }
20 | df.str<-paste0("combinations.tmp<-data.frame(",df.str,",stringsAsFactors = FALSE)")
21 | body.str<-paste0(df.str,"\nif(nrow(combinations.parameter)==0)\n",
22 | "combinations.parameter<-combinations.tmp\n",
23 | "else\ncombinations.parameter<-rbind(",
24 | "combinations.parameter,combinations.tmp)")
25 |
26 | space.str<-paste0(loop.str,"{\n",body.str,"\n}")
27 | combinations.parameter<-data.frame()
28 | eval(parse(text=space.str))
29 | return(combinations.parameter)
30 | }
31 |
32 | #combinations.parameter<-GenerationParameterSpace(parameter.list)
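33 | #
34 | # Each entry of parameter.list is a string of the form "range[;condition]":
35 | # the range is spliced into a for() loop over c(range), and the optional
36 | # condition filters out invalid combinations. A small example with
37 | # hypothetical parameter names:
38 | #
39 | #   pl <- list(block=" 16, 32, 64 ", tile=" 2, 4, 8 ; tile<=block")
40 | #   space <- GenerationParameterSpace(pl)   # data.frame of all valid (block, tile) pairs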
--------------------------------------------------------------------------------
/framework/lib/learners.R:
--------------------------------------------------------------------------------
1 | # a decision tree learner
2 | PAK.Learner.DecisionTree<-setRefClass(
3 | "PAK.Learner.DecisionTree",
4 | contains="PAK.Learner",
5 | fields = list(model="list",dv.name="character",
6 | idv.name="character"),
7 | methods = list(
8 | #init function
9 | initialize=function(){
10 | },
11 | learnModel=function(training.data,idv,dv){
12 | buildstr<-sprintf("rpart(%s~.,training.data)",dv)
13 | model[["rp"]]<<-eval(parse(text=buildstr) )
14 | model[["dv.name"]]<<-dv
15 | model[["idv.name"]]<<-idv
16 | return (model)
17 | }
18 | )
19 | )
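20 |
21 | # A minimal usage sketch; learnModel() needs the rpart package (loaded in
22 | # framework/lib/producers.R) and a training data frame whose columns include
23 | # the dependent variable (the names below are only illustrative):
24 | #
25 | #   l <- PAK.Learner.DecisionTree$new()
26 | #   m <- l$learnModel(training.data=mydata, idv=c("compilerflag"), dv="time")
27 | #   m$rp    # the fitted rpart model, predicting time from the other columns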
--------------------------------------------------------------------------------
/framework/lib/producers.R:
--------------------------------------------------------------------------------
1 | library(rpart)
2 |
3 | PAK.Producer.Exhaustion<-setRefClass(
4 | "PAK.Producer.Exhaustion",
5 | contains="PAK.Producer",
6 | fields = list(parameter.space="data.frame"),
7 | methods = list(
8 | #init function
9 | initialize=function(parameter.list){
10 | parameter.space<<-GenerationParameterSpace(parameter.list)
11 | },
12 | getParameter=function(step,extractor.result,last.score)
13 | {
14 | if(step<=nrow(parameter.space))
15 | {
16 | if(ncol(parameter.space)==1)
17 | return (eval(parse(text=sprintf("data.frame(%s=parameter.space[step,])",names(parameter.space)))))
18 | else
19 | return(parameter.space[step,])
20 | }
21 | }
22 | )
23 | )
24 |
25 |
26 | PAK.Producer.Greedy<-setRefClass(
27 | "PAK.Producer.Greedy",
28 | contains="PAK.Producer",
29 | fields = list(parameter.range="list",#list(p1=c(1,2,3..),p2=c(1,2,3..)..)
30 | v.idx ="numeric",
31 | v.score ="numeric",
32 | v.pos ="numeric",
33 | local.optimal= "data.frame"),
34 | methods = list(
35 | #init function
36 | initialize=function(parameter.range=list()){
37 | parameter.range<<-parameter.range
38 | },
39 |
40 | getParameter=function(step,extractor.result,last.score)
41 | {
42 | if(step==1)
43 | {
44 | local.optimal<<-defulat.parameters
45 | v.idx<<-1
46 | v.score<<-c()
47 | v.pos<<-0
48 | }
49 | else if(v.posparameter.number)
58 | return (data.frame())
59 |
60 | v.score<<-c()
61 | v.pos<<-0
62 | }
63 | new.parameter<-local.optimal
64 | new.parameter[[v.idx]]<-parameter.range[[v.idx]][v.pos+1]
65 | return (new.parameter)
66 | }
67 | )
68 | )
69 |
70 |
71 | PAK.Producer.OptimalSpace<-setRefClass(
72 | "PAK.Producer.OptimalSpace",
73 | contains="PAK.Producer",
74 | fields = list(parameter.space="data.frame"),
75 | methods = list(
76 | #init function
77 | initialize=function(tid){
78 | os<-PredictOptimalSpace(tid,combine.os=c(1,3,5,8,10),os.size=25,model=os.models[[1]],cond.str=cond.str,omp.number = 16)
79 | convertOS2parameterspace(os)
80 | cat("length of os: ",nrow(parameter.space),"\n")
81 | print(parameter.space)
82 | },
83 | getParameter=function(step,extractor.result,last.score)
84 | {
85 | if(stepnrow(dv.prs))
170 | parameter<-eval(parse(text=sprintf("data.frame(%s=NULL)",dv.name)))
171 | else
172 | parameter<-eval(parse(text=sprintf("data.frame(%s='%s')",dv.name,dv.prs$type[step])))
173 |
174 |
175 | return (parameter)
176 | }
177 | )
178 | )
179 |
180 |
181 |
182 |
183 |
184 |
185 |
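186 | # Contract shared by all producers: getParameter(step, extractor.result,
187 | # last.score) is called once per tuning step and returns a one-row data frame
188 | # of parameter values; returning an empty data frame (or NULL) tells
189 | # PAK.Tuner that the search space is exhausted. For example:
190 | #
191 | #   p <- PAK.Producer.Exhaustion$new(list(compilerflag='" -O0"," -O2"," -O3";'))
192 | #   p$getParameter(step=1, extractor.result=list(), last.score=numeric())
193 | #   # -> data.frame(compilerflag=" -O0")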
--------------------------------------------------------------------------------
/generator_module/optimizeCompilerFlag/transform.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | CC=gcc
3 | #export ENABLE_COMPILERFLAG="-O3"
4 | CFLAGENABLE=`env |grep ENABLE_COMPILERFLAG`
5 | FNAME=${CFLAGENABLE#ENABLE_}
6 | FNAME=${FNAME%%=*}
7 | SUFFIX=${FNAME##*_}
8 | FNAME=${FNAME%%_*}
9 |
10 | if [ "$FNAME" = "COMPILERFLAG" ] ; then
11 | CFLAG=${CFLAGENABLE#*=}
12 | $CC $CFLAG $1 -o $2
13 | fi
14 | echo $2
15 |
16 |
17 |
18 |
19 |
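20 | # A minimal usage sketch; PAK normally sets the environment variable through
21 | # the optimizeCompilerFlag generator module, but the script can be run by hand:
22 | #
23 | #   export ENABLE_COMPILERFLAG="-O2"
24 | #   sh transform.sh multiplyexample.c optimized   # compiles and prints "optimized"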
--------------------------------------------------------------------------------
/generator_module/optimizeCompilerFlag/variantinfo.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | compilerflag
4 | the compiler flag
5 | ENABLE_COMPILERFLAG
6 | string
7 |
8 |
9 |
--------------------------------------------------------------------------------
/pak.R:
--------------------------------------------------------------------------------
1 | path.analysis_tools<-"/home/liujh/PAK-master/analysis_module/"
2 | path.generator_tools<-"/home/liujh/PAK-master/generator_module/"
3 | odbc.source<-"kdb"
4 | database.user<-"user"
5 | database.pwd<-"pwd"
6 |
7 |
8 |
9 | GetSourceFileDir<-function()
10 | {
11 | frame_files <- lapply(sys.frames(), function(x) x$ofile)
12 | frame_files <- Filter(Negate(is.null), frame_files)
13 | path.sourcefile <- dirname(frame_files[[length(frame_files)]])
14 | return(path.sourcefile)
15 | }
16 |
17 | SourceDir <- function(path, trace = FALSE)
18 | {
19 | #if(missing(path)) path <- getwd()
20 | for(i in 1:length(path))
21 | {
22 | for (nm in list.files(path, pattern = "\\.[Rr]$", recursive=TRUE))
23 | {
24 | if(trace) cat(nm,":")
25 | source(file.path(path, nm))
26 | if(trace) cat("\n")
27 | }
28 | }
29 | }
30 |
31 | sfdir<-GetSourceFileDir()
32 | source(file.path(sfdir,"framework/dependencies.R"))
33 | SourceDir(file.path(sfdir,"framework/Interface"))
34 | SourceDir(file.path(sfdir,"framework/ExtractorModule"))
35 | SourceDir(file.path(sfdir,"framework/ProducerModule"))
36 | SourceDir(file.path(sfdir,"framework/OptimizerModule"))
37 | SourceDir(file.path(sfdir,"framework/EvaluatorModule"))
38 | SourceDir(file.path(sfdir,"framework/LearnerModule"))
39 | SourceDir(file.path(sfdir,"framework/DBModule"))
40 | SourceDir(file.path(sfdir,"framework/Tuning"))
41 | SourceDir(file.path(sfdir,"framework/lib"))
42 |
43 |
44 | #PerformKeyAnalysis<-function(app){
45 | #  key.analyser.names<-c("appinfo","envinfo")
46 | #  result<-list()
47 | #  for(ka in key.analyser.names)
48 | #  {
49 | #    analyser<-PAK.Analyser$new(name=ka,app)
50 | #    analyser$enableAllfeatures()
51 | #    analyser$anaylze()
52 | #    result[[ka]]<-analyser$getResultForDB()
53 | #  }
54 | #  return (result)
55 | #}
56 |
--------------------------------------------------------------------------------
/tutorial/autotuning_compilerflag.R:
--------------------------------------------------------------------------------
1 | GetSourceFileDir<-function()
2 | {
3 | frame_files <- lapply(sys.frames(), function(x) x$ofile)
4 | frame_files <- Filter(Negate(is.null), frame_files)
5 | path.sourcefile <- dirname(frame_files[[length(frame_files)]])
6 | return(path.sourcefile)
7 | }
8 |
9 | sfdir<-GetSourceFileDir()
10 | source(file.path(sfdir,"../pak.R"))
11 |
12 |
13 | # Exhaustion algorithm autotuner
14 | if(TRUE)  # set to FALSE to skip this example
15 | {
16 | app<-"/home/liujh/PAK-master/applications/multiplyexample.c"
17 |
18 | parameter.list<-list()
19 | parameter.list["compilerflag"]<-'" -O0" ," -O1"," -O2"," -O3";'
20 |
21 | myproducer<-PAK.Producer.Exhaustion$new(parameter.list)
22 | myoptimizer<-PAK.Optimizer$new(generator.name="optimizeCompilerFlag")
23 | myevaluator<-PAK.Evaluator$new(sub.evaluators=list(paktimer=list(time=function(x){if(x>0) return (0-x) else return(0)})))
24 |
25 | tuning<-PAK.Tuner$new(app=app,optimizer=myoptimizer,evaluator=myevaluator,producer =myproducer,need.store=FALSE)
26 | tuning$tune()
27 | }
28 |
29 |
30 |
--------------------------------------------------------------------------------