├── Makefile
├── .gitignore
├── README.md
├── minted.sty
├── paper.bib
├── paper.tex
└── llncs.cls
/Makefile:
--------------------------------------------------------------------------------
1 | paper.pdf: paper.bib paper.tex
2 | pdflatex -shell-escape paper.tex
3 | bibtex paper
4 | pdflatex -shell-escape paper.tex
5 | pdflatex -shell-escape paper.tex
6 |
7 | .PHONY: partial clean
8 | partial:
9 | 	pdflatex -shell-escape paper.tex
9 |
10 | clean:
11 | rm -f *.log *.out *.aux *.bbl *.blg paper.pdf
12 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Vim temporaries
2 | .*.sw[op]
3 |
4 | # LaTeX output
5 | *.dvi
6 | *.pdf
7 | *.ps
8 |
9 | # LaTeX cruft
10 | *.aux
11 | *.glo
12 | *.idx
13 | *.log
14 | *.toc
15 | *.ist
16 | *.acn
17 | *.acr
18 | *.alg
19 | *.bbl
20 | *.blg
21 | *.dvi
22 | *.glg
23 | *.gls
24 | *.ilg
25 | *.ind
26 | *.lof
27 | *.lot
28 | *.maf
29 | *.mtc
30 | *.mtc1
31 | *.out
32 | *.synctex.gz
33 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ECML/PKDD 2013 workshop paper
2 | =============================
3 |
4 | Paper by scikit-learn contributors for the ECML/PKDD 2013 Workshop "Languages
5 | for Data Mining and Machine Learning".
6 |
7 | Website: http://dtai.cs.kuleuven.be/lml/
8 | Submission deadline: Fri. 28 June
9 | Deadline for revisions: Thu. 15 August
10 |
11 | Maximum number of pages: 15
12 |
--------------------------------------------------------------------------------
/minted.sty:
--------------------------------------------------------------------------------
1 | %%
2 | %% This is file `minted.sty',
3 | %% generated with the docstrip utility.
4 | %%
5 | %% The original source files were:
6 | %%
7 | %% minted.dtx (with options: `package')
8 | %% Copyright 2010--2011 Konrad Rudolph
9 | %%
10 | %% This work may be distributed and/or modified under the
11 | %% conditions of the LaTeX Project Public License, either version 1.3
12 | %% of this license or (at your option) any later version.
13 | %% The latest version of this license is in
14 | %% http://www.latex-project.org/lppl.txt
15 | %% and version 1.3 or later is part of all distributions of LaTeX
16 | %% version 2005/12/01 or later.
17 | %%
18 | %% Additionally, the project may be distributed under the terms of the new BSD
19 | %% license.
20 | %%
21 | %% This work has the LPPL maintenance status `maintained'.
22 | %%
23 | %% The Current Maintainer of this work is Konrad Rudolph.
24 | %%
25 | %% This work consists of the files minted.dtx and minted.ins
26 | %% and the derived file minted.sty.
27 | \NeedsTeXFormat{LaTeX2e}
28 | \ProvidesPackage{minted}[2011/09/17 v1.7 Yet another Pygments shim for LaTeX]
29 | \RequirePackage{keyval}
30 | \RequirePackage{fancyvrb}
31 | \RequirePackage{xcolor}
32 | \RequirePackage{float}
33 | \RequirePackage{ifthen}
34 | \RequirePackage{calc}
35 | \RequirePackage{ifplatform}
36 | \DeclareOption{chapter}{\def\minted@float@within{chapter}}
37 | \DeclareOption{section}{\def\minted@float@within{section}}
38 | \ProcessOptions\relax
39 | \ifwindows
40 | \providecommand\DeleteFile[1]{\immediate\write18{del #1}}
41 | \else
42 | \providecommand\DeleteFile[1]{\immediate\write18{rm #1}}
43 | \fi
44 | \newboolean{AppExists}
45 | \newcommand\TestAppExists[1]{
46 | \ifwindows
47 | \DeleteFile{\jobname.aex}
48 | \immediate\write18{for \string^\@percentchar i in (#1.exe #1.bat #1.cmd)
49 | do set >\jobname.aex >\jobname.aex} %$
50 | \newread\@appexistsfile
51 | \immediate\openin\@appexistsfile\jobname.aex
52 | \expandafter\def\expandafter\@tmp@cr\expandafter{\the\endlinechar}
53 | \endlinechar=-1\relax
54 | \readline\@appexistsfile to \@apppathifexists
55 | \endlinechar=\@tmp@cr
56 | \ifthenelse{\equal{\@apppathifexists}{}}
57 | {\AppExistsfalse}
58 | {\AppExiststrue}
59 | \immediate\closein\@appexistsfile
60 | \DeleteFile{\jobname.aex}
61 | \immediate\typeout{file deleted}
62 | \else
63 | \immediate\write18{which #1 && touch \jobname.aex}
64 | \IfFileExists{\jobname.aex}
65 | {\AppExiststrue
66 | \DeleteFile{\jobname.aex}}
67 | {\AppExistsfalse}
68 | \fi}
69 | \newcommand\minted@resetoptions{}
70 | \newcommand\minted@defopt[1]{
71 | \expandafter\def\expandafter\minted@resetoptions\expandafter{%
72 | \minted@resetoptions
73 | \@namedef{minted@opt@#1}{}}}
74 | \newcommand\minted@opt[1]{
75 | \expandafter\detokenize%
76 | \expandafter\expandafter\expandafter{\csname minted@opt@#1\endcsname}}
77 | \newcommand\minted@define@opt[3][]{
78 | \minted@defopt{#2}
79 | \ifthenelse{\equal{#1}{}}{
80 | \define@key{minted@opt}{#2}{\@namedef{minted@opt@#2}{#3}}}
81 | {\define@key{minted@opt}{#2}[#1]{\@namedef{minted@opt@#2}{#3}}}}
82 | \newcommand\minted@define@switch[3][]{
83 | \minted@defopt{#2}
84 | \define@booleankey{minted@opt}{#2}
85 | {\@namedef{minted@opt@#2}{#3}}
86 | {\@namedef{minted@opt@#2}{#1}}}
87 | \minted@defopt{extra}
88 | \newcommand\minted@define@extra[1]{
89 | \define@key{minted@opt}{#1}{
90 | \expandafter\def\expandafter\minted@opt@extra\expandafter{%
91 | \minted@opt@extra,#1=##1}}}
92 | \newcommand\minted@define@extra@switch[1]{
93 | \define@booleankey{minted@opt}{#1}
94 | {\expandafter\def\expandafter\minted@opt@extra\expandafter{%
95 | \minted@opt@extra,#1}}
96 | {\expandafter\def\expandafter\minted@opt@extra\expandafter{%
97 | \minted@opt@extra,#1=false}}}
98 | \minted@define@switch{texcl}{-P texcomments}
99 | \minted@define@switch{mathescape}{-P mathescape}
100 | \minted@define@switch{linenos}{-P linenos}
101 | \minted@define@switch{startinline}{-P startinline}
102 | \minted@define@switch[-P funcnamehighlighting=False]%
103 | {funcnamehighlighting}{-P funcnamehighlighting}
104 | \minted@define@opt{gobble}{-F gobble:n=#1}
105 | \minted@define@opt{bgcolor}{#1}
106 | \minted@define@extra{frame}
107 | \minted@define@extra{framesep}
108 | \minted@define@extra{framerule}
109 | \minted@define@extra{rulecolor}
110 | \minted@define@extra{numbersep}
111 | \minted@define@extra{firstnumber}
112 | \minted@define@extra{stepnumber}
113 | \minted@define@extra{firstline}
114 | \minted@define@extra{lastline}
115 | \minted@define@extra{baselinestretch}
116 | \minted@define@extra{xleftmargin}
117 | \minted@define@extra{xrightmargin}
118 | \minted@define@extra{fillcolor}
119 | \minted@define@extra{tabsize}
120 | \minted@define@extra{fontfamily}
121 | \minted@define@extra{fontsize}
122 | \minted@define@extra{fontshape}
123 | \minted@define@extra{fontseries}
124 | \minted@define@extra{formatcom}
125 | \minted@define@extra{label}
126 | \minted@define@extra@switch{numberblanklines}
127 | \minted@define@extra@switch{showspaces}
128 | \minted@define@extra@switch{resetmargins}
129 | \minted@define@extra@switch{samepage}
130 | \minted@define@extra@switch{showtabs}
131 | \minted@define@extra@switch{obeytabs}
132 | \newsavebox{\minted@bgbox}
133 | \newenvironment{minted@colorbg}[1]{
134 | \def\minted@bgcol{#1}
135 | \noindent
136 | \begin{lrbox}{\minted@bgbox}
137 | \begin{minipage}{\linewidth-2\fboxsep}}
138 | {\end{minipage}
139 | \end{lrbox}%
140 | \colorbox{\minted@bgcol}{\usebox{\minted@bgbox}}}
141 | \newwrite\minted@code
142 | \newcommand\minted@savecode[1]{
143 | \immediate\openout\minted@code\jobname.pyg
144 | \immediate\write\minted@code{#1}
145 | \immediate\closeout\minted@code}
146 | \newcommand\minted@pygmentize[2][\jobname.pyg]{
147 | \def\minted@cmd{pygmentize -l #2 -f latex -F tokenmerge
148 | \minted@opt{gobble} \minted@opt{texcl} \minted@opt{mathescape}
149 | \minted@opt{startinline} \minted@opt{funcnamehighlighting}
150 | \minted@opt{linenos} -P "verboptions=\minted@opt{extra}"
151 | -o \jobname.out.pyg #1}
152 | \immediate\write18{\minted@cmd}
153 | % For debugging, uncomment:
154 | %\immediate\typeout{\minted@cmd}
155 | \ifthenelse{\equal{\minted@opt@bgcolor}{}}
156 | {}
157 | {\begin{minted@colorbg}{\minted@opt@bgcolor}}
158 | \input{\jobname.out.pyg}
159 | \ifthenelse{\equal{\minted@opt@bgcolor}{}}
160 | {}
161 | {\end{minted@colorbg}}
162 | \DeleteFile{\jobname.out.pyg}}
163 | \newcommand\minted@usedefaultstyle{\usemintedstyle{default}}
164 | \newcommand\usemintedstyle[1]{
165 | \renewcommand\minted@usedefaultstyle{}
166 | \immediate\write18{pygmentize -S #1 -f latex > \jobname.pyg}
167 | \input{\jobname.pyg}}
168 | \newcommand\mint[3][]{
169 | \DefineShortVerb{#3}
170 | \minted@resetoptions
171 | \setkeys{minted@opt}{#1}
172 | \SaveVerb[aftersave={
173 | \UndefineShortVerb{#3}
174 | \minted@savecode{\FV@SV@minted@verb}
175 | \minted@pygmentize{#2}
176 | \DeleteFile{\jobname.pyg}}]{minted@verb}#3}
177 | \newcommand\minted@proglang[1]{}
178 | \newenvironment{minted}[2][]
179 | {\VerbatimEnvironment
180 | \renewcommand{\minted@proglang}[1]{#2}
181 | \minted@resetoptions
182 | \setkeys{minted@opt}{#1}
183 | \begin{VerbatimOut}[codes={\catcode`\^^I=12}]{\jobname.pyg}}%
184 | {\end{VerbatimOut}
185 | \minted@pygmentize{\minted@proglang{}}
186 | \DeleteFile{\jobname.pyg}}
187 | \newcommand\inputminted[3][]{
188 | \minted@resetoptions
189 | \setkeys{minted@opt}{#1}
190 | \minted@pygmentize[#3]{#2}}
191 | \newcommand\newminted[3][]{
192 | \ifthenelse{\equal{#1}{}}
193 | {\def\minted@envname{#2code}}
194 | {\def\minted@envname{#1}}
195 | \newenvironment{\minted@envname}
196 | {\VerbatimEnvironment\begin{minted}[#3]{#2}}
197 | {\end{minted}}
198 | \newenvironment{\minted@envname *}[1]
199 | {\VerbatimEnvironment\begin{minted}[#3,##1]{#2}}
200 | {\end{minted}}}
201 | \newcommand\newmint[3][]{
202 | \ifthenelse{\equal{#1}{}}
203 | {\def\minted@shortname{#2}}
204 | {\def\minted@shortname{#1}}
205 | \expandafter\newcommand\csname\minted@shortname\endcsname[2][]{
206 | \mint[#3,##1]{#2}##2}}
207 | \newcommand\newmintedfile[3][]{
208 | \ifthenelse{\equal{#1}{}}
209 | {\def\minted@shortname{#2file}}
210 | {\def\minted@shortname{#1}}
211 | \expandafter\newcommand\csname\minted@shortname\endcsname[2][]{
212 | \inputminted[#3,##1]{#2}{##2}}}
213 | \@ifundefined{minted@float@within}
214 | {\newfloat{listing}{h}{lol}}
215 | {\newfloat{listing}{h}{lol}[\minted@float@within]}
216 | \newcommand\listingscaption{Listing}
217 | \floatname{listing}{\listingscaption}
218 | \newcommand\listoflistingscaption{List of listings}
219 | \providecommand\listoflistings{\listof{listing}{\listoflistingscaption}}
220 | \AtBeginDocument{
221 | \minted@usedefaultstyle}
222 | \AtEndOfPackage{
223 | \ifnum\pdf@shellescape=1\relax\else
224 | \PackageError{minted}
225 | {You must invoke LaTeX with the
226 | -shell-escape flag}
227 | {Pass the -shell-escape flag to LaTeX. Refer to the minted.sty
228 | documentation for more information.}\fi
229 | \TestAppExists{pygmentize}
230 | \ifAppExists\else
231 | \PackageError{minted}
232 | {You must have `pygmentize' installed
233 | to use this package}
234 | {Refer to the installation instructions in the minted
235 | documentation for more information.}
236 | \fi}
237 | \endinput
238 | %%
239 | %% End of file `minted.sty'.
240 |
--------------------------------------------------------------------------------
/paper.bib:
--------------------------------------------------------------------------------
1 | @article{behnel2011cython,
2 | title={Cython: the best of both worlds},
3 | author={Behnel, Stefan and Bradshaw, Robert and Citro, Craig
4 | and Dalcin, Lisandro and Seljebotn, Dag Sverre and Smith, Kurt},
5 | journal={Comp. in Sci. \& Eng.},
6 | volume={13},
7 | number={2},
8 | pages={31--39},
9 | year={2011},
10 | publisher={IEEE}
11 | }
12 |
13 | @INPROCEEDINGS{vanderplas2012astroML,
14 | author={{Vanderplas}, J.T. and {Connolly}, A.J.
15 | and {Ivezi{\'c}}, {\v Z}. and {Gray}, A.},
16 | booktitle={Conf. on Intelligent Data Understanding (CIDU)},
17 | title={Introduction to {astroML}: Machine learning for astrophysics},
18 | pages={47--54},
19 | year={2012}
20 | }
21 |
22 | @article{bergstra2012,
23 | title={Random Search for Hyper-Parameter Optimization},
24 | author={James Bergstra and Yoshua Bengio},
25 | journal={JMLR},
26 | volume=13,
27 | year=2012,
28 | pages={281--305},
29 | }
30 |
31 | @article{bezanson2012julia,
32 | author = {Jeff Bezanson and
33 | Stefan Karpinski and
34 | Viral B. Shah and
35 | Alan Edelman},
36 | title = {Julia: A Fast Dynamic Language for Technical Computing},
37 | journal = {CoRR},
38 | volume = {abs/1209.5145},
39 | year = {2012},
40 | ee = {http://arxiv.org/abs/1209.5145},
41 | bibsource = {DBLP, http://dblp.uni-trier.de}
42 | }
43 |
44 | @inproceedings{bolz2009tracing,
45 | title={Tracing the meta-level: {PyPy}'s tracing {JIT} compiler},
46 | author={Bolz, Carl Friedrich and Cuni, Antonio and Fijalkowski, Maciej and Rigo, Armin},
47 | booktitle={Proc. 4th Workshop on the Implementation, Compilation, Optimization
48 | of Object-Oriented Languages and Programming Systems},
49 | pages={18--25},
50 | year={2009},
51 | organization={ACM}
52 | }
53 |
54 | @article{chang2011libsvm,
55 | title={{LIBSVM}: a library for support vector machines},
56 | author={Chang, Chih-Chung and Lin, Chih-Jen},
57 | journal={ACM Trans. on Intelligent Systems and Technology},
58 | volume={2},
59 | number={3},
60 | pages={27},
61 | year={2011},
62 | }
63 |
64 | @article{dagum1998openmp,
65 | title={{OpenMP}: an industry standard {API} for shared-memory programming},
66 | author={Dagum, Leonardo and Menon, Ramesh},
67 | journal={Computational Sci. \& Eng.},
68 | volume={5},
69 | number={1},
70 | pages={46--55},
71 | year={1998},
72 | publisher={IEEE}
73 | }
74 |
75 | @article{dietterich1995solving,
76 | title={Solving multiclass learning problems
77 | via error-correcting output codes},
78 | author={Dietterich, Thomas G. and Bakiri, Ghulum},
79 | journal={J. AI Research},
80 | volume=2,
81 | year={1995}
82 | }
83 |
84 |
85 | @book{elemstatlearn,
86 | title={The Elements of Statistical Learning},
87 | author={Trevor Hastie and Robert Tibshirani and Jerome Friedman},
88 | publisher={Springer},
89 | year={2009},
90 | }
91 |
92 | @article{fan2008,
93 | title={{LIBLINEAR}: A library for large linear classification},
94 | author={Rong-En Fan and Kai-Wei Chang and Cho-Jui Hsieh
95 | and Xiang-Rui Wang and Chih-Jen Lin},
96 | journal={JMLR},
97 | volume=9,
98 | year=2008,
99 | pages={1871--1874},
100 | }
101 |
102 | @article{gansner2000,
103 | author = {Emden R. Gansner and Stephen C. North},
104 | title = {An open graph visualization system
105 | and its applications to software engineering},
106 | journal = {Software---Practice and Experience},
107 | year = {2000},
108 | volume = {30},
109 | number = {11},
110 | pages = {1203--1233}
111 | }
112 |
113 | @article{hall2009weka,
114 | title={The {WEKA} data mining software: an update},
115 | author={Hall, Mark and Frank, Eibe and Holmes, Geoffrey and
116 | Pfahringer, Bernhard and Reutemann, Peter and Witten, Ian H.},
117 | journal={ACM SIGKDD Explorations Newsletter},
118 | volume={11},
119 | number={1},
120 | pages={10--18},
121 | year={2009},
122 | }
123 |
124 | @article{hunter2007matplotlib,
125 | title={Matplotlib: A 2D graphics environment},
126 | author={Hunter, John D.},
127 | journal={Comp. in Sci. \& Eng.},
128 | pages={90--95},
129 | year={2007},
130 | publisher={IEEE}
131 | }
132 |
133 | @article{langford2009sparse,
134 | title={Sparse online learning via truncated gradient},
135 | author={Langford, John and Li, Lihong and Zhang, Tong},
136 | journal={JMLR},
137 | volume={10},
138 | pages={777--801},
139 | year={2009},
140 | }
141 |
142 | @inproceedings{li2010random,
143 | title={Random {F}ourier approximations for skewed multiplicative histogram kernels},
144 | author={Li, Fuxin and Ionescu, Catalin and Sminchisescu, Cristian},
145 | booktitle={Proc. 32nd DAGM Conf. on Pattern Recognition},
146 | pages={262--271},
147 | year={2010},
148 | publisher={Springer}
149 | }
150 |
151 | @article{mblondel-mlj2013,
152 | year={2013},
153 | journal={Machine Learning},
154 | volume={93},
155 | number={1},
156 | title={Block coordinate descent algorithms for large-scale sparse multiclass classification},
157 | author={Blondel, Mathieu and Seki, Kazuhiro and Uehara, Kuniaki},
158 | pages={31-52},
159 | }
160 |
161 | @article{pedregosa2011,
162 | title={Scikit-learn: Machine Learning in {P}ython},
163 | author={Pedregosa, F. and Varoquaux, G. and Gramfort, A. and Michel, V.
164 | and Thirion, B. and Grisel, O. and Blondel, M. and Prettenhofer, P.
165 | and Weiss, R. and Dubourg, V. and Vanderplas, J. and Passos, A.
166 | and Cournapeau, D. and Brucher, M. and Perrot, M. and Duchesnay, E.},
167 | journal={JMLR},
168 | volume={12},
169 | pages={2825--2830},
170 | year={2011},
171 | }
172 |
173 | @article{perez2007ipython,
174 | title={{IPython}: a system for interactive scientific computing},
175 | author={Perez, Fernando and Granger, Brian E.},
176 | journal={Comp. in Sci. \& Eng.},
177 | volume={9},
178 | number={3},
179 | pages={21--29},
180 | year={2007},
181 | publisher={IEEE}
182 | }
183 |
184 | @misc{pythontut,
185 | author={{\VAN{Rossum}{Van}{van}} Rossum, Guido},
186 | publisher={Python Software Foundation},
187 | title={The {P}ython Tutorial},
188 | url={http://docs.python.org/2/tutorial/classes.html},
189 | year=2013
190 | }
191 |
192 | @inproceedings{rahimi2007random,
193 | title={Random features for large-scale kernel machines},
194 | author={Rahimi, Ali and Recht, Benjamin},
195 | booktitle={Advances in Neural Information Processing Systems},
196 | volume={20},
197 | pages={1177--1184},
198 | year={2007}
199 | }
200 |
201 | @inproceedings{rehurek2010gensim,
202 | title={Software framework for topic modelling with large corpora},
203 | author = {Radim {\v R}eh{\r u}{\v r}ek and Petr Sojka},
204 | booktitle={Proc. LREC Workshop on New Challenges for NLP Frameworks},
205 | pages={46--50},
206 | year={2010},
207 | }
208 |
209 | @article{Schreiber:2007,
210 | author={Schreiber, Rob},
211 | title= {{MATLAB}},
212 | year= 2007,
213 | journal={Scholarpedia},
214 | volume=2,
215 | number=7,
216 | pages=2929
217 | }
218 |
219 | @inproceedings{sculley2010web,
220 | title={Web-scale $k$-means clustering},
221 | author={Sculley, D.},
222 | booktitle={Proc. 19th Int'l WWW Conf.},
223 | pages={1177--1178},
224 | year={2010},
225 | }
226 |
227 | @book{seibel2009coders,
228 | title={Coders at Work: Reflections on the Craft of Programming},
229 | author={Peter Seibel},
230 | publisher={Apress},
231 | year={2009},
232 | }
233 |
234 | @manual{r,
235 | title = {R: A Language and Environment for Statistical Computing},
236 | author = {{R Core Team}},
237 | organization = {R Foundation},
238 | address = {Vienna, Austria},
239 | year = 2013,
240 | url = {http://www.R-project.org}
241 | }
242 |
243 | @article{vanderwalt2011,
244 | title={The {NumPy} array: a structure for efficient numerical computation},
245 | author={{\VAN{Walt}{Van der}{van der}} Walt, St\'efan
246 | and Colbert, S. Chris and Varoquaux, Ga\"el},
247 | journal={Comp. in Sci. \& Eng.},
248 | volume={13},
249 | number={2},
250 | pages={22--30},
251 | year={2011},
252 | publisher={IEEE}
253 | }
254 |
255 | @misc{varoquaux2013scipy,
256 | title={Python Scientific Lecture Notes},
257 | author={Valentin Haenel and Emmanuelle Gouillart and Ga\"el Varoquaux},
258 | year=2013,
259 | url={http://scipy-lectures.github.io/}
260 | }
261 |
262 | @inproceedings{vedaldi2010efficient,
263 | title={Efficient additive kernels via explicit feature maps},
264 | author={Vedaldi, Andrea and Zisserman, Andrew},
265 | booktitle={IEEE Conf. on Computer Vision and Pattern Recognition (CVPR)},
266 | pages={3539--3546},
267 | year={2010},
268 | }
269 |
270 | @inproceedings{weinberger2009,
271 | title={Feature Hashing for Large Scale Multitask Learning},
272 | author={Kilian Weinberger and Anirban Dasgupta and John Langford
273 | and Alex Smola and Josh Attenberg},
274 | booktitle={Proc. ICML},
275 | year={2009},
276 | }
277 |
278 | @article{guazzelli2009pmml,
279 | title={{PMML}: An open standard for sharing models},
280 | author={Guazzelli, Alex and Zeller, Michael and Lin, Wen-Ching and Williams, Graham},
281 | journal={The R Journal},
282 | volume={1},
283 | number={1},
284 | pages={60--65},
285 | year={2009}
286 | }
287 |
288 | @inproceedings{Demsar2004,
289 | author = {Dem\v{s}ar, J and Zupan, B and Leban, G and Curk, T},
290 | booktitle = {Knowledge Discovery in Databases PKDD 2004},
291 | publisher = {Springer},
292 | series = {Lecture Notes in Computer Science},
293 | title = {Orange: From Experimental Machine Learning to Interactive Data Mining},
294 | year = {2004}
295 | }
296 |
297 | @inproceedings{sculley2009large,
298 | title={Large scale learning to rank},
299 | author={Sculley, D},
300 | booktitle={NIPS Workshop on Advances in Ranking},
301 | pages={1--6},
302 | year={2009}
303 | }
304 |
--------------------------------------------------------------------------------
/paper.tex:
--------------------------------------------------------------------------------
1 | \documentclass{llncs}
2 |
3 | \usepackage[utf8]{inputenc}
4 | \usepackage{amsmath}
5 | %\usepackage{hyperref}
6 | \usepackage[authoryear,round]{natbib}
7 | \usepackage{xcolor}
8 | \usepackage{xspace}
9 | \usepackage{minted}
10 |
11 | \definecolor{rulecolor}{rgb}{0.80,0.80,0.80}
12 | \newminted{python}{frame=single,rulecolor=\color{rulecolor}}
13 |
14 | % tt font with bold support
15 | %\renewcommand{\ttdefault}{pcr}
16 |
17 | \pagestyle{headings}
18 |
19 | \newcommand{\sklearn}{\textit{scikit-learn}\xspace}
20 |
21 | \title{API design for machine learning software: experiences from the
22 | scikit-learn project}
23 |
24 | \author{Lars~Buitinck~\inst{1} \and
25 | Gilles~Louppe~\inst{2} \and
26 | Mathieu~Blondel~\inst{3} \and
27 | Fabian~Pedregosa~\inst{4} \and
28 | Andreas~C.~M\"uller~\inst{5} \and
29 | Olivier~Grisel~\inst{6} \and
30 | Vlad~Niculae~\inst{7} \and
31 | Peter~Prettenhofer~\inst{8} \and
32 | Alexandre~Gramfort~\inst{4,9} \and
33 | Jaques~Grobler~\inst{4} \and
34 | Robert~Layton~\inst{10} \and
35 | Jake~Vanderplas~\inst{11} \and
36 | Arnaud~Joly~\inst{2} \and
37 | Brian Holt~\inst{12} \and
38 | Gaël~Varoquaux~\inst{4}}
39 |
40 | \institute{ILPS, Informatics Institute, University of Amsterdam \and
41 | University of Liège \and
42 | Kobe University \and
43 | Parietal, INRIA Saclay \and
44 | University of Bonn \and
45 | Independent consultant \and
46 | University of Bucharest \and
47 | Ciuvo GmbH \and
48 | Institut Mines-Telecom, Telecom ParisTech, CNRS LTCI \and
49 | University of Ballarat \and
50 | University of Washington \and
51 | Samsung Electronics Research Institute}
52 |
53 | % Dutch name sorting hack as per http://tex.stackexchange.com/a/40750/2806
54 | \DeclareRobustCommand{\VAN}[3]{#2}
55 |
56 | \begin{document}
57 |
58 | \maketitle
59 |
60 | \begin{abstract}
61 | \sklearn is an increasingly popular machine learning
62 | library. Written in Python, it is designed to be simple and efficient, accessible to
63 | non-experts, and reusable in various contexts. In this paper, we present and
64 | discuss our design choices for the application programming interface (API) of
65 | the project. In particular, we describe the simple and elegant interface shared
66 | by all learning and processing units in the library and then discuss its
67 | advantages in terms of composition and reusability. The paper also comments on
68 | implementation details specific to the Python ecosystem and analyzes obstacles
69 | faced by users and developers of the library.
70 | \end{abstract}
71 |
72 | \setcounter{footnote}{0}
73 |
74 | \section{Introduction}
75 |
76 | The \sklearn project\footnote{\url{http://scikit-learn.org}}
77 | \citep{pedregosa2011} provides an open source machine learning
78 | library for the Python programming language. The ambition of the project is to
79 | provide efficient and well-established machine learning tools within a
80 | programming environment that is accessible to non-machine learning experts and
81 | reusable in various scientific areas. The project is not a novel domain-specific
82 | language, but a library that provides machine learning idioms to a
83 | general-purpose high-level language. Among other things, it includes classical learning
84 | algorithms, model evaluation and selection tools, as well as
85 | preprocessing procedures. The library is
86 | distributed under the simplified BSD license, encouraging its use in both
87 | academic and commercial settings.
88 |
89 | \sklearn is a library, i.e.\ a collection of classes
90 | and functions that users import into Python programs. Using \sklearn therefore
91 | requires basic Python programming knowledge. No command-line interface, let
92 | alone a graphical user interface, is offered for non-programmer users. Instead,
93 | interactive use is made possible by the Python interactive interpreter, and its
94 | enhanced replacement IPython \citep{perez2007ipython}, which offer a
95 | \textsc{matlab}-like working environment specifically designed for scientific
96 | use.
97 |
98 | The library has been designed to tie in with the set of numeric
99 | and scientific packages centered around the NumPy and SciPy libraries. NumPy
100 | \citep{vanderwalt2011} augments Python with a contiguous numeric array datatype
101 | and fast array computing primitives,
102 | while SciPy \citep{varoquaux2013scipy} extends it
103 | further with common numerical operations, either by implementing
104 | these in Python/NumPy or by wrapping existing C/C{}\verb!++!/Fortran
105 | implementations. Building upon this stack, a series of libraries called
106 | \textit{scikits} were created, to complement SciPy with
107 | domain-specific toolkits. Currently, the two most popular and feature-complete
108 | ones are by far \sklearn and
109 | \textit{scikit-image},\footnote{\url{http://scikit-image.org}}
110 | which does image processing.
111 |
112 | Started in 2007, \sklearn is developed by an international team of over a dozen
113 | core developers, mostly researchers from various fields (e.g.,
114 | computer science, neuroscience, astrophysics). The project also benefits
115 | from many occasional contributors proposing small bugfixes or
116 | improvements. Development proceeds on GitHub\footnote{\url{https://github.com/scikit-learn}},
117 | a platform which greatly facilitates this kind of
118 | collaboration. Because of the large number of developers, emphasis is
119 | put on keeping the project maintainable. In particular, code must follow
120 | specific quality guidelines, such as style consistency and unit-test coverage.
121 | Documentation and examples are required for all features,
122 | and major changes must pass code review by at least two
123 | developers not involved in the implementation of the proposed change.
124 |
125 | \sklearn's popularity can be gauged from various indicators such as the hundreds
126 | of citations in scientific publications, successes in various machine learning
127 | challenges (e.g., Kaggle), and statistics derived from our
128 | repositories and mailing list. At the time of writing, the project is watched
129 | by 1365 people and forked 693 times on GitHub; the mailing list receives more
130 | than 300 mails per month; version control logs
131 | % ddaa494c116e3c16bf032003c5cccbed851733d2
132 | show 183 unique contributors to the codebase and the online documentation
133 | receives 37,000 unique visitors and 295,000 pageviews per month.
134 |
135 | \citet{pedregosa2011} briefly presented \sklearn and
136 | benchmarked it against several competitors.
137 | In this paper, we instead present an
138 | in-depth analysis of design choices made when building the library,
139 | detailing how we organized and operationalized
140 | common machine learning concepts.
141 | We first present in section~\ref{sec:core-api} the central application
142 | programming interface (API) and then describe, in section~\ref{sec:advanced-api},
143 | advanced API mechanisms built on the core interface.
144 | Section~\ref{sec:implementation} briefly describes the implementation.
145 | Section~\ref{sec:comparison} then
146 | compares \sklearn to other major projects in terms of API\@.
147 | Section~\ref{sec:future_work} outlines some of the objectives for
148 | a \sklearn 1.0 release.
149 | We conclude by summarizing the major points of this paper in
150 | section~\ref{sec:conclusions}.
151 |
152 | \section{Core API}
153 |
154 | \label{sec:core-api}
155 |
156 | All objects within \sklearn share a uniform common basic API consisting of three
157 | complementary interfaces: an \textit{estimator} interface for building and
158 | fitting models, a \textit{predictor} interface for making predictions and a
159 | \textit{transformer} interface for converting data. In this section, we describe
160 | these three interfaces, after reviewing our general principles and data
161 | representation choices.
162 |
163 | \subsection{General principles}
164 |
165 | As much as possible, our
166 | design choices have been guided so as to avoid the proliferation of framework
167 | code. We try to adopt simple conventions and to limit to a minimum the number of
168 | methods an object must implement. The API is designed to adhere to the following
169 | broad principles:
170 |
171 | \begin{description}
172 | \item[Consistency.]
173 | All objects (basic or composite) share a consistent interface composed of
174 | a limited set of methods. This interface is documented in a consistent
175 | manner for all objects.
176 | \item[Inspection.]
177 | Constructor parameters and parameter values determined by learning
178 | algorithms are stored and exposed as public attributes.
179 | \item[Non-proliferation of classes.]
180 | Learning algorithms are the only objects to be represented using custom classes.
181 | Datasets are represented as NumPy arrays or SciPy sparse matrices.
182 | Hyper-parameter names and values are represented as standard
183 | Python strings or numbers whenever possible.
184 | This keeps \sklearn easy to use and easy to combine with other libraries.
185 | \item[Composition.]
186 | Many machine learning tasks are expressible
187 | as sequences or combinations of transformations to data.
188 | Some learning algorithms are also naturally viewed
189 | as meta-algorithms parametrized on other algorithms.
190 | Whenever feasible, such algorithms are implemented and composed from
191 | existing building blocks.
192 | \item[Sensible defaults.]
193 | Whenever an operation requires a user-defined parameter,
194 | an appropriate default value is defined by the library.
195 | The default value should cause the operation to be performed
196 | in a sensible way (giving a baseline solution for the task at hand).
197 | \end{description}
198 |
199 | \subsection{Data representation}
200 | \label{sec:arrays}
201 |
202 | In most machine learning tasks, data is modeled as a set of variables. For
203 | example, in a supervised learning task, the goal is to find a mapping
204 | from input variables $X_1, \ldots X_p$, called features, to
205 | some output variables $Y$. A sample is then defined as a pair of
206 | values $([x_1, \ldots, x_p]^\mathrm{T}, y)$ of these variables. A widely used
207 | representation of a dataset, a collection of
208 | such samples, is a pair of matrices with numerical values: one for the input
209 | values and one for the output values. Each row of these matrices
210 | corresponds to one sample of the dataset and each column to one variable of
211 | the problem.
212 |
213 | In \sklearn, we chose a representation of data that is as close as
214 | possible to the matrix representation: datasets are encoded as NumPy
215 | multidimensional arrays for dense data and as SciPy sparse matrices for sparse
216 | data. While these may seem rather unsophisticated data representations when
217 | compared to more object-oriented constructs, such as the ones used by
218 | Weka \citep{hall2009weka}, they bring the prime advantage of allowing us to rely
219 | on efficient NumPy and SciPy vectorized operations while keeping
220 | the code short and readable. This design choice has also been motivated by
221 | the fact that, given their pervasiveness in many other scientific Python
222 | packages, many scientific users of Python are already familiar with NumPy dense
223 | arrays and SciPy sparse matrices.
224 | From a practical point of view, these formats also provide a collection of
225 | data loading and conversion tools which make them very easy to use in many
226 | contexts. Moreover, for tasks where the inputs are text files or semi-structured
227 | objects, we provide \textit{vectorizer} objects that efficiently convert such
228 | data to the NumPy or SciPy formats.
229 |
230 | For efficiency reasons, the public interface is oriented towards processing
231 | batches of samples rather than single samples per API call. While classification
232 | and regression algorithms can indeed make predictions for single samples,
233 | \sklearn objects are not optimized for this use case. (The few online learning
234 | algorithms implemented are intended to take mini-batches.) Batch processing makes
235 | optimal use of NumPy and SciPy by preventing the overhead inherent to Python
236 | function calls or due to per-element dynamic type checking. Although this might
237 | seem to be an artifact of the Python language, and therefore an implementation
238 | detail that leaks into the API, we argue that APIs should be designed so as not
239 | to tie a library to a suboptimal implementation strategy. As such, batch
240 | processing enables fast implementations in lower-level languages (where memory
241 | hierarchy effects and the possibility of internal parallelization come into
242 | play).
243 |
244 |
245 | \subsection{Estimators}
246 | \label{sec:estimators}
247 |
248 | The \textit{estimator} interface is at the core of the
249 | library. It defines instantiation mechanisms of objects and exposes a
250 | \texttt{fit} method for learning a model from training data. All supervised and
251 | unsupervised learning algorithms (e.g., for classification, regression or
252 | clustering) are offered as objects implementing this interface. Machine
253 | learning tasks like feature extraction, feature selection or dimensionality
254 | reduction are also provided as estimators.
255 |
256 | Estimator initialization and actual learning are strictly separated,
257 | in a way that is similar to partial function application:
258 | an estimator is initialized from a set of named constant hyper-parameter values
259 | (e.g., the $C$ constant in SVMs)
260 | and can be considered as a function
261 | that maps these values to actual learning algorithms.
262 | The constructor of an estimator does not see any actual data, nor does it perform any actual learning.
263 | All it does is attach the given parameters to the object.
264 | For the sake of convenient model inspection, hyper-parameters are set as public attributes,
265 | which is especially important in model selection settings.
266 | For ease of use, default hyper-parameter values are also provided
267 | for all built-in estimators.
268 | These default values are set to be relevant in many common
269 | situations in order to make estimators as effective as possible
270 | \textit{out of the box} for non-experts.
271 |
272 | Actual learning is performed by the \texttt{fit} method. This method is called
273 | with training data (e.g., supplied as two arrays \texttt{X\_train} and
274 | \texttt{y\_train} in supervised learning estimators). Its task is to run a
275 | learning algorithm and to determine model-specific parameters from the training
276 | data and set these as attributes on the estimator object. As a convention, the
277 | parameters learned by an estimator are exposed as public attributes with names
278 | suffixed with a trailing underscore (e.g., \texttt{coef\_} for the
279 | learned coefficients of a linear model),
280 | again to facilitate model inspection.
281 | In the partial application view,
282 | \texttt{fit} is a function from data to a model of that data.
283 | It always returns the estimator object it was called on,
284 | which now serves as a model of its input and can be used to perform predictions or transformations of input data.
285 |
286 | From the start, the choice to let a single object serve dual purpose as
287 | estimator and model has mostly been driven by usability and technical
288 | considerations. From the user point of view, having two coupled instances (i.e.,
289 | an estimator object, used as a factory, and a model object, produced by the
290 | estimator) indeed decreases the ease of use and is also more likely to
291 | unnecessarily confuse newcomers. From the developer point of view, decoupling
292 | estimators from models also creates parallel class hierarchies and increases the
293 | overall maintenance complexity of the project. For these practical reasons, we
294 | believe that decoupling estimators from models is not worth the effort. A good
295 | reason for decoupling however, would be that it makes it possible to ship a
296 | model in a new environment without having to deal with potentially complex
297 | software dependencies. Such a feature could however still be implemented in
298 | \sklearn by making estimators able to export a fitted model, using the
299 | information from its public attributes, to an agnostic model description such as
300 | PMML~\citep{guazzelli2009pmml}.
301 |
302 | To illustrate the initialize-fit sequence,
303 | let us consider a supervised learning task using logistic regression.
304 | Given the API defined above, solving this problem is as simple as the following
305 | example.
306 | \begin{pythoncode}
307 | from sklearn.linear_model import LogisticRegression
308 |
309 | clf = LogisticRegression(penalty="l1")
310 | clf.fit(X_train, y_train)
311 | \end{pythoncode}
312 | In this snippet, a \texttt{LogisticRegression} estimator is first initialized by
313 | setting the \texttt{penalty} hyper-parameter to \texttt{"l1"} for
314 | $\ell_1$ regularization. Other hyper-parameters (such as \texttt{C},
315 | the strength of the regularization) are not explicitly given and
316 | thus set to the default values. Upon calling \texttt{fit}, a model is
317 | learned from the training arrays \texttt{X\_train} and \texttt{y\_train},
318 | and stored within the object for later use.
319 | %% (In this example, \texttt{fit} is called solely for its side effects.)
320 | Since all estimators share the same interface, using a different learning algorithm is
321 | as simple as replacing the constructor (the class name);
322 | to build a random forest on
323 | the same data, one would simply replace
324 | \texttt{LogisticRegression(penalty="l1")} in the snippet above by
325 | \texttt{RandomForestClassifier()}.
326 |
327 | In \sklearn, classical learning algorithms are not the only objects to be
328 | implemented as estimators. For example, preprocessing routines (e.g., scaling of
329 | features) or feature extraction techniques (e.g., vectorization of text
330 | documents) also implement the \textit{estimator} interface. Even stateless
331 | processing steps, that do not require the \texttt{fit} method to
332 | perform useful work, implement the estimator interface. As we will illustrate
333 | in the next sections, this design pattern is indeed of prime importance for
334 | consistency, composition and model selection reasons.
335 |
336 | \subsection{Predictors}
337 |
338 | The \textit{predictor} interface extends the notion of an estimator
339 | by adding a \texttt{predict}
340 | method that takes an array \texttt{X\_test} and produces
341 | predictions for \texttt{X\_test}, based on the learned parameters of the
342 | estimator (we call the input to \texttt{predict} ``\texttt{X\_test}'' in order
343 | to emphasize that \texttt{predict} generalizes to new data). In the case of
344 | supervised learning estimators, this method typically returns the predicted
345 | labels or values computed by the model. Continuing with the previous example,
346 | predicted labels for \texttt{X\_test} can be obtained using the following
347 | snippet:
348 | \begin{pythoncode}
349 | y_pred = clf.predict(X_test)
350 | \end{pythoncode}
351 |
352 | Some unsupervised learning estimators may also implement the \texttt{predict}
353 | interface. The code in the snippet below fits a $k$-means model with $k=10$ on
354 | training data \texttt{X\_train}, and then uses the \texttt{predict} method to
355 | obtain cluster labels (integer indices) for unseen data \texttt{X\_test}.
356 | \begin{pythoncode}
357 | from sklearn.cluster import KMeans
358 |
359 | km = KMeans(n_clusters=10)
360 | km.fit(X_train)
361 | clust_pred = km.predict(X_test)
362 | \end{pythoncode}
363 |
364 | Apart from \texttt{predict}, predictors may also implement methods
365 | that quantify the confidence of predictions. In the case of
366 | linear models, the \texttt{decision\_function} method returns
367 | the distance of samples to the separating hyperplane. Some
368 | predictors also provide a \texttt{predict\_proba} method which returns
369 | class probabilities.
370 |
371 | Finally, predictors must provide a \texttt{score} function to assess their
372 | performance on a batch of input data. In supervised estimators, this method
373 | takes as input arrays \texttt{X\_test} and \texttt{y\_test} and typically
374 | computes the coefficient of determination between \texttt{y\_test} and
375 | \texttt{predict(X\_test)} in regression, or the accuracy
376 | in classification.
377 | The only requirement is that the \texttt{score} method return a value
378 | that quantifies the quality of its predictions (the higher, the better).
379 | An unsupervised estimator may also expose a \texttt{score} function
380 | to compute, for instance, the likelihood of the given data under its model.
381 |
382 | \subsection{Transformers}
383 |
384 | Since it is common to modify or filter data before feeding it to a learning
385 | algorithm, some estimators in the library implement a \textit{transformer}
386 | interface which defines a \texttt{transform} method. It takes as input some new
387 | data \texttt{X\_test} and yields as output a transformed version of
388 | \texttt{X\_test}. Preprocessing, feature selection, feature extraction and dimensionality reduction
389 | algorithms are all provided as transformers within the library. In our example,
390 | to standardize the input \texttt{X\_train} to zero mean and unit variance
391 | before fitting the logistic regression estimator,
392 | one would write:
393 | \begin{pythoncode}
394 | from sklearn.preprocessing import StandardScaler
395 |
396 | scaler = StandardScaler()
397 | scaler.fit(X_train)
398 | X_train = scaler.transform(X_train)
399 | \end{pythoncode}
400 | Of course, in practice, it is important to apply the same preprocessing to the
401 | test data \texttt{X\_test}. Since a \texttt{StandardScaler} estimator stores the
402 | mean and standard deviation that it computed for the training set, transforming
403 | an unseen test set \texttt{X\_test} maps it into the appropriate region of
404 | feature space:
405 | \begin{pythoncode}
406 | X_test = scaler.transform(X_test)
407 | \end{pythoncode}
408 | Transformers also include a variety of learning algorithms, such as
409 | dimension reduction (PCA, manifold learning), kernel approximation,
410 | and other mappings from one feature space to another.
411 |
412 | Additionally, by leveraging the fact that \texttt{fit} always returns the
413 | estimator it was called on, the \texttt{StandardScaler} example above can be
414 | rewritten in a single line using method chaining:
415 | \begin{pythoncode}
416 | X_train = StandardScaler().fit(X_train).transform(X_train)
417 | \end{pythoncode}
418 |
419 | Furthermore, every transformer allows \texttt{fit(X\_train).transform(X\_train)}
420 | to be written as \texttt{fit\_transform(X\_train)}.
421 | The combined \texttt{fit\_transform} method prevents repeated computations.
422 | Depending on the transformer,
423 | it may skip only an input validation step,
424 | or in fact use a more efficient algorithm for the transformation.
425 | In the same spirit, clustering algorithms provide a
426 | \texttt{fit\_predict} method
427 | that is equivalent to \texttt{fit} followed by \texttt{predict},
428 | returning cluster labels assigned to the training samples.
429 |
430 |
431 | \section{Advanced API}
432 |
433 | \label{sec:advanced-api}
434 |
435 | Building on the core interface introduced in the previous section, we now
436 | present advanced API mechanisms for building meta-estimators,
437 | composing complex estimators and selecting models. We also discuss design
438 | choices allowing for easy usage and extension of \sklearn.
439 |
440 | \subsection{Meta-estimators}
441 |
442 | Some machine learning algorithms are expressed naturally
443 | as meta-algorithms parametrized on simpler algorithms.
444 | Examples include ensemble methods which
445 | build and combine several simpler models (e.g., decision trees), or multiclass
446 | and multilabel classification schemes which can be used to turn a binary
447 | classifier into a multiclass or multilabel classifier. In \sklearn, such algorithms are
448 | implemented as \textit{meta-estimators}. They take as input an existing base
449 | estimator and use it internally for learning and making predictions.
450 | All meta-estimators implement the regular estimator interface.
451 |
452 | As an example, a logistic regression classifier
453 | uses by default a one-vs.-rest scheme
454 | for performing multiclass classification.
455 | A different scheme can be achieved
456 | by a meta-estimator wrapping a logistic regression estimator:
457 | \begin{pythoncode}
458 | from sklearn.multiclass import OneVsOneClassifier
459 |
460 | ovo_lr = OneVsOneClassifier(LogisticRegression(penalty="l1"))
461 | \end{pythoncode}
462 | For learning, the \texttt{OneVsOneClassifier} object
463 | \textit{clones} the logistic regression estimator multiple times,
464 | resulting in a set of $\frac{K(K-1)}{2}$ estimator objects
465 | for $K$-way classification,
466 | all with the same settings.
467 | For predictions, all estimators perform a binary classification and then vote to make the final decision.
468 | The snippet exemplifies the importance
469 | of separating object instantiation and actual learning.
470 |
471 | Since meta-estimators require users to construct nested objects,
472 | the decision to implement a meta-estimator
473 | rather than integrate the behavior it implements
474 | into existing estimator classes
475 | is always based on a trade-off between generality and ease of use.
476 | Relating to the example just given,
477 | all \sklearn classifiers are designed to do multiclass classification
478 | and the use of the \texttt{multiclass} module
479 | is only necessary in advanced use cases.
480 |
481 | % TODO: show a grid search on this estimator
482 |
483 | \subsection{Pipelines and feature unions}
484 |
485 | A distinguishing feature of the \sklearn API is its ability to
486 | compose new estimators from several base estimators. Composition mechanisms can
487 | be used to combine typical machine learning workflows into a single object which
488 | is itself an estimator, and can be employed wherever usual estimators can be used.
489 | In particular, \sklearn's model selection routines
490 | can be applied to composite estimators, allowing global optimization
491 | of all parameters in a complex workflow.
492 | Composition of estimators can be done in two
493 | ways: either sequentially through \texttt{Pipeline} objects, or in a parallel
494 | fashion through \texttt{FeatureUnion} objects.
495 |
496 | \texttt{Pipeline} objects chain multiple estimators into a single one. This is
497 | useful since a machine learning workflow typically involves a fixed sequence of
498 | processing steps (e.g., feature extraction, dimensionality reduction, learning
499 | and making predictions), many of which perform some kind of learning.
500 | A sequence of $N$ such steps can be combined into a
501 | \texttt{Pipeline} if the first $N-1$ steps are transformers; the last can be
502 | either a predictor, a transformer or both.
503 |
504 | Conceptually, fitting a pipeline to
505 | a training set amounts to the following recursive procedure: i) when only one
506 | step remains, call its \texttt{fit} method; ii) otherwise, \texttt{fit} the
507 | first step, use it to \texttt{transform} the training set and \texttt{fit} the
508 | rest of the pipeline with the transformed data. The pipeline exposes all the
509 | methods the last estimator in the pipe exposes. That is, if the last estimator
510 | is a predictor, the pipeline can itself be used as a predictor. If the last
511 | estimator is a transformer, then the pipeline is itself a transformer.
512 |
513 | \texttt{FeatureUnion} objects combine multiple transformers into a single one
514 | that concatenates their outputs. A union of two transformers that
515 | map input having $d$ features to $d'$ and $d''$ features respectively is
516 | a transformer that maps its $d$ input features to $d' + d''$ features.
517 | This generalizes in the obvious way to more than two transformers.
518 | In terms of API, a \texttt{FeatureUnion} takes as input a list of transformers.
519 | Calling \texttt{fit} on the union is the same as calling \texttt{fit}
520 | independently on each of the transformers and then joining their outputs.
521 |
522 | \texttt{Pipeline} and \texttt{FeatureUnion} can be
523 | combined to create complex and nested workflows.
524 | The following snippet illustrates how to create a complex estimator
525 | that computes both linear PCA and kernel PCA features on \texttt{X\_train}
526 | (through a \texttt{FeatureUnion}),
527 | selects the 10 best features in the combination according to an ANOVA test
528 | and feeds those to an $\ell_2$-regularized logistic regression model.
529 | \begin{pythoncode}
530 | from sklearn.pipeline import FeatureUnion, Pipeline
531 | from sklearn.decomposition import PCA, KernelPCA
532 | from sklearn.feature_selection import SelectKBest
533 |
534 | union = FeatureUnion([("pca", PCA()),
535 | ("kpca", KernelPCA(kernel="rbf"))])
536 |
537 | Pipeline([("feat_union", union),
538 | ("feat_sel", SelectKBest(k=10)),
539 | ("log_reg", LogisticRegression(penalty="l2"))
540 | ]).fit(X_train, y_train).predict(X_test)
541 | \end{pythoncode}
542 |
543 | \subsection{Model selection}
544 |
545 | As introduced in Section~\ref{sec:estimators}, hyper-parameters set in the
546 | constructor of an estimator
547 | determine the behavior of the learning algorithm
548 | and hence the performance of the resulting model on unseen data.
549 | The problem of \textit{model selection} is therefore to find, within
550 | some hyper-parameter space, the best combination of hyper-parameters, with
551 | respect to some user-specified criterion. For example, a decision
552 | tree with too small a value for the maximal tree depth
553 | parameter will tend to underfit, while too large a value will make it overfit.
554 |
555 | In \sklearn, model selection is supported in two distinct meta-estimators,
556 | \texttt{GridSearchCV} and \texttt{RandomizedSearchCV}. They take as input an
557 | estimator (basic or composite), whose hyper-parameters must be optimized, and a
558 | set of hyperparameter settings to search through.
559 | This set is represented as a mapping of parameter names
560 | to a set of discrete choices in the case of grid search,
561 | which exhaustively enumerates the ``grid'' (Cartesian product)
562 | of complete parameter combinations.
563 | Randomized search is a smarter algorithm
564 | that avoids the combinatorial explosion in grid search
565 | by sampling a fixed number of times from its parameter distributions
566 | (see \citealp{bergstra2012}).
567 |
568 | Optionally, the model selection algorithms
569 | also take a cross-validation scheme and a score function. \sklearn provides
570 | various such cross-validation schemes, including $k$-fold (default),
571 | stratified $k$-fold and leave-one-out.
572 | The score function used by default is the estimator's \texttt{score} method,
573 | but the library provides a variety of
574 | alternatives that the user can choose from,
575 | including accuracy, AUC and $F_1$ score for classification,
576 | $R^2$ score and mean squared error for regression.
577 |
578 | For each hyper-parameter combination and each train/validation split
579 | generated by the cross-validation scheme, \texttt{GridSearchCV}
580 | and \texttt{RandomizedSearchCV} fit their base estimator on the training set and
581 | evaluate its performance on the validation set. In the end, the best performing
582 | model on average is retained and exposed as the public attribute
583 | \texttt{best\_estimator\_}.
584 |
585 | The snippet below illustrates how to find
586 | hyper-parameter settings for an SVM classifier (SVC)
587 | that maximize $F_1$ score
588 | through 10-fold cross-validation on the training set.
589 | \begin{pythoncode}
590 | from sklearn.grid_search import GridSearchCV
591 | from sklearn.svm import SVC
592 |
593 | param_grid = [
594 | {"kernel": ["linear"], "C": [1, 10, 100, 1000]},
595 | {"kernel": ["rbf"], "C": [1, 10, 100, 1000],
596 | "gamma": [0.001, 0.0001]}
597 | ]
598 |
599 | clf = GridSearchCV(SVC(), param_grid, scoring="f1", cv=10)
600 | clf.fit(X_train, y_train)
601 | y_pred = clf.predict(X_test)
602 | \end{pythoncode}
603 | In this example, two distinct hyper-parameter grids are
604 | considered for the linear and radial basis function (RBF) kernels;
605 | an SVM with a linear kernel accepts a $\gamma$ parameter, but ignores it,
606 | so using a single parameter grid would waste computing time
607 | trying out effectively equivalent settings.
608 | Additionally, we see that
609 | \texttt{GridSearchCV} has a \texttt{predict} method, just like any other classifier:
610 | it delegates the \texttt{predict}, \texttt{predict\_proba}, \texttt{transform} and
611 | \texttt{score} methods to the best estimator
612 | (optionally after re-fitting it on the whole training set).
613 |
614 | \subsection{Extending scikit-learn}
615 |
616 | To ease code reuse, simplify implementation and skip the introduction of
617 | superfluous classes, the Python principle of \textit{duck typing} is exploited
618 | throughout the codebase. This means that estimators are defined by interface,
619 | not by inheritance, where the interface is entirely implicit
620 | as far as the programming language is concerned.
621 | Duck typing allows both for extensibility and
622 | flexibility: as long as an estimator follows the API and conventions
623 | outlined in Section~\ref{sec:core-api}, then it can be used in lieu of a
624 | built-in estimator (e.g., it can be plugged into pipelines or grid search)
625 | and external developers are not forced to inherit from any \sklearn class.
626 |
627 | In other places of the library, in particular in the vectorization code
628 | for unstructured input, the toolkit is also designed to be
629 | extensible. Here, estimators provide hooks for user-defined code: objects or
630 | functions that follow a specific API can be given as arguments at vectorizer
631 | construction time. The library then calls into this code, communicating with it by passing objects of standard Python/NumPy types.
632 | Again, such external user code can be kept agnostic of the \sklearn
633 | class hierarchy.
634 |
635 | Our rule of thumb is that user code should not be tied to \sklearn---which is a
636 | \textit{library}, and not a \textit{framework}. This principle indeed avoids a
637 | well-known problem with object-oriented design, which is that users wanting a
638 | ``banana'' should not get ``a gorilla holding the banana and the entire jungle''
639 | (J.~Armstrong, cited by \citealp[p.~213]{seibel2009coders}).
640 | That is, programs using \sklearn should not be intimately tied to it,
641 | so that their code can be reused with other toolkits or in other contexts.
642 |
643 |
644 | \section{Implementation}
645 | \label{sec:implementation}
646 |
647 | Our implementation guidelines emphasize writing efficient but readable code.
648 | In particular, we focus on making the codebase easily maintainable and
649 | understandable in order to favor external contributions. Whenever practicable,
650 | algorithms implemented in \sklearn are written in Python,
651 | using NumPy vector operations for numerical work.
652 | This allows for the code to remain concise, readable and
653 | efficient. For critical algorithms that cannot be easily and efficiently
654 | expressed as NumPy operations, we rely on Cython \citep{behnel2011cython}
655 | to achieve competitive performance and scalability. Cython is a
656 | compiled programming language that extends Python with static typing. It
657 | produces efficient C extension modules that are directly importable from the
658 | Python run-time system. Examples of algorithms written in Cython include
659 | stochastic gradient descent for linear models, some graph-based clustering
660 | algorithms and decision trees.
661 |
662 | To facilitate the installation and thus adoption of \sklearn,
663 | the set of external dependencies is kept to a bare minimum:
664 | only Python, NumPy and SciPy are required for a functioning installation.
665 | Binary distributions of these are available for the major platforms.
666 | Visualization functionality depends on Matplotlib \citep{hunter2007matplotlib}
667 | and/or Graphviz \citep{gansner2000},
668 | but neither is required to perform machine learning or prediction.
669 | When feasible, external libraries are integrated into the codebase.
670 | In particular, \sklearn includes modified versions of \textsf{LIBSVM} and \textsf{LIBLINEAR}
671 | \citep{chang2011libsvm,fan2008}, both written in C{}\verb!++!
672 | and wrapped using Cython modules.
673 |
674 | \section{Related software}
675 | \label{sec:comparison}
676 |
677 | Recent years have witnessed a rising interest in machine learning and data
678 | mining with applications in many fields. With this rise comes a host of machine
679 | learning packages (both open source and proprietary) with which \sklearn
680 | competes. Some of those, including Weka~\citep{hall2009weka} or
681 | Orange~\citep{Demsar2004}, offer APIs but actually focus on the use of a graphical user interface (GUI)
682 | which allows novices to easily apply machine learning algorithms. By
683 | contrast, the target audience of \sklearn is capable of programming, and
684 | therefore we focus on developing a usable and consistent API, rather than expend
685 | effort on creating a GUI\@. In addition, while GUIs are useful tools, they
686 | sometimes make reproducibility difficult in the case of complex workflows
687 | (although those packages usually have developed a GUI for managing complex
688 | tasks).
689 |
690 | Other existing machine learning packages
691 | such as SofiaML\footnote{\url{https://code.google.com/p/sofia-ml}}~\citep{sculley2009large}
692 | and Vowpal~Wabbit\footnote{\url{http://hunch.net/$\sim$vw}}
693 | are intended to be used as command-line tools
694 | (and sometimes do not offer any type of API).
695 | While these packages have the advantage
696 | that their users are not tied to a particular programming language,
697 | the users will find that they still need programming to process input/output,
698 | and will do so in a variety of languages.
699 | By contrast, \sklearn allows users to implement that entire workflow
700 | in a single program, written in a single language,
701 | and developed in a single working environment.
702 | This also makes it easier for researchers and developers
703 | to exchange and collaborate on software, as dependencies and setup are kept to a
704 | minimum.
705 |
706 | Similar benefits hold in the case of specialized languages
707 | for numeric and statistical programming
708 | such as \textsc{matlab} and R \citep{r}.
709 | In comparison to these, though, Python has the distinct advantage
710 | that it is a \textit{general purpose} language,
711 | while NumPy and SciPy extend it with functionality
712 | similar to that offered by \textsc{matlab} and R.
713 | Python has strong language and standard library support for such tasks as
714 | string/text processing, interprocess communication, networking
715 | and many of the other auxiliary tasks that machine learning programs
716 | (whether academic or commercial) routinely need to perform.
717 | While support for many of these tasks is improving in languages such as
718 | \textsc{matlab} and R, they still lag behind Python in their general purpose
719 | applicability.
720 | In many applications of machine learning these tasks, such as data access,
721 | data preprocessing and reporting, can be a more significant task than applying
722 | the actual learning algorithm.
723 |
724 | Within the realm of Python,
725 | a package that deserves mention is the Gensim topic modeling toolkit
726 | \citep{rehurek2010gensim},
727 | which exemplifies a different style of API design
728 | geared toward scalable processing of ``big data''.
729 | Gensim's method of dealing with large datasets is to use algorithms
730 | that have $O(1)$ space complexity and can be updated online.
731 | The API is designed around the Python concept of an \textit{iterable}
732 | (supported in the language by a restricted form of co-routines called
733 | \textit{generators}).
734 | While the text vectorizers part of \sklearn
735 | also use iterables to some extent,
736 | they still produce entire sparse matrices, intended to be used for batch or
737 | mini-batch learning. This is the case
738 | even in the stateless, $O(1)$ memory vectorizers
739 | that implement the hashing trick of \citet{weinberger2009}.
740 | This way of processing, as argued earlier in Section~\ref{sec:arrays},
741 | reduces various forms of overhead
742 | and allows effective use of the vectorized operations provided by NumPy and
743 | SciPy. We make no attempt to hide this batch-oriented processing from the user,
744 | allowing control over the amount of memory actually dedicated
745 | to \sklearn algorithms.
746 |
747 | \section{Future directions}
748 | \label{sec:future_work}
749 | There are several directions that the \sklearn project
750 | aims to focus on in future development.
751 | At present, the library does not support some classical machine learning
752 | algorithms,
753 | including neural networks, ensemble meta-estimators for
754 | bagging or subsampling strategies and missing value completion algorithms.
755 | However, tasks like structured prediction or reinforcement learning are
756 | considered out of scope for the project,
757 | since they would require quite different data representations and APIs.
758 |
759 | At a lower-level, parallel processing is a potential point of improvement.
760 | Some estimators in \sklearn are already able to leverage multicore processors,
761 | but only in a coarse-grained fashion.
762 | At present, parallel processing is difficult to accomplish in the Python environment;
763 | \sklearn targets the main implementation, CPython,
764 | which cannot execute Python code on multiple CPUs simultaneously.
765 | It follows that any parallel task decomposition must either be done
766 | inside Cython modules,
767 | or at a level high enough to warrant the overhead
768 | of creating multiple OS-level processes,
769 | and the ensuing inter-process communication.
770 | Parallel grid search is an example of the latter approach
771 | which has already been implemented.
772 | Recent versions of Cython include support for the OpenMP standard
773 | \citep{dagum1998openmp},
774 | which is a viable candidate technology
775 | for more fine-grained multicore support in \sklearn.
776 |
777 | Finally, a long-term solution for model persistence is missing.
778 | Currently, Python's \texttt{pickle} module is recommended for serialization,
779 | but this only offers a file format,
780 | not a way of preserving compatibility between versions.
781 | Also, it has security problems because its deserializer
782 | may execute arbitrary Python code,
783 | so models from untrusted sources cannot be safely ``unpickled''.
784 |
785 | These API issues will be addressed in the future in preparation for
786 | the 1.0 release of \sklearn.
787 |
788 |
789 | \section{Conclusion}
790 | \label{sec:conclusions}
791 |
792 | We have discussed the \sklearn API
793 | and the way it maps machine learning concepts and tasks
794 | onto objects and operations in the Python programming language.
795 | We have shown how a consistent API across the package makes \sklearn
796 | very \textbf{usable} in practice: experimenting with different learning
797 | algorithms is as simple as substituting a new class definition.
798 | Through composition interfaces such as Pipelines, Feature Unions,
799 | and meta-estimators, these simple building blocks lead to an API which is
800 | \textbf{powerful}, and can accomplish a wide variety of learning tasks
801 | within a small amount of easy-to-read code.
802 | Through duck-typing, the consistent API leads to a library that is
803 | easily \textbf{extensible}, and allows user-defined estimators to be
804 | incorporated into the \sklearn workflow without any explicit object
805 | inheritance.
806 |
807 | While part of the \sklearn API is necessarily Python-specific,
808 | core concepts may be applicable to
809 | machine learning applications and toolkits
810 | written in other (dynamic) programming languages.
811 | The power and extensibility of the \sklearn API are evidenced
812 | by the large and growing user-base, its use to solve real
813 | problems across a wide array of fields,
814 | as well as the appearance of third-party packages
815 | that follow the \sklearn conventions. Examples of such packages include
816 | \textit{astroML}\footnote{\url{http://astroml.org}}
817 | \citep{vanderplas2012astroML}, a package providing
818 | machine learning tools for astronomers, and
819 | \textit{wiseRF}\footnote{\url{http://wise.io}}, a commercial random forest
820 | implementation. The source code of
821 | the recently-proposed sparse multiclass
822 | algorithm of \citet{mblondel-mlj2013}, released as
823 | part of the
824 | \textit{lightning}\footnote{\url{https://github.com/mblondel/lightning}}
825 | package, also follows the \sklearn conventions.
826 | To maximize ease of use, we encourage more researchers
827 | to follow these conventions when releasing their software.
828 |
829 | \subsection*{Acknowledgments}
830 |
831 | The authors and contributors acknowledge active support from INRIA\@. Past and
832 | present sponsors of the project also include Google for funding
833 | scholarships through its Summer of Code program,
834 | the Python Software Foundation and Tinyclues for funding coding sprints.
835 |
836 | Gilles~Louppe and Arnaud~Joly are research fellows of the Belgian
837 | Fonds de la Recherche Scientifique (FNRS)
838 | and acknowledge its financial support.
839 |
840 | {\small
841 | \bibliographystyle{abbrvnat}
842 | \DeclareRobustCommand{\VAN}[3]{#3}
843 | \bibliography{paper}
844 | }
845 |
846 | \end{document}
847 |
--------------------------------------------------------------------------------
/llncs.cls:
--------------------------------------------------------------------------------
1 | % LLNCS DOCUMENT CLASS -- version 2.17 (12-Jul-2010)
2 | % Springer Verlag LaTeX2e support for Lecture Notes in Computer Science
3 | %
4 | %%
5 | %% \CharacterTable
6 | %% {Upper-case \A\B\C\D\E\F\G\H\I\J\K\L\M\N\O\P\Q\R\S\T\U\V\W\X\Y\Z
7 | %% Lower-case \a\b\c\d\e\f\g\h\i\j\k\l\m\n\o\p\q\r\s\t\u\v\w\x\y\z
8 | %% Digits \0\1\2\3\4\5\6\7\8\9
9 | %% Exclamation \! Double quote \" Hash (number) \#
10 | %% Dollar \$ Percent \% Ampersand \&
11 | %% Acute accent \' Left paren \( Right paren \)
12 | %% Asterisk \* Plus \+ Comma \,
13 | %% Minus \- Point \. Solidus \/
14 | %% Colon \: Semicolon \; Less than \<
15 | %% Equals \= Greater than \> Question mark \?
16 | %% Commercial at \@ Left bracket \[ Backslash \\
17 | %% Right bracket \] Circumflex \^ Underscore \_
18 | %% Grave accent \` Left brace \{ Vertical bar \|
19 | %% Right brace \} Tilde \~}
20 | %%
21 | \NeedsTeXFormat{LaTeX2e}[1995/12/01]
22 | \ProvidesClass{llncs}[2010/07/12 v2.17
23 | ^^J LaTeX document class for Lecture Notes in Computer Science]
24 | % Options
25 | \let\if@envcntreset\iffalse
26 | \DeclareOption{envcountreset}{\let\if@envcntreset\iftrue}
27 | \DeclareOption{citeauthoryear}{\let\citeauthoryear=Y}
28 | \DeclareOption{oribibl}{\let\oribibl=Y}
29 | \let\if@custvec\iftrue
30 | \DeclareOption{orivec}{\let\if@custvec\iffalse}
31 | \let\if@envcntsame\iffalse
32 | \DeclareOption{envcountsame}{\let\if@envcntsame\iftrue}
33 | \let\if@envcntsect\iffalse
34 | \DeclareOption{envcountsect}{\let\if@envcntsect\iftrue}
35 | \let\if@runhead\iffalse
36 | \DeclareOption{runningheads}{\let\if@runhead\iftrue}
37 |
38 | \let\if@openright\iftrue
39 | \let\if@openbib\iffalse
40 | \DeclareOption{openbib}{\let\if@openbib\iftrue}
41 |
42 | % languages
43 | \let\switcht@@therlang\relax
44 | \def\ds@deutsch{\def\switcht@@therlang{\switcht@deutsch}}
45 | \def\ds@francais{\def\switcht@@therlang{\switcht@francais}}
46 |
47 | \DeclareOption*{\PassOptionsToClass{\CurrentOption}{article}}
48 |
49 | \ProcessOptions
50 |
51 | \LoadClass[twoside]{article}
52 | \RequirePackage{multicol} % needed for the list of participants, index
53 | \RequirePackage{aliascnt}
54 |
55 | \setlength{\textwidth}{12.2cm}
56 | \setlength{\textheight}{19.3cm}
57 | \renewcommand\@pnumwidth{2em}
58 | \renewcommand\@tocrmarg{3.5em}
59 | %
60 | \def\@dottedtocline#1#2#3#4#5{%
61 | \ifnum #1>\c@tocdepth \else
62 | \vskip \z@ \@plus.2\p@
63 | {\leftskip #2\relax \rightskip \@tocrmarg \advance\rightskip by 0pt plus 2cm
64 | \parfillskip -\rightskip \pretolerance=10000
65 | \parindent #2\relax\@afterindenttrue
66 | \interlinepenalty\@M
67 | \leavevmode
68 | \@tempdima #3\relax
69 | \advance\leftskip \@tempdima \null\nobreak\hskip -\leftskip
70 | {#4}\nobreak
71 | \leaders\hbox{$\m@th
72 | \mkern \@dotsep mu\hbox{.}\mkern \@dotsep
73 | mu$}\hfill
74 | \nobreak
75 | \hb@xt@\@pnumwidth{\hfil\normalfont \normalcolor #5}%
76 | \par}%
77 | \fi}
78 | %
79 | \def\switcht@albion{%
80 | \def\abstractname{Abstract.}
81 | \def\ackname{Acknowledgement.}
82 | \def\andname{and}
83 | \def\lastandname{\unskip, and}
84 | \def\appendixname{Appendix}
85 | \def\chaptername{Chapter}
86 | \def\claimname{Claim}
87 | \def\conjecturename{Conjecture}
88 | \def\contentsname{Table of Contents}
89 | \def\corollaryname{Corollary}
90 | \def\definitionname{Definition}
91 | \def\examplename{Example}
92 | \def\exercisename{Exercise}
93 | \def\figurename{Fig.}
94 | \def\keywordname{{\bf Keywords:}}
95 | \def\indexname{Index}
96 | \def\lemmaname{Lemma}
97 | \def\contriblistname{List of Contributors}
98 | \def\listfigurename{List of Figures}
99 | \def\listtablename{List of Tables}
100 | \def\mailname{{\it Correspondence to\/}:}
101 | \def\noteaddname{Note added in proof}
102 | \def\notename{Note}
103 | \def\partname{Part}
104 | \def\problemname{Problem}
105 | \def\proofname{Proof}
106 | \def\propertyname{Property}
107 | \def\propositionname{Proposition}
108 | \def\questionname{Question}
109 | \def\remarkname{Remark}
110 | \def\seename{see}
111 | \def\solutionname{Solution}
112 | \def\subclassname{{\it Subject Classifications\/}:}
113 | \def\tablename{Table}
114 | \def\theoremname{Theorem}}
115 | \switcht@albion
116 | % Names of theorem like environments are already defined
117 | % but must be translated if another language is chosen
118 | %
119 | % French section
120 | \def\switcht@francais{%\typeout{On parle francais.}%
121 | \def\abstractname{R\'esum\'e.}%
122 | \def\ackname{Remerciements.}%
123 | \def\andname{et}%
124 | \def\lastandname{ et}%
125 | \def\appendixname{Appendice}
126 | \def\chaptername{Chapitre}%
127 | \def\claimname{Pr\'etention}%
128 | \def\conjecturename{Hypoth\`ese}%
129 | \def\contentsname{Table des mati\`eres}%
130 | \def\corollaryname{Corollaire}%
131 | \def\definitionname{D\'efinition}%
132 | \def\examplename{Exemple}%
133 | \def\exercisename{Exercice}%
134 | \def\figurename{Fig.}%
135 | \def\keywordname{{\bf Mots-cl\'e:}}
136 | \def\indexname{Index}
137 | \def\lemmaname{Lemme}%
138 | \def\contriblistname{Liste des contributeurs}
139 | \def\listfigurename{Liste des figures}%
140 | \def\listtablename{Liste des tables}%
141 | \def\mailname{{\it Correspondence to\/}:}
142 | \def\noteaddname{Note ajout\'ee \`a l'\'epreuve}%
143 | \def\notename{Remarque}%
144 | \def\partname{Partie}%
145 | \def\problemname{Probl\`eme}%
146 | \def\proofname{Preuve}%
147 | \def\propertyname{Caract\'eristique}%
148 | %\def\propositionname{Proposition}%
149 | \def\questionname{Question}%
150 | \def\remarkname{Remarque}%
151 | \def\seename{voir}
152 | \def\solutionname{Solution}%
153 | \def\subclassname{{\it Subject Classifications\/}:}
154 | \def\tablename{Tableau}%
155 | \def\theoremname{Th\'eor\`eme}%
156 | }
157 | %
158 | % German section
159 | \def\switcht@deutsch{%\typeout{Man spricht deutsch.}%
160 | \def\abstractname{Zusammenfassung.}%
161 | \def\ackname{Danksagung.}%
162 | \def\andname{und}%
163 | \def\lastandname{ und}%
164 | \def\appendixname{Anhang}%
165 | \def\chaptername{Kapitel}%
166 | \def\claimname{Behauptung}%
167 | \def\conjecturename{Hypothese}%
168 | \def\contentsname{Inhaltsverzeichnis}%
169 | \def\corollaryname{Korollar}%
170 | %\def\definitionname{Definition}%
171 | \def\examplename{Beispiel}%
172 | \def\exercisename{\"Ubung}%
173 | \def\figurename{Abb.}%
174 | \def\keywordname{{\bf Schl\"usselw\"orter:}}
175 | \def\indexname{Index}
176 | %\def\lemmaname{Lemma}%
177 | \def\contriblistname{Mitarbeiter}
178 | \def\listfigurename{Abbildungsverzeichnis}%
179 | \def\listtablename{Tabellenverzeichnis}%
180 | \def\mailname{{\it Correspondence to\/}:}
181 | \def\noteaddname{Nachtrag}%
182 | \def\notename{Anmerkung}%
183 | \def\partname{Teil}%
184 | %\def\problemname{Problem}%
185 | \def\proofname{Beweis}%
186 | \def\propertyname{Eigenschaft}%
187 | %\def\propositionname{Proposition}%
188 | \def\questionname{Frage}%
189 | \def\remarkname{Anmerkung}%
190 | \def\seename{siehe}
191 | \def\solutionname{L\"osung}%
192 | \def\subclassname{{\it Subject Classifications\/}:}
193 | \def\tablename{Tabelle}%
194 | %\def\theoremname{Theorem}%
195 | }
196 |
197 | % Ragged bottom for the actual page
198 | \def\thisbottomragged{\def\@textbottom{\vskip\z@ plus.0001fil
199 | \global\let\@textbottom\relax}}
200 |
201 | \renewcommand\small{%
202 | \@setfontsize\small\@ixpt{11}%
203 | \abovedisplayskip 8.5\p@ \@plus3\p@ \@minus4\p@
204 | \abovedisplayshortskip \z@ \@plus2\p@
205 | \belowdisplayshortskip 4\p@ \@plus2\p@ \@minus2\p@
206 | \def\@listi{\leftmargin\leftmargini
207 | \parsep 0\p@ \@plus1\p@ \@minus\p@
208 | \topsep 8\p@ \@plus2\p@ \@minus4\p@
209 | \itemsep0\p@}%
210 | \belowdisplayskip \abovedisplayskip
211 | }
212 |
213 | \frenchspacing
214 | \widowpenalty=10000
215 | \clubpenalty=10000
216 |
217 | \setlength\oddsidemargin {63\p@}
218 | \setlength\evensidemargin {63\p@}
219 | \setlength\marginparwidth {90\p@}
220 |
221 | \setlength\headsep {16\p@}
222 |
223 | \setlength\footnotesep{7.7\p@}
224 | \setlength\textfloatsep{8mm\@plus 2\p@ \@minus 4\p@}
225 | \setlength\intextsep {8mm\@plus 2\p@ \@minus 2\p@}
226 |
227 | \setcounter{secnumdepth}{2}
228 |
229 | \newcounter {chapter}
230 | \renewcommand\thechapter {\@arabic\c@chapter}
231 |
232 | \newif\if@mainmatter \@mainmattertrue
233 | \newcommand\frontmatter{\cleardoublepage
234 | \@mainmatterfalse\pagenumbering{Roman}}
235 | \newcommand\mainmatter{\cleardoublepage
236 | \@mainmattertrue\pagenumbering{arabic}}
237 | \newcommand\backmatter{\if@openright\cleardoublepage\else\clearpage\fi
238 | \@mainmatterfalse}
239 |
240 | \renewcommand\part{\cleardoublepage
241 | \thispagestyle{empty}%
242 | \if@twocolumn
243 | \onecolumn
244 | \@tempswatrue
245 | \else
246 | \@tempswafalse
247 | \fi
248 | \null\vfil
249 | \secdef\@part\@spart}
250 |
251 | \def\@part[#1]#2{%
252 | \ifnum \c@secnumdepth >-2\relax
253 | \refstepcounter{part}%
254 | \addcontentsline{toc}{part}{\thepart\hspace{1em}#1}%
255 | \else
256 | \addcontentsline{toc}{part}{#1}%
257 | \fi
258 | \markboth{}{}%
259 | {\centering
260 | \interlinepenalty \@M
261 | \normalfont
262 | \ifnum \c@secnumdepth >-2\relax
263 | \huge\bfseries \partname~\thepart
264 | \par
265 | \vskip 20\p@
266 | \fi
267 | \Huge \bfseries #2\par}%
268 | \@endpart}
269 | \def\@spart#1{%
270 | {\centering
271 | \interlinepenalty \@M
272 | \normalfont
273 | \Huge \bfseries #1\par}%
274 | \@endpart}
275 | \def\@endpart{\vfil\newpage
276 | \if@twoside
277 | \null
278 | \thispagestyle{empty}%
279 | \newpage
280 | \fi
281 | \if@tempswa
282 | \twocolumn
283 | \fi}
284 |
285 | \newcommand\chapter{\clearpage
286 | \thispagestyle{empty}%
287 | \global\@topnum\z@
288 | \@afterindentfalse
289 | \secdef\@chapter\@schapter}
290 | \def\@chapter[#1]#2{\ifnum \c@secnumdepth >\m@ne
291 | \if@mainmatter
292 | \refstepcounter{chapter}%
293 | \typeout{\@chapapp\space\thechapter.}%
294 | \addcontentsline{toc}{chapter}%
295 | {\protect\numberline{\thechapter}#1}%
296 | \else
297 | \addcontentsline{toc}{chapter}{#1}%
298 | \fi
299 | \else
300 | \addcontentsline{toc}{chapter}{#1}%
301 | \fi
302 | \chaptermark{#1}%
303 | \addtocontents{lof}{\protect\addvspace{10\p@}}%
304 | \addtocontents{lot}{\protect\addvspace{10\p@}}%
305 | \if@twocolumn
306 | \@topnewpage[\@makechapterhead{#2}]%
307 | \else
308 | \@makechapterhead{#2}%
309 | \@afterheading
310 | \fi}
311 | \def\@makechapterhead#1{%
312 | % \vspace*{50\p@}%
313 | {\centering
314 | \ifnum \c@secnumdepth >\m@ne
315 | \if@mainmatter
316 | \large\bfseries \@chapapp{} \thechapter
317 | \par\nobreak
318 | \vskip 20\p@
319 | \fi
320 | \fi
321 | \interlinepenalty\@M
322 | \Large \bfseries #1\par\nobreak
323 | \vskip 40\p@
324 | }}
325 | \def\@schapter#1{\if@twocolumn
326 | \@topnewpage[\@makeschapterhead{#1}]%
327 | \else
328 | \@makeschapterhead{#1}%
329 | \@afterheading
330 | \fi}
331 | \def\@makeschapterhead#1{%
332 | % \vspace*{50\p@}%
333 | {\centering
334 | \normalfont
335 | \interlinepenalty\@M
336 | \Large \bfseries #1\par\nobreak
337 | \vskip 40\p@
338 | }}
339 |
340 | \renewcommand\section{\@startsection{section}{1}{\z@}%
341 | {-18\p@ \@plus -4\p@ \@minus -4\p@}%
342 | {12\p@ \@plus 4\p@ \@minus 4\p@}%
343 | {\normalfont\large\bfseries\boldmath
344 | \rightskip=\z@ \@plus 8em\pretolerance=10000 }}
345 | \renewcommand\subsection{\@startsection{subsection}{2}{\z@}%
346 | {-18\p@ \@plus -4\p@ \@minus -4\p@}%
347 | {8\p@ \@plus 4\p@ \@minus 4\p@}%
348 | {\normalfont\normalsize\bfseries\boldmath
349 | \rightskip=\z@ \@plus 8em\pretolerance=10000 }}
350 | \renewcommand\subsubsection{\@startsection{subsubsection}{3}{\z@}%
351 | {-18\p@ \@plus -4\p@ \@minus -4\p@}%
352 | {-0.5em \@plus -0.22em \@minus -0.1em}%
353 | {\normalfont\normalsize\bfseries\boldmath}}
354 | \renewcommand\paragraph{\@startsection{paragraph}{4}{\z@}%
355 | {-12\p@ \@plus -4\p@ \@minus -4\p@}%
356 | {-0.5em \@plus -0.22em \@minus -0.1em}%
357 | {\normalfont\normalsize\itshape}}
358 | \renewcommand\subparagraph[1]{\typeout{LLNCS warning: You should not use
359 | \string\subparagraph\space with this class}\vskip0.5cm
360 | You should not use \verb|\subparagraph| with this class.\vskip0.5cm}
361 |
362 | \DeclareMathSymbol{\Gamma}{\mathalpha}{letters}{"00}
363 | \DeclareMathSymbol{\Delta}{\mathalpha}{letters}{"01}
364 | \DeclareMathSymbol{\Theta}{\mathalpha}{letters}{"02}
365 | \DeclareMathSymbol{\Lambda}{\mathalpha}{letters}{"03}
366 | \DeclareMathSymbol{\Xi}{\mathalpha}{letters}{"04}
367 | \DeclareMathSymbol{\Pi}{\mathalpha}{letters}{"05}
368 | \DeclareMathSymbol{\Sigma}{\mathalpha}{letters}{"06}
369 | \DeclareMathSymbol{\Upsilon}{\mathalpha}{letters}{"07}
370 | \DeclareMathSymbol{\Phi}{\mathalpha}{letters}{"08}
371 | \DeclareMathSymbol{\Psi}{\mathalpha}{letters}{"09}
372 | \DeclareMathSymbol{\Omega}{\mathalpha}{letters}{"0A}
373 |
374 | \let\footnotesize\small
375 |
376 | \if@custvec
377 | \def\vec#1{\mathchoice{\mbox{\boldmath$\displaystyle#1$}}
378 | {\mbox{\boldmath$\textstyle#1$}}
379 | {\mbox{\boldmath$\scriptstyle#1$}}
380 | {\mbox{\boldmath$\scriptscriptstyle#1$}}}
381 | \fi
382 |
383 | \def\squareforqed{\hbox{\rlap{$\sqcap$}$\sqcup$}}
384 | \def\qed{\ifmmode\squareforqed\else{\unskip\nobreak\hfil
385 | \penalty50\hskip1em\null\nobreak\hfil\squareforqed
386 | \parfillskip=0pt\finalhyphendemerits=0\endgraf}\fi}
387 |
388 | \def\getsto{\mathrel{\mathchoice {\vcenter{\offinterlineskip
389 | \halign{\hfil
390 | $\displaystyle##$\hfil\cr\gets\cr\to\cr}}}
391 | {\vcenter{\offinterlineskip\halign{\hfil$\textstyle##$\hfil\cr\gets
392 | \cr\to\cr}}}
393 | {\vcenter{\offinterlineskip\halign{\hfil$\scriptstyle##$\hfil\cr\gets
394 | \cr\to\cr}}}
395 | {\vcenter{\offinterlineskip\halign{\hfil$\scriptscriptstyle##$\hfil\cr
396 | \gets\cr\to\cr}}}}}
397 | \def\lid{\mathrel{\mathchoice {\vcenter{\offinterlineskip\halign{\hfil
398 | $\displaystyle##$\hfil\cr<\cr\noalign{\vskip1.2pt}=\cr}}}
399 | {\vcenter{\offinterlineskip\halign{\hfil$\textstyle##$\hfil\cr<\cr
400 | \noalign{\vskip1.2pt}=\cr}}}
401 | {\vcenter{\offinterlineskip\halign{\hfil$\scriptstyle##$\hfil\cr<\cr
402 | \noalign{\vskip1pt}=\cr}}}
403 | {\vcenter{\offinterlineskip\halign{\hfil$\scriptscriptstyle##$\hfil\cr
404 | <\cr
405 | \noalign{\vskip0.9pt}=\cr}}}}}
406 | \def\gid{\mathrel{\mathchoice {\vcenter{\offinterlineskip\halign{\hfil
407 | $\displaystyle##$\hfil\cr>\cr\noalign{\vskip1.2pt}=\cr}}}
408 | {\vcenter{\offinterlineskip\halign{\hfil$\textstyle##$\hfil\cr>\cr
409 | \noalign{\vskip1.2pt}=\cr}}}
410 | {\vcenter{\offinterlineskip\halign{\hfil$\scriptstyle##$\hfil\cr>\cr
411 | \noalign{\vskip1pt}=\cr}}}
412 | {\vcenter{\offinterlineskip\halign{\hfil$\scriptscriptstyle##$\hfil\cr
413 | >\cr
414 | \noalign{\vskip0.9pt}=\cr}}}}}
415 | \def\grole{\mathrel{\mathchoice {\vcenter{\offinterlineskip
416 | \halign{\hfil
417 | $\displaystyle##$\hfil\cr>\cr\noalign{\vskip-1pt}<\cr}}}
418 | {\vcenter{\offinterlineskip\halign{\hfil$\textstyle##$\hfil\cr
419 | >\cr\noalign{\vskip-1pt}<\cr}}}
420 | {\vcenter{\offinterlineskip\halign{\hfil$\scriptstyle##$\hfil\cr
421 | >\cr\noalign{\vskip-0.8pt}<\cr}}}
422 | {\vcenter{\offinterlineskip\halign{\hfil$\scriptscriptstyle##$\hfil\cr
423 | >\cr\noalign{\vskip-0.3pt}<\cr}}}}}
424 | \def\bbbr{{\rm I\!R}} %reelle Zahlen
425 | \def\bbbm{{\rm I\!M}}
426 | \def\bbbn{{\rm I\!N}} %natuerliche Zahlen
427 | \def\bbbf{{\rm I\!F}}
428 | \def\bbbh{{\rm I\!H}}
429 | \def\bbbk{{\rm I\!K}}
430 | \def\bbbp{{\rm I\!P}}
431 | \def\bbbone{{\mathchoice {\rm 1\mskip-4mu l} {\rm 1\mskip-4mu l}
432 | {\rm 1\mskip-4.5mu l} {\rm 1\mskip-5mu l}}}
433 | \def\bbbc{{\mathchoice {\setbox0=\hbox{$\displaystyle\rm C$}\hbox{\hbox
434 | to0pt{\kern0.4\wd0\vrule height0.9\ht0\hss}\box0}}
435 | {\setbox0=\hbox{$\textstyle\rm C$}\hbox{\hbox
436 | to0pt{\kern0.4\wd0\vrule height0.9\ht0\hss}\box0}}
437 | {\setbox0=\hbox{$\scriptstyle\rm C$}\hbox{\hbox
438 | to0pt{\kern0.4\wd0\vrule height0.9\ht0\hss}\box0}}
439 | {\setbox0=\hbox{$\scriptscriptstyle\rm C$}\hbox{\hbox
440 | to0pt{\kern0.4\wd0\vrule height0.9\ht0\hss}\box0}}}}
441 | \def\bbbq{{\mathchoice {\setbox0=\hbox{$\displaystyle\rm
442 | Q$}\hbox{\raise
443 | 0.15\ht0\hbox to0pt{\kern0.4\wd0\vrule height0.8\ht0\hss}\box0}}
444 | {\setbox0=\hbox{$\textstyle\rm Q$}\hbox{\raise
445 | 0.15\ht0\hbox to0pt{\kern0.4\wd0\vrule height0.8\ht0\hss}\box0}}
446 | {\setbox0=\hbox{$\scriptstyle\rm Q$}\hbox{\raise
447 | 0.15\ht0\hbox to0pt{\kern0.4\wd0\vrule height0.7\ht0\hss}\box0}}
448 | {\setbox0=\hbox{$\scriptscriptstyle\rm Q$}\hbox{\raise
449 | 0.15\ht0\hbox to0pt{\kern0.4\wd0\vrule height0.7\ht0\hss}\box0}}}}
450 | \def\bbbt{{\mathchoice {\setbox0=\hbox{$\displaystyle\rm
451 | T$}\hbox{\hbox to0pt{\kern0.3\wd0\vrule height0.9\ht0\hss}\box0}}
452 | {\setbox0=\hbox{$\textstyle\rm T$}\hbox{\hbox
453 | to0pt{\kern0.3\wd0\vrule height0.9\ht0\hss}\box0}}
454 | {\setbox0=\hbox{$\scriptstyle\rm T$}\hbox{\hbox
455 | to0pt{\kern0.3\wd0\vrule height0.9\ht0\hss}\box0}}
456 | {\setbox0=\hbox{$\scriptscriptstyle\rm T$}\hbox{\hbox
457 | to0pt{\kern0.3\wd0\vrule height0.9\ht0\hss}\box0}}}}
458 | \def\bbbs{{\mathchoice
459 | {\setbox0=\hbox{$\displaystyle \rm S$}\hbox{\raise0.5\ht0\hbox
460 | to0pt{\kern0.35\wd0\vrule height0.45\ht0\hss}\hbox
461 | to0pt{\kern0.55\wd0\vrule height0.5\ht0\hss}\box0}}
462 | {\setbox0=\hbox{$\textstyle \rm S$}\hbox{\raise0.5\ht0\hbox
463 | to0pt{\kern0.35\wd0\vrule height0.45\ht0\hss}\hbox
464 | to0pt{\kern0.55\wd0\vrule height0.5\ht0\hss}\box0}}
465 | {\setbox0=\hbox{$\scriptstyle \rm S$}\hbox{\raise0.5\ht0\hbox
466 | to0pt{\kern0.35\wd0\vrule height0.45\ht0\hss}\raise0.05\ht0\hbox
467 | to0pt{\kern0.5\wd0\vrule height0.45\ht0\hss}\box0}}
468 | {\setbox0=\hbox{$\scriptscriptstyle\rm S$}\hbox{\raise0.5\ht0\hbox
469 | to0pt{\kern0.4\wd0\vrule height0.45\ht0\hss}\raise0.05\ht0\hbox
470 | to0pt{\kern0.55\wd0\vrule height0.45\ht0\hss}\box0}}}}
471 | \def\bbbz{{\mathchoice {\hbox{$\mathsf\textstyle Z\kern-0.4em Z$}}
472 | {\hbox{$\mathsf\textstyle Z\kern-0.4em Z$}}
473 | {\hbox{$\mathsf\scriptstyle Z\kern-0.3em Z$}}
474 | {\hbox{$\mathsf\scriptscriptstyle Z\kern-0.2em Z$}}}}
475 |
476 | \let\ts\,
477 |
478 | \setlength\leftmargini {17\p@}
479 | \setlength\leftmargin {\leftmargini}
480 | \setlength\leftmarginii {\leftmargini}
481 | \setlength\leftmarginiii {\leftmargini}
482 | \setlength\leftmarginiv {\leftmargini}
483 | \setlength \labelsep {.5em}
484 | \setlength \labelwidth{\leftmargini}
485 | \addtolength\labelwidth{-\labelsep}
486 |
487 | \def\@listI{\leftmargin\leftmargini
488 | \parsep 0\p@ \@plus1\p@ \@minus\p@
489 | \topsep 8\p@ \@plus2\p@ \@minus4\p@
490 | \itemsep0\p@}
491 | \let\@listi\@listI
492 | \@listi
493 | \def\@listii {\leftmargin\leftmarginii
494 | \labelwidth\leftmarginii
495 | \advance\labelwidth-\labelsep
496 | \topsep 0\p@ \@plus2\p@ \@minus\p@}
497 | \def\@listiii{\leftmargin\leftmarginiii
498 | \labelwidth\leftmarginiii
499 | \advance\labelwidth-\labelsep
500 | \topsep 0\p@ \@plus\p@\@minus\p@
501 | \parsep \z@
502 | \partopsep \p@ \@plus\z@ \@minus\p@}
503 |
504 | \renewcommand\labelitemi{\normalfont\bfseries --}
505 | \renewcommand\labelitemii{$\m@th\bullet$}
506 |
507 | \setlength\arraycolsep{1.4\p@}
508 | \setlength\tabcolsep{1.4\p@}
509 |
510 | \def\tableofcontents{\chapter*{\contentsname\@mkboth{{\contentsname}}%
511 | {{\contentsname}}}
512 | \def\authcount##1{\setcounter{auco}{##1}\setcounter{@auth}{1}}
513 | \def\lastand{\ifnum\value{auco}=2\relax
514 | \unskip{} \andname\
515 | \else
516 | \unskip \lastandname\
517 | \fi}%
518 | \def\and{\stepcounter{@auth}\relax
519 | \ifnum\value{@auth}=\value{auco}%
520 | \lastand
521 | \else
522 | \unskip,
523 | \fi}%
524 | \@starttoc{toc}\if@restonecol\twocolumn\fi}
525 |
526 | \def\l@part#1#2{\addpenalty{\@secpenalty}%
527 | \addvspace{2em plus\p@}% % space above part line
528 | \begingroup
529 | \parindent \z@
530 | \rightskip \z@ plus 5em
531 | \hrule\vskip5pt
532 | \large % same size as for a contribution heading
533 | \bfseries\boldmath % set line in boldface
534 | \leavevmode % TeX command to enter horizontal mode.
535 | #1\par
536 | \vskip5pt
537 | \hrule
538 | \vskip1pt
539 | \nobreak % Never break after part entry
540 | \endgroup}
541 |
542 | \def\@dotsep{2}
543 |
544 | \let\phantomsection=\relax
545 |
546 | \def\hyperhrefextend{\ifx\hyper@anchor\@undefined\else
547 | {}\fi}
548 |
549 | \def\addnumcontentsmark#1#2#3{%
550 | \addtocontents{#1}{\protect\contentsline{#2}{\protect\numberline
551 | {\thechapter}#3}{\thepage}\hyperhrefextend}}%
552 | \def\addcontentsmark#1#2#3{%
553 | \addtocontents{#1}{\protect\contentsline{#2}{#3}{\thepage}\hyperhrefextend}}%
554 | \def\addcontentsmarkwop#1#2#3{%
555 | \addtocontents{#1}{\protect\contentsline{#2}{#3}{0}\hyperhrefextend}}%
556 |
557 | \def\@adcmk[#1]{\ifcase #1 \or
558 | \def\@gtempa{\addnumcontentsmark}%
559 | \or \def\@gtempa{\addcontentsmark}%
560 | \or \def\@gtempa{\addcontentsmarkwop}%
561 | \fi\@gtempa{toc}{chapter}%
562 | }
563 | \def\addtocmark{%
564 | \phantomsection
565 | \@ifnextchar[{\@adcmk}{\@adcmk[3]}%
566 | }
567 |
568 | \def\l@chapter#1#2{\addpenalty{-\@highpenalty}
569 | \vskip 1.0em plus 1pt \@tempdima 1.5em \begingroup
570 | \parindent \z@ \rightskip \@tocrmarg
571 | \advance\rightskip by 0pt plus 2cm
572 | \parfillskip -\rightskip \pretolerance=10000
573 | \leavevmode \advance\leftskip\@tempdima \hskip -\leftskip
574 | {\large\bfseries\boldmath#1}\ifx0#2\hfil\null
575 | \else
576 | \nobreak
577 | \leaders\hbox{$\m@th \mkern \@dotsep mu.\mkern
578 | \@dotsep mu$}\hfill
579 | \nobreak\hbox to\@pnumwidth{\hss #2}%
580 | \fi\par
581 | \penalty\@highpenalty \endgroup}
582 |
583 | \def\l@title#1#2{\addpenalty{-\@highpenalty}
584 | \addvspace{8pt plus 1pt}
585 | \@tempdima \z@
586 | \begingroup
587 | \parindent \z@ \rightskip \@tocrmarg
588 | \advance\rightskip by 0pt plus 2cm
589 | \parfillskip -\rightskip \pretolerance=10000
590 | \leavevmode \advance\leftskip\@tempdima \hskip -\leftskip
591 | #1\nobreak
592 | \leaders\hbox{$\m@th \mkern \@dotsep mu.\mkern
593 | \@dotsep mu$}\hfill
594 | \nobreak\hbox to\@pnumwidth{\hss #2}\par
595 | \penalty\@highpenalty \endgroup}
596 |
597 | \def\l@author#1#2{\addpenalty{\@highpenalty}
598 | \@tempdima=15\p@ %\z@
599 | \begingroup
600 | \parindent \z@ \rightskip \@tocrmarg
601 | \advance\rightskip by 0pt plus 2cm
602 | \pretolerance=10000
603 | \leavevmode \advance\leftskip\@tempdima %\hskip -\leftskip
604 | \textit{#1}\par
605 | \penalty\@highpenalty \endgroup}
606 |
607 | \setcounter{tocdepth}{0}
608 | \newdimen\tocchpnum
609 | \newdimen\tocsecnum
610 | \newdimen\tocsectotal
611 | \newdimen\tocsubsecnum
612 | \newdimen\tocsubsectotal
613 | \newdimen\tocsubsubsecnum
614 | \newdimen\tocsubsubsectotal
615 | \newdimen\tocparanum
616 | \newdimen\tocparatotal
617 | \newdimen\tocsubparanum
618 | \tocchpnum=\z@ % no chapter numbers
619 | \tocsecnum=15\p@ % section 88. plus 2.222pt
620 | \tocsubsecnum=23\p@ % subsection 88.8 plus 2.222pt
621 | \tocsubsubsecnum=27\p@ % subsubsection 88.8.8 plus 1.444pt
622 | \tocparanum=35\p@ % paragraph 88.8.8.8 plus 1.666pt
623 | \tocsubparanum=43\p@ % subparagraph 88.8.8.8.8 plus 1.888pt
624 | \def\calctocindent{%
625 | \tocsectotal=\tocchpnum
626 | \advance\tocsectotal by\tocsecnum
627 | \tocsubsectotal=\tocsectotal
628 | \advance\tocsubsectotal by\tocsubsecnum
629 | \tocsubsubsectotal=\tocsubsectotal
630 | \advance\tocsubsubsectotal by\tocsubsubsecnum
631 | \tocparatotal=\tocsubsubsectotal
632 | \advance\tocparatotal by\tocparanum}
633 | \calctocindent
634 |
635 | \def\l@section{\@dottedtocline{1}{\tocchpnum}{\tocsecnum}}
636 | \def\l@subsection{\@dottedtocline{2}{\tocsectotal}{\tocsubsecnum}}
637 | \def\l@subsubsection{\@dottedtocline{3}{\tocsubsectotal}{\tocsubsubsecnum}}
638 | \def\l@paragraph{\@dottedtocline{4}{\tocsubsubsectotal}{\tocparanum}}
639 | \def\l@subparagraph{\@dottedtocline{5}{\tocparatotal}{\tocsubparanum}}
640 |
641 | \def\listoffigures{\@restonecolfalse\if@twocolumn\@restonecoltrue\onecolumn
642 | \fi\section*{\listfigurename\@mkboth{{\listfigurename}}{{\listfigurename}}}
643 | \@starttoc{lof}\if@restonecol\twocolumn\fi}
644 | \def\l@figure{\@dottedtocline{1}{0em}{1.5em}}
645 |
646 | \def\listoftables{\@restonecolfalse\if@twocolumn\@restonecoltrue\onecolumn
647 | \fi\section*{\listtablename\@mkboth{{\listtablename}}{{\listtablename}}}
648 | \@starttoc{lot}\if@restonecol\twocolumn\fi}
649 | \let\l@table\l@figure
650 |
651 | \renewcommand\listoffigures{%
652 | \section*{\listfigurename
653 | \@mkboth{\listfigurename}{\listfigurename}}%
654 | \@starttoc{lof}%
655 | }
656 |
657 | \renewcommand\listoftables{%
658 | \section*{\listtablename
659 | \@mkboth{\listtablename}{\listtablename}}%
660 | \@starttoc{lot}%
661 | }
662 |
663 | \ifx\oribibl\undefined
664 | \ifx\citeauthoryear\undefined
665 | \renewenvironment{thebibliography}[1]
666 | {\section*{\refname}
667 | \def\@biblabel##1{##1.}
668 | \small
669 | \list{\@biblabel{\@arabic\c@enumiv}}%
670 | {\settowidth\labelwidth{\@biblabel{#1}}%
671 | \leftmargin\labelwidth
672 | \advance\leftmargin\labelsep
673 | \if@openbib
674 | \advance\leftmargin\bibindent
675 | \itemindent -\bibindent
676 | \listparindent \itemindent
677 | \parsep \z@
678 | \fi
679 | \usecounter{enumiv}%
680 | \let\p@enumiv\@empty
681 | \renewcommand\theenumiv{\@arabic\c@enumiv}}%
682 | \if@openbib
683 | \renewcommand\newblock{\par}%
684 | \else
685 | \renewcommand\newblock{\hskip .11em \@plus.33em \@minus.07em}%
686 | \fi
687 | \sloppy\clubpenalty4000\widowpenalty4000%
688 | \sfcode`\.=\@m}
689 | {\def\@noitemerr
690 | {\@latex@warning{Empty `thebibliography' environment}}%
691 | \endlist}
692 | \def\@lbibitem[#1]#2{\item[{[#1]}\hfill]\if@filesw
693 | {\let\protect\noexpand\immediate
694 | \write\@auxout{\string\bibcite{#2}{#1}}}\fi\ignorespaces}
695 | \newcount\@tempcntc
696 | \def\@citex[#1]#2{\if@filesw\immediate\write\@auxout{\string\citation{#2}}\fi
697 | \@tempcnta\z@\@tempcntb\m@ne\def\@citea{}\@cite{\@for\@citeb:=#2\do
698 | {\@ifundefined
699 | {b@\@citeb}{\@citeo\@tempcntb\m@ne\@citea\def\@citea{,}{\bfseries
700 | ?}\@warning
701 | {Citation `\@citeb' on page \thepage \space undefined}}%
702 | {\setbox\z@\hbox{\global\@tempcntc0\csname b@\@citeb\endcsname\relax}%
703 | \ifnum\@tempcntc=\z@ \@citeo\@tempcntb\m@ne
704 | \@citea\def\@citea{,}\hbox{\csname b@\@citeb\endcsname}%
705 | \else
706 | \advance\@tempcntb\@ne
707 | \ifnum\@tempcntb=\@tempcntc
708 | \else\advance\@tempcntb\m@ne\@citeo
709 | \@tempcnta\@tempcntc\@tempcntb\@tempcntc\fi\fi}}\@citeo}{#1}}
710 | \def\@citeo{\ifnum\@tempcnta>\@tempcntb\else
711 | \@citea\def\@citea{,\,\hskip\z@skip}%
712 | \ifnum\@tempcnta=\@tempcntb\the\@tempcnta\else
713 | {\advance\@tempcnta\@ne\ifnum\@tempcnta=\@tempcntb \else
714 | \def\@citea{--}\fi
715 | \advance\@tempcnta\m@ne\the\@tempcnta\@citea\the\@tempcntb}\fi\fi}
716 | \else
717 | \renewenvironment{thebibliography}[1]
718 | {\section*{\refname}
719 | \small
720 | \list{}%
721 | {\settowidth\labelwidth{}%
722 | \leftmargin\parindent
723 | \itemindent=-\parindent
724 | \labelsep=\z@
725 | \if@openbib
726 | \advance\leftmargin\bibindent
727 | \itemindent -\bibindent
728 | \listparindent \itemindent
729 | \parsep \z@
730 | \fi
731 | \usecounter{enumiv}%
732 | \let\p@enumiv\@empty
733 | \renewcommand\theenumiv{}}%
734 | \if@openbib
735 | \renewcommand\newblock{\par}%
736 | \else
737 | \renewcommand\newblock{\hskip .11em \@plus.33em \@minus.07em}%
738 | \fi
739 | \sloppy\clubpenalty4000\widowpenalty4000%
740 | \sfcode`\.=\@m}
741 | {\def\@noitemerr
742 | {\@latex@warning{Empty `thebibliography' environment}}%
743 | \endlist}
744 | \def\@cite#1{#1}%
745 | \def\@lbibitem[#1]#2{\item[]\if@filesw
746 | {\def\protect##1{\string ##1\space}\immediate
747 | \write\@auxout{\string\bibcite{#2}{#1}}}\fi\ignorespaces}
748 | \fi
749 | \else
750 | \@cons\@openbib@code{\noexpand\small}
751 | \fi
752 |
753 | \def\idxquad{\hskip 10\p@}% space that divides entry from number
754 |
755 | \def\@idxitem{\par\hangindent 10\p@}
756 |
757 | \def\subitem{\par\setbox0=\hbox{--\enspace}% second order
758 | \noindent\hangindent\wd0\box0}% index entry
759 |
760 | \def\subsubitem{\par\setbox0=\hbox{--\,--\enspace}% third
761 | \noindent\hangindent\wd0\box0}% order index entry
762 |
763 | \def\indexspace{\par \vskip 10\p@ plus5\p@ minus3\p@\relax}
764 |
765 | \renewenvironment{theindex}
766 | {\@mkboth{\indexname}{\indexname}%
767 | \thispagestyle{empty}\parindent\z@
768 | \parskip\z@ \@plus .3\p@\relax
769 | \let\item\par
770 | \def\,{\relax\ifmmode\mskip\thinmuskip
771 | \else\hskip0.2em\ignorespaces\fi}%
772 | \normalfont\small
773 | \begin{multicols}{2}[\@makeschapterhead{\indexname}]%
774 | }
775 | {\end{multicols}}
776 |
777 | \renewcommand\footnoterule{%
778 | \kern-3\p@
779 | \hrule\@width 2truecm
780 | \kern2.6\p@}
781 | \newdimen\fnindent
782 | \fnindent1em
783 | \long\def\@makefntext#1{%
784 | \parindent \fnindent%
785 | \leftskip \fnindent%
786 | \noindent
787 | \llap{\hb@xt@1em{\hss\@makefnmark\ }}\ignorespaces#1}
788 |
789 | \long\def\@makecaption#1#2{%
790 | \small
791 | \vskip\abovecaptionskip
792 | \sbox\@tempboxa{{\bfseries #1.} #2}%
793 | \ifdim \wd\@tempboxa >\hsize
794 | {\bfseries #1.} #2\par
795 | \else
796 | \global \@minipagefalse
797 | \hb@xt@\hsize{\hfil\box\@tempboxa\hfil}%
798 | \fi
799 | \vskip\belowcaptionskip}
800 |
801 | \def\fps@figure{htbp}
802 | \def\fnum@figure{\figurename\thinspace\thefigure}
803 | \def \@floatboxreset {%
804 | \reset@font
805 | \small
806 | \@setnobreak
807 | \@setminipage
808 | }
809 | \def\fps@table{htbp}
810 | \def\fnum@table{\tablename~\thetable}
811 | \renewenvironment{table}
812 | {\setlength\abovecaptionskip{0\p@}%
813 | \setlength\belowcaptionskip{10\p@}%
814 | \@float{table}}
815 | {\end@float}
816 | \renewenvironment{table*}
817 | {\setlength\abovecaptionskip{0\p@}%
818 | \setlength\belowcaptionskip{10\p@}%
819 | \@dblfloat{table}}
820 | {\end@dblfloat}
821 |
822 | \long\def\@caption#1[#2]#3{\par\addcontentsline{\csname
823 | ext@#1\endcsname}{#1}{\protect\numberline{\csname
824 | the#1\endcsname}{\ignorespaces #2}}\begingroup
825 | \@parboxrestore
826 | \@makecaption{\csname fnum@#1\endcsname}{\ignorespaces #3}\par
827 | \endgroup}
828 |
829 | % LaTeX does not provide a command to enter the authors' institute
830 | % addresses. The \institute command is defined here.
831 |
832 | \newcounter{@inst}
833 | \newcounter{@auth}
834 | \newcounter{auco}
835 | \newdimen\instindent
836 | \newbox\authrun
837 | \newtoks\authorrunning
838 | \newtoks\tocauthor
839 | \newbox\titrun
840 | \newtoks\titlerunning
841 | \newtoks\toctitle
842 |
843 | \def\clearheadinfo{\gdef\@author{No Author Given}%
844 | \gdef\@title{No Title Given}%
845 | \gdef\@subtitle{}%
846 | \gdef\@institute{No Institute Given}%
847 | \gdef\@thanks{}%
848 | \global\titlerunning={}\global\authorrunning={}%
849 | \global\toctitle={}\global\tocauthor={}}
850 |
851 | \def\institute#1{\gdef\@institute{#1}}
852 |
853 | \def\institutename{\par
854 | \begingroup
855 | \parskip=\z@
856 | \parindent=\z@
857 | \setcounter{@inst}{1}%
858 | \def\and{\par\stepcounter{@inst}%
859 | \noindent$^{\the@inst}$\enspace\ignorespaces}%
860 | \setbox0=\vbox{\def\thanks##1{}\@institute}%
861 | \ifnum\c@@inst=1\relax
862 | \gdef\fnnstart{0}%
863 | \else
864 | \xdef\fnnstart{\c@@inst}%
865 | \setcounter{@inst}{1}%
866 | \noindent$^{\the@inst}$\enspace
867 | \fi
868 | \ignorespaces
869 | \@institute\par
870 | \endgroup}
871 |
872 | \def\@fnsymbol#1{\ensuremath{\ifcase#1\or\star\or{\star\star}\or
873 | {\star\star\star}\or \dagger\or \ddagger\or
874 | \mathchar "278\or \mathchar "27B\or \|\or **\or \dagger\dagger
875 | \or \ddagger\ddagger \else\@ctrerr\fi}}
876 |
877 | \def\inst#1{\unskip$^{#1}$}
878 | \def\fnmsep{\unskip$^,$}
879 | \def\email#1{{\tt#1}}
880 | \AtBeginDocument{\@ifundefined{url}{\def\url#1{#1}}{}%
881 | \@ifpackageloaded{babel}{%
882 | \@ifundefined{extrasenglish}{}{\addto\extrasenglish{\switcht@albion}}%
883 | \@ifundefined{extrasfrenchb}{}{\addto\extrasfrenchb{\switcht@francais}}%
884 | \@ifundefined{extrasgerman}{}{\addto\extrasgerman{\switcht@deutsch}}%
885 | }{\switcht@@therlang}%
886 | \providecommand{\keywords}[1]{\par\addvspace\baselineskip
887 | \noindent\keywordname\enspace\ignorespaces#1}%
888 | }
889 | \def\homedir{\~{ }}
890 |
891 | \def\subtitle#1{\gdef\@subtitle{#1}}
892 | \clearheadinfo
893 | %
894 | %%% to avoid hyperref warnings
895 | \providecommand*{\toclevel@author}{999}
896 | %%% to make title-entry parent of section-entries
897 | \providecommand*{\toclevel@title}{0}
898 | %
899 | \renewcommand\maketitle{\newpage
900 | \phantomsection
901 | \refstepcounter{chapter}%
902 | \stepcounter{section}%
903 | \setcounter{section}{0}%
904 | \setcounter{subsection}{0}%
905 | \setcounter{figure}{0}
906 | \setcounter{table}{0}
907 | \setcounter{equation}{0}
908 | \setcounter{footnote}{0}%
909 | \begingroup
910 | \parindent=\z@
911 | \renewcommand\thefootnote{\@fnsymbol\c@footnote}%
912 | \if@twocolumn
913 | \ifnum \col@number=\@ne
914 | \@maketitle
915 | \else
916 | \twocolumn[\@maketitle]%
917 | \fi
918 | \else
919 | \newpage
920 | \global\@topnum\z@ % Prevents figures from going at top of page.
921 | \@maketitle
922 | \fi
923 | \thispagestyle{empty}\@thanks
924 | %
925 | \def\\{\unskip\ \ignorespaces}\def\inst##1{\unskip{}}%
926 | \def\thanks##1{\unskip{}}\def\fnmsep{\unskip}%
927 | \instindent=\hsize
928 | \advance\instindent by-\headlineindent
929 | \if!\the\toctitle!\addcontentsline{toc}{title}{\@title}\else
930 | \addcontentsline{toc}{title}{\the\toctitle}\fi
931 | \if@runhead
932 | \if!\the\titlerunning!\else
933 | \edef\@title{\the\titlerunning}%
934 | \fi
935 | \global\setbox\titrun=\hbox{\small\rm\unboldmath\ignorespaces\@title}%
936 | \ifdim\wd\titrun>\instindent
937 | \typeout{Title too long for running head. Please supply}%
938 | \typeout{a shorter form with \string\titlerunning\space prior to
939 | \string\maketitle}%
940 | \global\setbox\titrun=\hbox{\small\rm
941 | Title Suppressed Due to Excessive Length}%
942 | \fi
943 | \xdef\@title{\copy\titrun}%
944 | \fi
945 | %
946 | \if!\the\tocauthor!\relax
947 | {\def\and{\noexpand\protect\noexpand\and}%
948 | \protected@xdef\toc@uthor{\@author}}%
949 | \else
950 | \def\\{\noexpand\protect\noexpand\newline}%
951 | \protected@xdef\scratch{\the\tocauthor}%
952 | \protected@xdef\toc@uthor{\scratch}%
953 | \fi
954 | \addtocontents{toc}{\noexpand\protect\noexpand\authcount{\the\c@auco}}%
955 | \addcontentsline{toc}{author}{\toc@uthor}%
956 | \if@runhead
957 | \if!\the\authorrunning!
958 | \value{@inst}=\value{@auth}%
959 | \setcounter{@auth}{1}%
960 | \else
961 | \edef\@author{\the\authorrunning}%
962 | \fi
963 | \global\setbox\authrun=\hbox{\small\unboldmath\@author\unskip}%
964 | \ifdim\wd\authrun>\instindent
965 | \typeout{Names of authors too long for running head. Please supply}%
966 | \typeout{a shorter form with \string\authorrunning\space prior to
967 | \string\maketitle}%
968 | \global\setbox\authrun=\hbox{\small\rm
969 | Authors Suppressed Due to Excessive Length}%
970 | \fi
971 | \xdef\@author{\copy\authrun}%
972 | \markboth{\@author}{\@title}%
973 | \fi
974 | \endgroup
975 | \setcounter{footnote}{\fnnstart}%
976 | \clearheadinfo}
977 | %
978 | \def\@maketitle{\newpage
979 | \markboth{}{}%
980 | \def\lastand{\ifnum\value{@inst}=2\relax
981 | \unskip{} \andname\
982 | \else
983 | \unskip \lastandname\
984 | \fi}%
985 | \def\and{\stepcounter{@auth}\relax
986 | \ifnum\value{@auth}=\value{@inst}%
987 | \lastand
988 | \else
989 | \unskip,
990 | \fi}%
991 | \begin{center}%
992 | \let\newline\\
993 | {\Large \bfseries\boldmath
994 | \pretolerance=10000
995 | \@title \par}\vskip .8cm
996 | \if!\@subtitle!\else {\large \bfseries\boldmath
997 | \vskip -.65cm
998 | \pretolerance=10000
999 | \@subtitle \par}\vskip .8cm\fi
1000 | \setbox0=\vbox{\setcounter{@auth}{1}\def\and{\stepcounter{@auth}}%
1001 | \def\thanks##1{}\@author}%
1002 | \global\value{@inst}=\value{@auth}%
1003 | \global\value{auco}=\value{@auth}%
1004 | \setcounter{@auth}{1}%
1005 | {\lineskip .5em
1006 | \noindent\ignorespaces
1007 | \@author\vskip.35cm}
1008 | {\small\institutename}
1009 | \end{center}%
1010 | }
1011 |
1012 | % definition of the "\spnewtheorem" command.
1013 | %
1014 | % Usage:
1015 | %
1016 | % \spnewtheorem{env_nam}{caption}[within]{cap_font}{body_font}
1017 | % or \spnewtheorem{env_nam}[numbered_like]{caption}{cap_font}{body_font}
1018 | % or \spnewtheorem*{env_nam}{caption}{cap_font}{body_font}
1019 | %
1020 | % New is "cap_font" and "body_font". It stands for
1021 | % fontdefinition of the caption and the text itself.
1022 | %
1023 | % "\spnewtheorem*" gives a theorem without number.
1024 | %
1025 | % A defined spnewthoerem environment is used as described
1026 | % by Lamport.
1027 | %
1028 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
1029 |
1030 | \def\@thmcountersep{}
1031 | \def\@thmcounterend{.}
1032 |
1033 | \def\spnewtheorem{\@ifstar{\@sthm}{\@Sthm}}
1034 |
1035 | % definition of \spnewtheorem with number
1036 |
1037 | \def\@spnthm#1#2{%
1038 | \@ifnextchar[{\@spxnthm{#1}{#2}}{\@spynthm{#1}{#2}}}
1039 | \def\@Sthm#1{\@ifnextchar[{\@spothm{#1}}{\@spnthm{#1}}}
1040 |
1041 | \def\@spxnthm#1#2[#3]#4#5{\expandafter\@ifdefinable\csname #1\endcsname
1042 | {\@definecounter{#1}\@addtoreset{#1}{#3}%
1043 | \expandafter\xdef\csname the#1\endcsname{\expandafter\noexpand
1044 | \csname the#3\endcsname \noexpand\@thmcountersep \@thmcounter{#1}}%
1045 | \expandafter\xdef\csname #1name\endcsname{#2}%
1046 | \global\@namedef{#1}{\@spthm{#1}{\csname #1name\endcsname}{#4}{#5}}%
1047 | \global\@namedef{end#1}{\@endtheorem}}}
1048 |
1049 | \def\@spynthm#1#2#3#4{\expandafter\@ifdefinable\csname #1\endcsname
1050 | {\@definecounter{#1}%
1051 | \expandafter\xdef\csname the#1\endcsname{\@thmcounter{#1}}%
1052 | \expandafter\xdef\csname #1name\endcsname{#2}%
1053 | \global\@namedef{#1}{\@spthm{#1}{\csname #1name\endcsname}{#3}{#4}}%
1054 | \global\@namedef{end#1}{\@endtheorem}}}
1055 |
1056 | \def\@spothm#1[#2]#3#4#5{%
1057 | \@ifundefined{c@#2}{\@latexerr{No theorem environment `#2' defined}\@eha}%
1058 | {\expandafter\@ifdefinable\csname #1\endcsname
1059 | {\newaliascnt{#1}{#2}%
1060 | \expandafter\xdef\csname #1name\endcsname{#3}%
1061 | \global\@namedef{#1}{\@spthm{#1}{\csname #1name\endcsname}{#4}{#5}}%
1062 | \global\@namedef{end#1}{\@endtheorem}}}}
1063 |
1064 | \def\@spthm#1#2#3#4{\topsep 7\p@ \@plus2\p@ \@minus4\p@
1065 | \refstepcounter{#1}%
1066 | \@ifnextchar[{\@spythm{#1}{#2}{#3}{#4}}{\@spxthm{#1}{#2}{#3}{#4}}}
1067 |
1068 | \def\@spxthm#1#2#3#4{\@spbegintheorem{#2}{\csname the#1\endcsname}{#3}{#4}%
1069 | \ignorespaces}
1070 |
1071 | \def\@spythm#1#2#3#4[#5]{\@spopargbegintheorem{#2}{\csname
1072 | the#1\endcsname}{#5}{#3}{#4}\ignorespaces}
1073 |
1074 | \def\@spbegintheorem#1#2#3#4{\trivlist
1075 | \item[\hskip\labelsep{#3#1\ #2\@thmcounterend}]#4}
1076 |
1077 | \def\@spopargbegintheorem#1#2#3#4#5{\trivlist
1078 | \item[\hskip\labelsep{#4#1\ #2}]{#4(#3)\@thmcounterend\ }#5}
1079 |
1080 | % definition of \spnewtheorem* without number
1081 |
1082 | \def\@sthm#1#2{\@Ynthm{#1}{#2}}
1083 |
1084 | \def\@Ynthm#1#2#3#4{\expandafter\@ifdefinable\csname #1\endcsname
1085 | {\global\@namedef{#1}{\@Thm{\csname #1name\endcsname}{#3}{#4}}%
1086 | \expandafter\xdef\csname #1name\endcsname{#2}%
1087 | \global\@namedef{end#1}{\@endtheorem}}}
1088 |
1089 | \def\@Thm#1#2#3{\topsep 7\p@ \@plus2\p@ \@minus4\p@
1090 | \@ifnextchar[{\@Ythm{#1}{#2}{#3}}{\@Xthm{#1}{#2}{#3}}}
1091 |
1092 | \def\@Xthm#1#2#3{\@Begintheorem{#1}{#2}{#3}\ignorespaces}
1093 |
1094 | \def\@Ythm#1#2#3[#4]{\@Opargbegintheorem{#1}
1095 | {#4}{#2}{#3}\ignorespaces}
1096 |
1097 | \def\@Begintheorem#1#2#3{#3\trivlist
1098 | \item[\hskip\labelsep{#2#1\@thmcounterend}]}
1099 |
1100 | \def\@Opargbegintheorem#1#2#3#4{#4\trivlist
1101 | \item[\hskip\labelsep{#3#1}]{#3(#2)\@thmcounterend\ }}
1102 |
1103 | \if@envcntsect
1104 | \def\@thmcountersep{.}
1105 | \spnewtheorem{theorem}{Theorem}[section]{\bfseries}{\itshape}
1106 | \else
1107 | \spnewtheorem{theorem}{Theorem}{\bfseries}{\itshape}
1108 | \if@envcntreset
1109 | \@addtoreset{theorem}{section}
1110 | \else
1111 | \@addtoreset{theorem}{chapter}
1112 | \fi
1113 | \fi
1114 |
1115 | %definition of divers theorem environments
1116 | \spnewtheorem*{claim}{Claim}{\itshape}{\rmfamily}
1117 | \spnewtheorem*{proof}{Proof}{\itshape}{\rmfamily}
1118 | \if@envcntsame % alle Umgebungen wie Theorem.
1119 | \def\spn@wtheorem#1#2#3#4{\@spothm{#1}[theorem]{#2}{#3}{#4}}
1120 | \else % alle Umgebungen mit eigenem Zaehler
1121 | \if@envcntsect % mit section numeriert
1122 | \def\spn@wtheorem#1#2#3#4{\@spxnthm{#1}{#2}[section]{#3}{#4}}
1123 | \else % nicht mit section numeriert
1124 | \if@envcntreset
1125 | \def\spn@wtheorem#1#2#3#4{\@spynthm{#1}{#2}{#3}{#4}
1126 | \@addtoreset{#1}{section}}
1127 | \else
1128 | \def\spn@wtheorem#1#2#3#4{\@spynthm{#1}{#2}{#3}{#4}
1129 | \@addtoreset{#1}{chapter}}%
1130 | \fi
1131 | \fi
1132 | \fi
1133 | \spn@wtheorem{case}{Case}{\itshape}{\rmfamily}
1134 | \spn@wtheorem{conjecture}{Conjecture}{\itshape}{\rmfamily}
1135 | \spn@wtheorem{corollary}{Corollary}{\bfseries}{\itshape}
1136 | \spn@wtheorem{definition}{Definition}{\bfseries}{\itshape}
1137 | \spn@wtheorem{example}{Example}{\itshape}{\rmfamily}
1138 | \spn@wtheorem{exercise}{Exercise}{\itshape}{\rmfamily}
1139 | \spn@wtheorem{lemma}{Lemma}{\bfseries}{\itshape}
1140 | \spn@wtheorem{note}{Note}{\itshape}{\rmfamily}
1141 | \spn@wtheorem{problem}{Problem}{\itshape}{\rmfamily}
1142 | \spn@wtheorem{property}{Property}{\itshape}{\rmfamily}
1143 | \spn@wtheorem{proposition}{Proposition}{\bfseries}{\itshape}
1144 | \spn@wtheorem{question}{Question}{\itshape}{\rmfamily}
1145 | \spn@wtheorem{solution}{Solution}{\itshape}{\rmfamily}
1146 | \spn@wtheorem{remark}{Remark}{\itshape}{\rmfamily}
1147 |
1148 | \def\@takefromreset#1#2{%
1149 | \def\@tempa{#1}%
1150 | \let\@tempd\@elt
1151 | \def\@elt##1{%
1152 | \def\@tempb{##1}%
1153 | \ifx\@tempa\@tempb\else
1154 | \@addtoreset{##1}{#2}%
1155 | \fi}%
1156 | \expandafter\expandafter\let\expandafter\@tempc\csname cl@#2\endcsname
1157 | \expandafter\def\csname cl@#2\endcsname{}%
1158 | \@tempc
1159 | \let\@elt\@tempd}
1160 |
1161 | \def\theopargself{\def\@spopargbegintheorem##1##2##3##4##5{\trivlist
1162 | \item[\hskip\labelsep{##4##1\ ##2}]{##4##3\@thmcounterend\ }##5}
1163 | \def\@Opargbegintheorem##1##2##3##4{##4\trivlist
1164 | \item[\hskip\labelsep{##3##1}]{##3##2\@thmcounterend\ }}
1165 | }
1166 |
1167 | \renewenvironment{abstract}{%
1168 | \list{}{\advance\topsep by0.35cm\relax\small
1169 | \leftmargin=1cm
1170 | \labelwidth=\z@
1171 | \listparindent=\z@
1172 | \itemindent\listparindent
1173 | \rightmargin\leftmargin}\item[\hskip\labelsep
1174 | \bfseries\abstractname]}
1175 | {\endlist}
1176 |
1177 | \newdimen\headlineindent % dimension for space between
1178 | \headlineindent=1.166cm % number and text of headings.
1179 |
1180 | \def\ps@headings{\let\@mkboth\@gobbletwo
1181 | \let\@oddfoot\@empty\let\@evenfoot\@empty
1182 | \def\@evenhead{\normalfont\small\rlap{\thepage}\hspace{\headlineindent}%
1183 | \leftmark\hfil}
1184 | \def\@oddhead{\normalfont\small\hfil\rightmark\hspace{\headlineindent}%
1185 | \llap{\thepage}}
1186 | \def\chaptermark##1{}%
1187 | \def\sectionmark##1{}%
1188 | \def\subsectionmark##1{}}
1189 |
1190 | \def\ps@titlepage{\let\@mkboth\@gobbletwo
1191 | \let\@oddfoot\@empty\let\@evenfoot\@empty
1192 | \def\@evenhead{\normalfont\small\rlap{\thepage}\hspace{\headlineindent}%
1193 | \hfil}
1194 | \def\@oddhead{\normalfont\small\hfil\hspace{\headlineindent}%
1195 | \llap{\thepage}}
1196 | \def\chaptermark##1{}%
1197 | \def\sectionmark##1{}%
1198 | \def\subsectionmark##1{}}
1199 |
1200 | \if@runhead\ps@headings\else
1201 | \ps@empty\fi
1202 |
1203 | \setlength\arraycolsep{1.4\p@}
1204 | \setlength\tabcolsep{1.4\p@}
1205 |
1206 | \endinput
1207 | %end of file llncs.cls
1208 |
--------------------------------------------------------------------------------