├── fastbm25 ├── __init__.py └── fastbm25.py ├── setup.py ├── README.md ├── fastbm25.py └── LICENSE /fastbm25/__init__.py: -------------------------------------------------------------------------------- 1 | from .fastbm25 import fastbm25 2 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import setuptools 2 | 3 | with open("README.md", "r") as fh: 4 | long_description = fh.read() 5 | 6 | setuptools.setup( 7 | name="fastbm25", 8 | version="0.0.2", 9 | author="zhusleep", 10 | author_email="zhuflower@qq.com", 11 | description="Fast text match algorithm implementation for bm25", 12 | long_description=long_description, 13 | long_description_content_type="text/markdown", 14 | url="https://github.com/zhusleep/fastbm25", 15 | packages=setuptools.find_packages(), 16 | classifiers=[ 17 | "Programming Language :: Python :: 3", 18 | "License :: OSI Approved :: MIT License", 19 | "Operating System :: OS Independent", 20 | ], 21 | python_requires='>=3.5', 22 | ) 23 | 24 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Installation 2 | The easiest way to install this package is through pip, using 3 | 4 | ``` 5 | pip install fastbm25 6 | ``` 7 | # fastbm25 8 | The fast BM25 algorithm for text matching, accelerated by an inverted index, so the complexity is no more than O(N log N). 9 | 10 | (利用倒排索引加速的bm25文本匹配算法,从一堆数据中寻找最相似的文本) 11 | 12 | # usage 13 | ## find top k similar sentences from corpus; Note: you should tokenize the text and remove stop words in advance 14 | ``` 15 | from fastbm25 import fastbm25 16 | 17 | corpus = [ 18 | "How are you !", 19 | "Hello Jack! Nice to meet you!", 20 | "I am from China, I like math."
21 | ] 22 | tokenized_corpus = [doc.lower().split(" ") for doc in corpus] 23 | model = fastbm25(tokenized_corpus) 24 | query = "where are you from".lower().split() 25 | result = model.top_k_sentence(query,k=1) 26 | print(result) 27 | ``` 28 | The result is list of tuple like 29 | > [('I am from China, I like math.', 2, -0.06000000000000001)] 30 | 31 | For some language like Chinese that doesn't need tokenization. you can use this example 32 | ``` 33 | from fastbm25 import fastbm25 34 | 35 | corpus = [ 36 | "张三考上了清华", 37 | "李四考上了北大", 38 | "我烤上了地瓜.", 39 | "我们都有光明的未来." 40 | ] 41 | model = fastbm25(corpus) 42 | query = "我考上了大学" 43 | result = model.top_k_sentence(query,k=1) 44 | print(result) 45 | ``` 46 | > [('李四考上了北大', 1, 1.21)] 47 | ## find document pair similarity between document a and document b 48 | Note that a and b don't need to be included in the reference corpus; 49 | 50 | ``` 51 | from fastbm25 import fastbm25 52 | corpus = [ 53 | "How are you !", 54 | "Hello Jack! Nice to meet you!", 55 | "I am from China, I like math." 56 | ] 57 | tokenized_corpus = [doc.lower().split(" ") for doc in corpus] 58 | model = fastbm25(tokenized_corpus) 59 | document_a = "where are you from".lower().split() 60 | document_b = "where are you".lower().split() 61 | 62 | result = model.similarity_bm25(document_a,document_b) 63 | print(result) 64 | ``` 65 | > 1.944187075527278 66 | 67 | -------------------------------------------------------------------------------- /fastbm25.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | # 4 | 5 | """This module contains function of computing rank scores for documents in 6 | corpus and helper class `BM25` used in calculations. Original algorithm 7 | described in [1]_, also you may check Wikipedia page [2]_. 8 | 9 | 10 | .. [1] Robertson, Stephen; Zaragoza, Hugo (2009). 
The Probabilistic Relevance Framework: BM25 and Beyond, 11 | http://www.staff.city.ac.uk/~sb317/papers/foundations_bm25_review.pdf 12 | .. [2] Okapi BM25 on Wikipedia, https://en.wikipedia.org/wiki/Okapi_BM25 13 | 14 | 15 | Data: 16 | ----- 17 | .. data:: PARAM_K1 - Free smoothing parameter for BM25. 18 | .. data:: PARAM_B - Free smoothing parameter for BM25. 19 | .. data:: EPSILON - Constant used for negative idf of document in corpus. 20 | 21 | """ 22 | 23 | 24 | import math 25 | from six import iteritems 26 | from six.moves import range 27 | import numpy as np 28 | import heapq 29 | from collections.abc import Iterable 30 | 31 | 32 | PARAM_K1 = 1.5 33 | PARAM_B = 0.75 34 | EPSILON = 0.25 35 | 36 | 37 | class fastbm25(object): 38 | """Implementation of Best Matching 25 ranking function. 39 | 40 | Attributes 41 | ---------- 42 | corpus_size : int 43 | Size of corpus (number of documents). 44 | avgdl : float 45 | Average length of document in `corpus`. 46 | doc_freqs : list of dicts of int 47 | Dictionary with terms frequencies for each document in `corpus`. Words used as keys and frequencies as values. 48 | idf : dict 49 | Dictionary with inverse document frequencies for whole `corpus`. Words used as keys and IDF values as values. 50 | doc_len : dict of int 51 | Mapping from document index (position in `corpus`) to document length. 52 | """ 53 | 54 | def __init__(self, corpus): 55 | """ 56 | Parameters 57 | ---------- 58 | corpus : list of list of str 59 | Given corpus. 60 | 61 | """ 62 | self.corpus_size = len(corpus) 63 | self.avgdl = 0 64 | self.doc_freqs = [] 65 | self.idf = {} 66 | self.doc_len = {} 67 | self._initialize(corpus) 68 | self.corpus = corpus 69 | self.get_score_by_reversed_index_all_documents(corpus) 70 | 71 | def _initialize(self, corpus): 72 | """Calculates frequencies of terms in documents and in corpus. 
Also computes inverse document frequencies.""" 73 | nd = {}  # word -> number of documents with word 74 | num_doc = 0 75 | for j, document in enumerate(corpus): 76 | self.doc_len[j] = len(document) 77 | num_doc += len(document) 78 | 79 | frequencies = {} 80 | for word in document: 81 | if word not in frequencies: 82 | frequencies[word] = 0 83 | frequencies[word] += 1 84 | self.doc_freqs.append(frequencies) 85 | 86 | for word, freq in iteritems(frequencies): 87 | if word not in nd: 88 | nd[word] = 0 89 | nd[word] += 1 90 | 91 | self.avgdl = float(num_doc) / self.corpus_size 92 | # collect idf sum to calculate an average idf for epsilon value 93 | idf_sum = 0 94 | # collect words with negative idf to set them a special epsilon value. 95 | # idf can be negative if word is contained in more than half of documents 96 | negative_idfs = [] 97 | self.nd = nd 98 | for word, freq in iteritems(nd): 99 | idf = math.log(self.corpus_size - freq + 0.5) - math.log(freq + 0.5) 100 | self.idf[word] = idf 101 | idf_sum += idf 102 | if idf < 0: 103 | negative_idfs.append(word) 104 | self.average_idf = float(idf_sum) / len(self.idf) 105 | 106 | eps = EPSILON * self.average_idf 107 | for word in negative_idfs: 108 | self.idf[word] = eps 109 | 110 | def similarity_bm25(self, document_a, document_b): 111 | """Computes BM25 score of given `document A` in relation to given `document B` . 112 | 113 | Parameters 114 | ---------- 115 | document_a : list of str 116 | Document to be scored. 117 | document_b : list of str 118 | Document to be scored. 119 | Returns 120 | ------- 121 | float 122 | BM25 score. 
123 | 124 | """ 125 | assert isinstance(document_a,Iterable),'document a is not iterable' 126 | assert isinstance(document_b,Iterable),'document b is not iterable' 127 | score = 0 128 | doc_freqs = {} 129 | for word in document_b: 130 | if word not in doc_freqs: 131 | doc_freqs[word] = 0 132 | doc_freqs[word] += 1 133 | freq = 1 134 | default_idf = math.log(self.corpus_size - freq + 0.5) - math.log(freq + 0.5) 135 | for word in document_a: 136 | if word not in doc_freqs: 137 | continue 138 | score += (self.idf.get(word,default_idf) * doc_freqs[word] * (PARAM_K1 + 1) 139 | / (doc_freqs[word] + PARAM_K1 * (1 - PARAM_B + PARAM_B * len(document_b) / self.avgdl))) 140 | return score 141 | 142 | 143 | def get_score_by_reversed_index_all_documents(self, corpus): 144 | """ 145 | Build inverted index for documents like {word: {doc_index: score}} 146 | """ 147 | document_score = {} 148 | for index, document in enumerate(corpus): 149 | q_id = index 150 | doc_freqs = self.doc_freqs[index] 151 | for word in document: 152 | if word not in doc_freqs: 153 | continue 154 | score = (self.idf[word] * doc_freqs[word] * (PARAM_K1 + 1) 155 | / (doc_freqs[word] + PARAM_K1 * (1 - PARAM_B + PARAM_B * self.doc_len[index] / self.avgdl))) 156 | if word not in document_score: 157 | document_score[word] = {q_id: round(score, 2)} 158 | else: 159 | document_score[word].update({q_id: round(score, 2)}) 160 | self.document_score = document_score 161 | 162 | def top_k_sentence(self,document,k=1): 163 | """ 164 | document: Iterable, to be retrieved 165 | Returns 166 | ------- 167 | list of tuple 168 | List of [(nearest sentence,index,score)]. 
169 | """ 170 | assert isinstance(document,Iterable),'document is not iterable' 171 | score_overall = {} 172 | for word in document: 173 | if word not in self.document_score: 174 | continue 175 | for key, value in self.document_score[word].items(): 176 | if key not in score_overall: 177 | # print(score_overall) 178 | score_overall[key] = value 179 | else: 180 | score_overall[key] += value 181 | k_keys_sorted = heapq.nlargest(k, score_overall,key=score_overall.__getitem__) 182 | return [(self.corpus[item],item,score_overall.get(item,None)) for item in k_keys_sorted] 183 | 184 | -------------------------------------------------------------------------------- /fastbm25/fastbm25.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | # 4 | 5 | """This module contains function of computing rank scores for documents in 6 | corpus and helper class `BM25` used in calculations. Original algorithm 7 | described in [1]_, also you may check Wikipedia page [2]_. 8 | 9 | 10 | .. [1] Robertson, Stephen; Zaragoza, Hugo (2009). The Probabilistic Relevance Framework: BM25 and Beyond, 11 | http://www.staff.city.ac.uk/~sb317/papers/foundations_bm25_review.pdf 12 | .. [2] Okapi BM25 on Wikipedia, https://en.wikipedia.org/wiki/Okapi_BM25 13 | 14 | 15 | Data: 16 | ----- 17 | .. data:: PARAM_K1 - Free smoothing parameter for BM25. 18 | .. data:: PARAM_B - Free smoothing parameter for BM25. 19 | .. data:: EPSILON - Constant used for negative idf of document in corpus. 20 | 21 | """ 22 | 23 | 24 | import math 25 | from six import iteritems 26 | from six.moves import range 27 | import numpy as np 28 | import heapq 29 | from collections.abc import Iterable 30 | 31 | 32 | PARAM_K1 = 1.5 33 | PARAM_B = 0.75 34 | EPSILON = 0.25 35 | 36 | 37 | class fastbm25(object): 38 | """Implementation of Best Matching 25 ranking function. 
39 | 40 | Attributes 41 | ---------- 42 | corpus_size : int 43 | Size of corpus (number of documents). 44 | avgdl : float 45 | Average length of document in `corpus`. 46 | doc_freqs : list of dicts of int 47 | Dictionary with terms frequencies for each document in `corpus`. Words used as keys and frequencies as values. 48 | idf : dict 49 | Dictionary with inverse document frequencies for whole `corpus`. Words used as keys and IDF values as values. 50 | doc_len : dict of int 51 | Mapping from document index (position in `corpus`) to document length. 52 | """ 53 | 54 | def __init__(self, corpus): 55 | """ 56 | Parameters 57 | ---------- 58 | corpus : list of list of str 59 | Given corpus. 60 | 61 | """ 62 | self.corpus_size = len(corpus) 63 | self.avgdl = 0 64 | self.doc_freqs = [] 65 | self.idf = {} 66 | self.doc_len = {} 67 | self._initialize(corpus) 68 | self.corpus = corpus 69 | self.get_score_by_reversed_index_all_documents(corpus) 70 | 71 | def _initialize(self, corpus): 72 | """Calculates frequencies of terms in documents and in corpus. Also computes inverse document frequencies.""" 73 | nd = {}  # word -> number of documents with word 74 | num_doc = 0 75 | for j, document in enumerate(corpus): 76 | self.doc_len[j] = len(document) 77 | num_doc += len(document) 78 | 79 | frequencies = {} 80 | for word in document: 81 | if word not in frequencies: 82 | frequencies[word] = 0 83 | frequencies[word] += 1 84 | self.doc_freqs.append(frequencies) 85 | 86 | for word, freq in iteritems(frequencies): 87 | if word not in nd: 88 | nd[word] = 0 89 | nd[word] += 1 90 | 91 | self.avgdl = float(num_doc) / self.corpus_size 92 | # collect idf sum to calculate an average idf for epsilon value 93 | idf_sum = 0 94 | # collect words with negative idf to set them a special epsilon value. 
95 | # idf can be negative if word is contained in more than half of documents 96 | negative_idfs = [] 97 | self.nd = nd 98 | for word, freq in iteritems(nd): 99 | idf = math.log(self.corpus_size - freq + 0.5) - math.log(freq + 0.5) 100 | self.idf[word] = idf 101 | idf_sum += idf 102 | if idf < 0: 103 | negative_idfs.append(word) 104 | self.average_idf = float(idf_sum) / len(self.idf) 105 | 106 | eps = EPSILON * self.average_idf 107 | for word in negative_idfs: 108 | self.idf[word] = eps 109 | 110 | def similarity_bm25(self, document_a, document_b): 111 | """Computes BM25 score of given `document A` in relation to given `document B` . 112 | 113 | Parameters 114 | ---------- 115 | document_a : list of str 116 | Document to be scored. 117 | document_b : list of str 118 | Document to be scored. 119 | Returns 120 | ------- 121 | float 122 | BM25 score. 123 | 124 | """ 125 | assert isinstance(document_a,Iterable),'document a is not iterable' 126 | assert isinstance(document_b,Iterable),'document b is not iterable' 127 | score = 0 128 | doc_freqs = {} 129 | for word in document_b: 130 | if word not in doc_freqs: 131 | doc_freqs[word] = 0 132 | doc_freqs[word] += 1 133 | freq = 1 134 | default_idf = math.log(self.corpus_size - freq + 0.5) - math.log(freq + 0.5) 135 | for word in document_a: 136 | if word not in doc_freqs: 137 | continue 138 | score += (self.idf.get(word,default_idf) * doc_freqs[word] * (PARAM_K1 + 1) 139 | / (doc_freqs[word] + PARAM_K1 * (1 - PARAM_B + PARAM_B * len(document_b) / self.avgdl))) 140 | return score 141 | 142 | 143 | def get_score_by_reversed_index_all_documents(self, corpus): 144 | """ 145 | Build inverted index for documents like {word: {doc_index: score}} 146 | """ 147 | document_score = {} 148 | for index, document in enumerate(corpus): 149 | q_id = index 150 | doc_freqs = self.doc_freqs[index] 151 | for word in document: 152 | if word not in doc_freqs: 153 | continue 154 | score = (self.idf[word] * doc_freqs[word] * (PARAM_K1 + 1) 155 | / 
(doc_freqs[word] + PARAM_K1 * (1 - PARAM_B + PARAM_B * self.doc_len[index] / self.avgdl))) 156 | if word not in document_score: 157 | document_score[word] = {q_id: round(score, 2)} 158 | else: 159 | document_score[word].update({q_id: round(score, 2)}) 160 | self.document_score = document_score 161 | 162 | def top_k_sentence(self,document,k=1): 163 | """ 164 | document: Iterable, to be retrieved 165 | Returns 166 | ------- 167 | list of tuple 168 | List of [(nearest sentence,index,score)]. 169 | """ 170 | assert isinstance(document,Iterable),'document is not iterable' 171 | score_overall = {} 172 | for word in document: 173 | if word not in self.document_score: 174 | continue 175 | for key, value in self.document_score[word].items(): 176 | if key not in score_overall: 177 | # print(score_overall) 178 | score_overall[key] = value 179 | else: 180 | score_overall[key] += value 181 | k_keys_sorted = heapq.nlargest(k, score_overall,key=score_overall.__getitem__) 182 | return [(self.corpus[item],item,score_overall.get(item,None)) for item in k_keys_sorted] 183 | 184 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. 
For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | --------------------------------------------------------------------------------