├── images
├── 0.png
└── 1.png
├── readme.md
├── web_ui.py
├── .gitignore
├── demo.py
└── LICENSE
/images/0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/yuanzhoulvpi2017/DocumentSearch/HEAD/images/0.png
--------------------------------------------------------------------------------
/images/1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/yuanzhoulvpi2017/DocumentSearch/HEAD/images/1.png
--------------------------------------------------------------------------------
/readme.md:
--------------------------------------------------------------------------------
 1 | ## 文档搜索
 2 | 1. 🥱懒得用`langchain`,看他文档,可能需要一天,我自己实现的话,一天都不需要。
 3 | 2. 💻 本质上就是`sbert`和`chatglm-6b`。
 4 | 3. 👨‍💻 自己做一个得了。
 5 | 4. 🗑️目前还是毛坯,后面继续更新。
 6 | 5. 🎯只要传递一个文件夹📁,就可以把这个文件夹下所有的`.pdf`、`.docx`格式文件加载(目前还不支持`.doc`格式文件)
 7 |
 8 |
 9 | ## 用法
10 |
11 | ### 命令行形式
12 | 1. `demo.py`文件里面
13 |
14 | ```python
15 |
16 | if __name__ == "__main__":
17 |     global_dir = "政策归档文件" # 你自己的文件夹
18 |
19 |     kl = KnowLedge(global_dir=global_dir)
20 |     res, data = kl.search_result(question_str="大学生创业有什么补贴") # 你想问什么呢
21 |     print(res)
22 |     print(data)
23 | ```
24 | ### web端
25 | 1. 
注意修改`code-21`行的代码`global_dir = "政策归档文件"` 26 | ```bash 27 | streamlit run web_ui.py --server.fileWatcherType none 28 | ``` 29 | ![](images/0.png) 30 | ![](images/1.png) 31 | 32 | 33 | 34 | -------------------------------------------------------------------------------- /web_ui.py: -------------------------------------------------------------------------------- 1 | import streamlit as st 2 | import pandas as pd 3 | import numpy as np 4 | from demo import KnowLedge 5 | 6 | 7 | 8 | st.set_page_config( 9 | page_title="文档搜索", 10 | page_icon="📝", 11 | layout="wide", 12 | initial_sidebar_state="expanded", 13 | ) 14 | 15 | @st.cache_resource 16 | def create_model(global_dir): 17 | kl = KnowLedge(global_dir=global_dir) 18 | return kl 19 | 20 | # 文件夹目录 21 | global_dir = "政策归档文件" 22 | kl = create_model(global_dir) 23 | 24 | # streamlit run web_ui.py --server.fileWatcherType none 25 | df = pd.DataFrame( 26 | np.random.randn(50, 20), 27 | columns=('col %d' % i for i in range(20))) 28 | 29 | col1, col2 = st.columns(2) 30 | 31 | with col1: 32 | st.header("👇在这里输入文本") 33 | input_str = st.text_input(label="文本输入", placeholder="输入想要提问的内容, 回车键键提交", max_chars=100) 34 | if input_str is not None and len(input_str) >0: 35 | 36 | output_str, output_df = kl.search_result(input_str) 37 | st.session_state['output_df'] = output_df 38 | with st.expander(label="生成结果", expanded=True): 39 | st.markdown(output_str) 40 | # st.text_area(label="展示生成内容", placeholder="", height=600) 41 | 42 | 43 | with col2: 44 | st.header("参考依据") 45 | if st.session_state.get('output_df') is not None: 46 | 47 | st.dataframe(st.session_state.get('output_df')) # Same as st.write(df) 48 | else: 49 | st.markdown(""" 50 | ## 说明: 51 | 1. 在左上角输入`文本`,然后按`enter`结束. 52 | 2. 右上角会有`running`字样,表示程序正在运行. 53 | 3. 结束后,会出现文本提取结果和对应的参考依据. 54 | - 3.1. 左下角文本框是生成的文本. 55 | - 3.2. 右侧是文本生成所参考的文档. 
56 | """) -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 
92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | -------------------------------------------------------------------------------- /demo.py: -------------------------------------------------------------------------------- 1 | from pypdf import PdfReader 2 | import numpy as np 3 | import pandas as pd 4 | from tqdm import tqdm 5 | import docx 6 | from dataclasses import dataclass 7 | from enum import Enum, auto 8 | from typing import List, Optional, Union, Tuple 9 | from pathlib import Path 10 | from glob import glob 11 | import os 12 | import re 13 | from transformers import AutoTokenizer, AutoModel 14 | import torch as t 15 | from transformers import AutoTokenizer, AutoModel 16 | import torch 17 | 18 | # 对文本进行拆分 19 | CHUNK_SIZE = 64 20 | global_dir = "政策归档文件" 21 | 22 | 23 | class FileType(Enum): 24 | PDF = auto() 25 | doc = auto() 26 | docx = auto() 27 | 28 | 29 | @dataclass 30 | class TransOutput: 31 | file_name: str 32 | file_type: FileType 33 | text_data: Union[pd.DataFrame, None] 34 | 35 | 36 | def transpdf(pdf_path: str, show_progress_bar: bool = False): 37 | reader = PdfReader(pdf_path) 38 | number_of_pages = len(reader.pages) 39 | 40 | def page2text(pageid: int): 41 | page = reader.pages[pageid] 42 | text = page.extract_text() 43 | return text 44 | 45 | data = pd.DataFrame({ 46 | # 'file_Path':pdf_path, 47 | 'text': [page2text(i) for i in tqdm(range(number_of_pages), 
disable=not show_progress_bar)], 48 | 'pageid': range(number_of_pages), 49 | }) 50 | res = TransOutput( 51 | file_name=pdf_path, 52 | file_type=FileType.PDF, 53 | text_data=data 54 | ) 55 | return res 56 | 57 | 58 | def transdocx(doc_path: str, show_progress_bar: bool = False): 59 | doc = docx.Document(doc_path) 60 | all_paras = doc.paragraphs 61 | number_of_pages = len(all_paras) 62 | 63 | data = pd.DataFrame({ 64 | 'text': [i.text for i in tqdm(all_paras, disable=not show_progress_bar)], 65 | 'paraid': range(number_of_pages) 66 | }) 67 | res = TransOutput( 68 | file_name=doc_path, 69 | file_type=FileType.docx, 70 | text_data=data 71 | ) 72 | return res 73 | 74 | 75 | def cal_detail_in_dir(dir_name): 76 | all_file_list = [] 77 | # all_file_size = [] 78 | 79 | for (root, dir, file_name) in os.walk(dir_name): 80 | for temp_file in file_name: 81 | standard_path = f"{root}/{temp_file}" 82 | 83 | all_file_list.append(standard_path) 84 | 85 | return all_file_list 86 | 87 | 88 | def transfile(x: Path) -> TransOutput: 89 | if x.suffix == ".docx": 90 | return transdocx(x.__str__()) 91 | else: 92 | return transpdf(x.__str__()) 93 | 94 | 95 | def cleanquestion(x: str) -> str: 96 | if isinstance(x, str): 97 | 98 | str_text = re.sub( 99 | u"([^\u4e00-\u9fa5\u0030-\u0039\u0041-\u005a\u0061-\u007a])", "", x) 100 | return str_text 101 | else: 102 | return None 103 | 104 | 105 | def clean_text_data(transout: TransOutput) -> TransOutput: 106 | text_df = transout.text_data 107 | res = text_df.pipe( 108 | lambda x: x.assign(**{ 109 | 'new_text_': x['text'].apply(lambda j: cleanquestion(j)) 110 | }) 111 | ).pipe( 112 | lambda x: x.loc[x['new_text_'].apply(lambda j: len(j) > 0)] 113 | ) 114 | 115 | transout.text_data = res 116 | return transout 117 | 118 | 119 | def chunk_text(x: str) -> Union[None, List[str]]: 120 | if not isinstance(x, str): 121 | x = str(x) 122 | 123 | x_list = [x[startid:(startid + CHUNK_SIZE)] for startid in range(0, len(x), CHUNK_SIZE)] 124 | return x_list 125 | 
126 | 127 | def chunk_text4TransOutput(x: TransOutput) -> TransOutput: 128 | # try: 129 | text_df = x.text_data 130 | res = text_df.pipe( 131 | lambda x: x.assign(**{ 132 | 'chunk_text': x['new_text_'].apply(lambda j: chunk_text(j)) 133 | }) 134 | ).explode(['chunk_text']).drop(columns=['new_text_']) 135 | x.text_data = res 136 | return x 137 | # except Exception as e: 138 | # return None 139 | 140 | 141 | def numpy_cos_sim(a: np.ndarray, b: np.ndarray) -> np.ndarray: 142 | if len(a.shape) == 1: 143 | a = a.reshape(1, -1) 144 | if len(b.shape) == 1: 145 | b = b.reshape(1, -1) 146 | 147 | a_norm = a / np.linalg.norm(a, ord=2, axis=1).reshape(-1, 1) 148 | b_norm = b / np.linalg.norm(b, ord=2, axis=1).reshape(-1, 1) 149 | 150 | return np.matmul(a_norm, b_norm.T) 151 | 152 | 153 | class SentenceVector: 154 | def __init__(self, 155 | model_name_or_path: str = None, 156 | device: str = "cuda:0") -> None: 157 | self.model_name_or_path = model_name_or_path 158 | self.device = device 159 | 160 | self.tokenizer = AutoTokenizer.from_pretrained(self.model_name_or_path) 161 | 162 | self.model = AutoModel.from_pretrained(self.model_name_or_path) 163 | self.model.to(self.device) 164 | 165 | def encode_fun(self, texts: List[str]) -> np.ndarray: 166 | texts = [cleanquestion(i) for i in texts] 167 | 168 | inputs = self.tokenizer.batch_encode_plus( 169 | texts, padding=True, truncation=True, return_tensors="pt", max_length=64) 170 | inputs.to(device=self.device) 171 | with t.no_grad(): 172 | embeddings = self.model(**inputs) 173 | 174 | embeddings = embeddings.last_hidden_state[:, 0] 175 | embeddings = embeddings.to('cpu').numpy() 176 | return embeddings 177 | 178 | def encode_fun_plus(self, texts: List[str], batch_size: int = 100) -> np.ndarray: 179 | embeddings = np.concatenate([self.encode_fun( 180 | texts[i:(i + batch_size)]) for i in tqdm(range(0, len(texts), batch_size))]) 181 | return embeddings 182 | 183 | 184 | class KnowLedge: 185 | def __init__(self, 186 | global_dir: str 
= None, 187 | gen_model_name_or_path: str = "THUDM/chatglm-6b", 188 | sen_embedding_model_name_or_path: str = "hfl/chinese-roberta-wwm-ext", 189 | batch_top_k=5 190 | ) -> None: 191 | 192 | self.batch_top_k = batch_top_k 193 | 194 | all_file_list = cal_detail_in_dir(global_dir) 195 | all_file_list = [Path(i) for i in all_file_list] 196 | all_file_list = [i for i in all_file_list if i.suffix in ['.pdf', '.docx']] 197 | all_trans_data = [transfile(i) for i in tqdm(all_file_list)] 198 | all_trans_data = [clean_text_data(i) for i in all_trans_data] 199 | all_trans_data = [i for i in all_trans_data if i.text_data.shape[0] > 0] 200 | 201 | all_trans_data = [chunk_text4TransOutput(i) for i in all_trans_data] 202 | 203 | self.sv = SentenceVector(model_name_or_path=sen_embedding_model_name_or_path) 204 | 205 | all_vector = [self.sv.encode_fun_plus(i.text_data['chunk_text'].tolist()) for i in all_trans_data] 206 | 207 | self.all_trans_data = all_trans_data 208 | self.all_vector = all_vector 209 | 210 | self.gen_tokenizer = AutoTokenizer.from_pretrained(gen_model_name_or_path, trust_remote_code=True) 211 | self.gen_model = AutoModel.from_pretrained(gen_model_name_or_path, trust_remote_code=True).half().cuda(1) 212 | 213 | def search_top_info(self, index: int, question_vector: np.ndarray) -> pd.DataFrame: 214 | # print("".format(index)) 215 | similar_score = numpy_cos_sim(self.all_vector[index], question_vector).flatten() 216 | 217 | if similar_score.shape[0] < self.batch_top_k: 218 | res = self.all_trans_data[index].text_data.reset_index(drop=True).pipe( 219 | lambda x: x.assign(**{ 220 | 'score': similar_score 221 | }) 222 | ).pipe( 223 | lambda x: x.assign(**{ 224 | 'file_name': self.all_trans_data[index].file_name, 225 | 'file_path': self.all_trans_data[index].file_type 226 | }) 227 | ) 228 | 229 | else: 230 | 231 | top_k_location = np.argpartition(similar_score, kth=-self.batch_top_k)[-self.batch_top_k:] 232 | 233 | res = 
self.all_trans_data[index].text_data.reset_index(drop=True).iloc[top_k_location].pipe( 234 | lambda x: x.assign(**{ 235 | 'score': similar_score[top_k_location] 236 | }) 237 | ).pipe( 238 | lambda x: x.assign(**{ 239 | 'file_name': self.all_trans_data[index].file_name, 240 | 'file_path': self.all_trans_data[index].file_type 241 | }) 242 | ) 243 | 244 | return res 245 | 246 | def search_result(self, question_str: str) -> Tuple[str, pd.DataFrame]: 247 | 248 | # question_str ="大学生创业有什么补贴" #"做集成电路的企业,有什么补贴"# 249 | question_vector = self.sv.encode_fun([question_str]) 250 | # question_vector.shape 251 | # index = 0 252 | 253 | search_table_info = pd.concat( 254 | [self.search_top_info(index, question_vector) for index in range(len(self.all_vector))]).pipe( 255 | lambda x: x.sort_values(by=['score'], ascending=False) 256 | ) 257 | search_table = search_table_info.drop_duplicates(['chunk_text']).head(30) 258 | 259 | search_text_list = search_table['chunk_text'].tolist() 260 | # len(search_text_list), search_text_list[:3] 261 | 262 | prompt_template = """基于以下已知信息,简洁和专业的来回答用户的问题。 263 | 如果无法从中得到答案,请说 "根据已知信息无法回答该问题" 或 "没有提供足够的相关信息",不允许在答案中添加编造成分,答案请使用中文。 264 | 问题: 265 | {question} 266 | 已知内容: 267 | {context} 268 | 269 | """ 270 | 271 | text2chatglm = prompt_template.format_map({ 272 | 'question': question_str, 273 | 'context': '\n'.join(search_text_list) 274 | }) 275 | 276 | response, history = self.gen_model.chat(self.gen_tokenizer, text2chatglm, history=[]) 277 | torch.cuda.empty_cache() 278 | 279 | return response, search_table 280 | 281 | 282 | if __name__ == "__main__": 283 | kl = KnowLedge(global_dir=global_dir) 284 | res, data = kl.search_result("大学生创业有什么补贴") 285 | print(res) 286 | print(data) 287 | 288 | 289 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 
| TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. 
For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | --------------------------------------------------------------------------------