├── data
│   ├── predictions
│   │   ├── MATH
│   │   │   └── Arithmo-Mistral-7B
│   │   │       ├── predictions_Arithmo_math_zero_shot_CoT.json
│   │   │       └── incorrect_predictions_Arithmo_math_zero_shot_CoT.json
│   │   └── gsm8k
│   │       └── Arithmo-Mistral-7B
│   │           ├── predictions_Arithmo_gsm8k_zero_shot_CoT.json
│   │           ├── predictions_Arithmo_gsm8k_zero_shot_PoT.json
│   │           └── gsm8k_zero_shot_PoT_results.txt
│   └── python_coding_prompts.txt
├── eval
│   ├── gsm8k
│   │   ├── gsm8k_compute_metric_zero_shot_PoT.py
│   │   ├── gsm8k_compute_metric_zero_shot_CoT.py
│   │   ├── gsm8k_write_zero_shot_PoT_outputs.py
│   │   ├── gsm8k_generate_response_zero_shot_CoT.py
│   │   └── gsm8k_generate_response_zero_shot_PoT.py
│   └── MATH
│       ├── MATH_generate_response_zero_shot_CoT.py
│       └── MATH_compute_metric_zero_shot_CoT.py
├── query_model.py
├── data_prep
│   └── prepare_model_traininig_data.py
├── LICENSE
└── README.md
/data/predictions/MATH/Arithmo-Mistral-7B/predictions_Arithmo_math_zero_shot_CoT.json:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:08519621ba0daa4643c758179571aba56952c0fd758f946c4d876662d499d804
3 | size 8716400
4 |
--------------------------------------------------------------------------------
/data/predictions/gsm8k/Arithmo-Mistral-7B/predictions_Arithmo_gsm8k_zero_shot_CoT.json:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:a101e55977c65257064721c36a2a49e2132b87ececda21cb88c59e3befc524d2
3 | size 1644185
4 |
--------------------------------------------------------------------------------
/data/predictions/gsm8k/Arithmo-Mistral-7B/predictions_Arithmo_gsm8k_zero_shot_PoT.json:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:abe5504c9e1b0ee7f7cb8b98b58394eb1dd57b505100cc0baf1c88d97b49f1dc
3 | size 1661651
4 |
--------------------------------------------------------------------------------
/data/predictions/MATH/Arithmo-Mistral-7B/incorrect_predictions_Arithmo_math_zero_shot_CoT.json:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:9e510639bde5d71dd19676fe597dc3ce227749af82198b5db26e5f36dd4b79fb
3 | size 7663790
4 |
--------------------------------------------------------------------------------
/eval/gsm8k/gsm8k_compute_metric_zero_shot_PoT.py:
--------------------------------------------------------------------------------
1 | from fractions import Fraction
2 |
3 | lines = open("data/predictions/gsm8k/Arithmo-Mistral-7B/gsm8k_zero_shot_PoT_results.txt", "r").readlines()  # blocks of: program output, ground truth, "=========" separator
4 | lines = [line.strip() for line in lines]
5 |
6 | predicted = None
7 | correct, total = 0,0
8 | for line in lines:
9 | if line.startswith("==="):
10 | predicted, truth = None, None
11 | elif predicted is None:
12 |         # Assign a default value when the predicted answer contains letters or spaces. This is not expected for GSM8K data; there are very few such cases.
13 | if any(c.isalpha() for c in line) or " " in line:
14 | predicted = "1e-9" # some default value
15 | else:
16 | if "/" in line: # eg: "27/3" => 9.0. Very few cases.
17 | predicted = float(Fraction(line))
18 | else:
19 | predicted = float(line)
20 | else:
21 | if predicted == float(line):
22 | correct += 1
23 | total += 1
24 | print(f"Total Instances: {total}, Correct Count: {correct}, Accuracy (Correct Count/Total Instances): {correct/total}")
25 |
26 |
--------------------------------------------------------------------------------
/eval/gsm8k/gsm8k_compute_metric_zero_shot_CoT.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 | file_path = "data/predictions/gsm8k/Arithmo-Mistral-7B/predictions_Arithmo_gsm8k_zero_shot_CoT.json"
4 |
5 | def extract_ground_truth_answer(ground_truth_gen):
6 | # there are cases when 250000 is written as 250,000. Normalize it to 250000
7 | answer = ground_truth_gen.split("####")[-1].strip().replace(",", "")
8 | return answer
9 |
10 | def extract_predicted_answer(predicted_gen):
11 | if "The answer is:" in predicted_gen:
12 | answer = predicted_gen.rsplit("The answer is:")[-1].strip()
13 | return answer
14 | elif "The answer is " in predicted_gen:
15 | answer = predicted_gen.rsplit("The answer is ")[-1].strip()
16 | return answer
17 | else: # Answer couldn't be found in generated text. Return empty string.
18 | return ""
19 |
20 | count, total = 0,0
21 | with open(file_path, 'r') as f:
22 | data = json.load(f)
23 | for d in data:
24 | question = d["question"]
25 | ground_truth_gen = d["ground_truth"]
26 | predicted_gen = d["prediction"]
27 |
28 | ground_truth_answer = extract_ground_truth_answer(ground_truth_gen)
29 |         predicted_answer = extract_predicted_answer(predicted_gen)
30 | if ground_truth_answer == predicted_answer:
31 | count += 1
32 | total += 1
33 | print(f"Total Instances: {total}, Correct Count: {count}, Accuracy (Correct Count/Total Instances): {count/total}")
34 |
--------------------------------------------------------------------------------
/eval/gsm8k/gsm8k_write_zero_shot_PoT_outputs.py:
--------------------------------------------------------------------------------
1 | # Run this file as 'python eval/gsm8k/gsm8k_write_zero_shot_PoT_outputs.py > data/predictions/gsm8k/Arithmo-Mistral-7B/gsm8k_zero_shot_PoT_results.txt'
2 |
3 | import json
4 |
5 | file_path = "data/predictions/gsm8k/Arithmo-Mistral-7B/predictions_Arithmo_gsm8k_zero_shot_PoT.json"
6 |
7 | def extract_ground_truth_answer(ground_truth_gen):
8 | # there are cases when 250000 is written as 250,000
9 | answer = ground_truth_gen.split("####")[-1].strip().replace(",", "")
10 | return answer
11 |
12 | def extract_python_program(predicted_gen):
13 | if "Answer: " in predicted_gen:
14 | program = predicted_gen.rsplit("Answer: ")[-1].strip()
15 | else:
16 | program = ""
17 |         print(predicted_gen)  # no program found; dump the raw generation for manual inspection
18 | return program
19 |
20 |
21 |
22 | with open(file_path, 'r') as f:
23 | data = json.load(f)
24 | for i, d in enumerate(data):
25 | question = d["question"]
26 | ground_truth_gen = d["ground_truth"]
27 | predicted_gen = d["prediction"]
28 |
29 | ground_truth_answer = extract_ground_truth_answer(ground_truth_gen)
30 | py_program = extract_python_program(predicted_gen)
31 | try:
32 |             exec(py_program) # exec prints the program's output to stdout; it does not return the output as a value.
33 | print(ground_truth_answer)
34 | print("=========")
35 | except:
36 |             # The generated Python program failed to run; skip it.
37 | pass
38 |
--------------------------------------------------------------------------------
/eval/gsm8k/gsm8k_generate_response_zero_shot_CoT.py:
--------------------------------------------------------------------------------
1 | import torch
2 | from transformers import (
3 | AutoModelForCausalLM,
4 | AutoTokenizer
5 | )
6 | import json
7 | from datasets import load_dataset
8 |
9 | model_path = "akjindal53244/Arithmo-Mistral-7B"
10 |
11 | device_map = {"": 0}
12 |
13 | ft_model = AutoModelForCausalLM.from_pretrained(
14 | model_path,
15 | device_map=device_map
16 | )
17 |
18 | tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
19 | tokenizer.pad_token = tokenizer.eos_token
20 |
21 | predictions = list()
22 |
23 | gsm8k_test = load_dataset("gsm8k", "main")
24 | dataset_size = len(gsm8k_test['test'])
25 | print(f"gsm8k_test size: {dataset_size}")
26 |
27 | count = 0
28 | # Adjust batch size based on available memory.
29 | batch_size = 16
30 |
31 | for i in range(0, dataset_size, batch_size):
32 | start = i
33 |     end = min(start + batch_size, dataset_size)
34 | examples = gsm8k_test["test"][start:end]
35 | input_text_ft = [f"Question: {each}\n\nAnswer:" for each in examples["question"]]
36 |     inputs_ft = tokenizer(input_text_ft, return_tensors="pt", padding=True).to("cuda")  # keep inputs on the same device as the model
37 | generated_ids = ft_model.generate(**inputs_ft, max_new_tokens=1024, temperature=0.0)
38 | output = tokenizer.batch_decode(generated_ids, skip_special_tokens=True)
39 | for j in range(len(output)):
40 | predictions.append(
41 | {
42 | "question": examples["question"][j],
43 | "ground_truth": examples['answer'][j],
44 | "prediction": output[j]
45 | }
46 | )
47 | count += len(output)
48 | print(count)
49 |
50 | with open('data/predictions/gsm8k/Arithmo-Mistral-7B/predictions_Arithmo_gsm8k_zero_shot_CoT.json', 'w') as f:
51 | json.dump(predictions, f, indent=1)
52 |
53 |
--------------------------------------------------------------------------------
/eval/gsm8k/gsm8k_generate_response_zero_shot_PoT.py:
--------------------------------------------------------------------------------
1 | import torch
2 | from transformers import (
3 | AutoModelForCausalLM,
4 | AutoTokenizer
5 | )
6 | import json
7 | from datasets import load_dataset
8 |
9 | model_path = "akjindal53244/Arithmo-Mistral-7B"
10 |
11 | device_map = {"": 0}
12 |
13 | ft_model = AutoModelForCausalLM.from_pretrained(
14 | model_path,
15 | device_map=device_map
16 | )
17 |
18 | tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
19 | tokenizer.pad_token = tokenizer.eos_token
20 |
21 | predictions = list()
22 |
23 | gsm8k_test = load_dataset("gsm8k", "main")
24 | dataset_size = len(gsm8k_test['test'])
25 | print(f"gsm8k_test size: {dataset_size}")
26 |
27 | count = 0
28 | # Adjust batch size based on available memory.
29 | batch_size = 16
30 |
31 |
32 |
33 | for i in range(0, dataset_size, batch_size):
34 | start = i
35 |     end = min(start + batch_size, dataset_size)
36 | examples = gsm8k_test["test"][start:end]
37 | input_text_ft = [f"Question: {each}. Write a Python program to solve this.\n\nAnswer:" for each in examples["question"]] # Added Python prompt
38 |     inputs_ft = tokenizer(input_text_ft, return_tensors="pt", padding=True).to("cuda")  # keep inputs on the same device as the model
39 | generated_ids = ft_model.generate(**inputs_ft, max_new_tokens=1024, temperature=0.0)
40 | output = tokenizer.batch_decode(generated_ids, skip_special_tokens=True)
41 | for j in range(len(output)):
42 | predictions.append(
43 | {
44 | "question": examples["question"][j],
45 | "ground_truth": examples['answer'][j],
46 | "prediction": output[j]
47 | }
48 | )
49 | count += len(output)
50 | print(count)
51 |
52 | with open('data/predictions/gsm8k/Arithmo-Mistral-7B/predictions_Arithmo_gsm8k_zero_shot_PoT.json', 'w') as f:
53 | json.dump(predictions, f, indent=1)
54 |
55 |
--------------------------------------------------------------------------------
/eval/MATH/MATH_generate_response_zero_shot_CoT.py:
--------------------------------------------------------------------------------
1 | import torch
2 | from transformers import (
3 | AutoModelForCausalLM,
4 | AutoTokenizer,
5 | BitsAndBytesConfig,
6 | )
7 | from peft import PeftModel
8 | import json
9 |
10 | model_path = "akjindal53244/Arithmo-Mistral-7B"
11 |
12 | from datasets import load_dataset, concatenate_datasets
13 |
14 | device_map = {"": 0}
15 |
16 | ft_model = AutoModelForCausalLM.from_pretrained(
17 | model_path,
18 | device_map=device_map
19 | )
20 |
21 | tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
22 | tokenizer.pad_token = tokenizer.eos_token
23 |
24 | predictions = list()
25 |
26 | math_test = load_dataset("competition_math")
27 | dataset_size = len(math_test['test'])
28 | print(f"math_test size: {dataset_size}")
29 |
30 | count = 0
31 | # Adjust batch size based on available memory.
32 | batch_size = 6
33 |
34 | for i in range(0, dataset_size, batch_size):
35 | start = i
36 |     end = min(start + batch_size, dataset_size)
37 | examples = math_test["test"][start:end]
38 | input_text_ft = [f"Question: {each}\n\nAnswer:" for each in examples["problem"]]
39 | inputs_ft = tokenizer(input_text_ft, return_tensors="pt", padding=True).to("cuda")
40 | generated_ids = ft_model.generate(**inputs_ft, max_new_tokens=2048, temperature=0.0)
41 | output = tokenizer.batch_decode(generated_ids, skip_special_tokens=True)
42 | for j in range(len(output)):
43 | predictions.append(
44 | {
45 | "question": examples["problem"][j],
46 | "ground_truth": examples['solution'][j],
47 | "prediction": output[j]
48 | }
49 | )
50 | count += len(output)
51 | print(count)
52 |
53 | with open('data/predictions/MATH/Arithmo-Mistral-7B/predictions_Arithmo_math_zero_shot_CoT.json', 'w') as f:  # MATH predictions belong under data/predictions/MATH (path read by the metric script)
54 | json.dump(predictions, f, indent=1)
55 |
56 |
57 |
--------------------------------------------------------------------------------
/query_model.py:
--------------------------------------------------------------------------------
1 | import torch
2 | from transformers import (
3 | AutoModelForCausalLM,
4 | AutoTokenizer,
5 | BitsAndBytesConfig,
6 | )
7 |
8 | model_path = "akjindal53244/Arithmo-Mistral-7B"
9 |
10 | run_model_on_gpu = True
11 |
12 | ##############################################################################################
13 | # bitsandbytes parameters. Used only if run_model_on_gpu = True; bitsandbytes quantization requires a CUDA GPU.
14 | ##############################################################################################
15 |
16 | # Activate 4-bit precision base model loading
17 | use_4bit = True
18 |
19 | # Compute dtype for 4-bit base models
20 | bnb_4bit_compute_dtype = "bfloat16" # Efficient; supported natively on newer (Ampere or later) GPUs
21 |
22 | # Quantization type (fp4 or nf4)
23 | bnb_4bit_quant_type = "nf4"
24 |
25 | # Activate nested quantization for 4-bit base models (double quantization)
26 | use_nested_quant = False
27 |
28 | #########################################
29 | # Load Model and associated tokenizer.
30 | #########################################
31 |
32 | if run_model_on_gpu:
33 | device_map = {"": 0}
34 | # Load tokenizer and model with QLoRA configuration
35 | compute_dtype = getattr(torch, bnb_4bit_compute_dtype)
36 |
37 | bnb_config = BitsAndBytesConfig(
38 | load_in_4bit=use_4bit,
39 | bnb_4bit_quant_type=bnb_4bit_quant_type,
40 | bnb_4bit_compute_dtype=compute_dtype,
41 | bnb_4bit_use_double_quant=use_nested_quant,
42 | )
43 | arithmo_model = AutoModelForCausalLM.from_pretrained(
44 | model_path,
45 | quantization_config=bnb_config,
46 | device_map=device_map,
47 | )
48 | else:
49 | device_map = {"": "cpu"}
50 | arithmo_model = AutoModelForCausalLM.from_pretrained(
51 | model_path,
52 | device_map=device_map,
53 | )
54 |
55 | # Load Tokenizer
56 | tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
57 |
58 |
59 | ##############################################
60 | # Query Model with CoT (default) and PoT
61 | ##############################################
62 |
63 | while True:
64 | input_text = input("Enter your question: ")
65 |
66 |     # Default: generate reasoning steps, i.e., CoT
67 | input_text_ft = f"Question: {input_text.strip()}\n\nAnswer:"
68 |     # Uncomment the next line if you want to generate a Python program instead, i.e., PoT
69 | # input_text_ft = f"Question: {input_text.strip()}. Write a Python program to solve this.\n\nAnswer:"
70 |
71 | if run_model_on_gpu:
72 | inputs_ft = tokenizer(input_text_ft, return_tensors="pt").to("cuda")
73 | else:
74 | inputs_ft = tokenizer(input_text_ft, return_tensors="pt")
75 |
76 | generated_ids = arithmo_model.generate(**inputs_ft, max_new_tokens=1024, temperature=0.0)
77 | output = tokenizer.batch_decode(generated_ids, skip_special_tokens=True)[0]
78 | print(output + "\n")
--------------------------------------------------------------------------------
/data_prep/prepare_model_traininig_data.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | import random
4 |
5 | from datasets import load_dataset, concatenate_datasets
6 | import numpy as np
7 |
8 | from difflib import SequenceMatcher
9 |
10 | def similar(a, b):
11 | return SequenceMatcher(None, a, b).ratio()
12 |
13 | all_python_prompts = open("data/python_coding_prompts.txt", "r").readlines()
14 | all_python_prompts = list(set([each.strip() for each in all_python_prompts]))
15 | random.shuffle(all_python_prompts)
16 |
17 |
18 | # Found these prompts in existing datasets.
19 | existing_prompts = [
20 | "Let's write a program.",
21 | "Let's write a Python program.",
22 | "Let's program in Python in the response.",
23 | "Let's write a Python program to solve it.",
24 | "Please write a program to solve it",
25 | ]
26 |
27 | all_QA = dict()
28 |
29 | def add_python_prompt(question):
30 | question = f"{question.strip()} {random.choice(all_python_prompts)}"
31 | return question
32 |
33 | def replace_python_prompt(question):
34 | for python_prompt in existing_prompts:
35 | if python_prompt in question:
36 | question = question.replace(python_prompt, random.choice(all_python_prompts))
37 | return question
38 |
39 | return question
40 |
41 | def modify_input(question):
42 |     # For Python program prompts, replace the original prompt with a randomly chosen Python prompt.
43 | num = random.randint(1, 10)
44 | if num <= 8:
45 | question = replace_python_prompt(question)
46 |
47 | # Convert input (question) to lower case for 30% of the instances.
48 | num = random.randint(1, 10)
49 | if num <= 3:
50 | question = question.lower()
51 | return question
52 |
53 | def remove_hash(answer: str):
54 | if "####" in answer:
55 | return answer[:answer.rindex("####")].strip()
56 | return answer
57 |
58 | def format_metamath_response(answer: str, answer_identifier: str):
59 | answer_prefix_len = len(answer_identifier)
60 | if answer_identifier in answer:
61 | answer_prefix_start_idx = answer.index(answer_identifier)
62 | reasoning = remove_hash(answer[:answer_prefix_start_idx].strip())
63 |
64 |         # ==== Uncomment the "Answer: " line below to include an explicit answer prefix in the output
65 | answer = answer[answer_prefix_start_idx:].strip()
66 | assert len(answer) > 0
67 | # answer = "Answer: " + answer
68 | return f"{reasoning}\n{answer.strip()}"
69 | else:
70 | return answer
71 |
72 |
73 |
74 | outputs = []
75 |
76 | metamath_dataset = load_dataset("meta-math/MetaMathQA", "train")
77 | print(f"MetaMathQA dataset size: {len(metamath_dataset['train'])}")
78 | print(f"Processing MetaMathQA dataset..")
79 | for each in metamath_dataset["train"]:
80 | output = {}
81 | if each['query'].lower() not in all_QA:
82 | all_QA[each['query'].lower()] = [each['response'].lower()]
83 | elif max([similar(x, each['response'].lower()) for x in all_QA[each['query'].lower()]]) < 0.7:
84 | all_QA[each['query'].lower()].append(each['response'].lower())
85 | else:
86 | continue
87 |
88 | output['question'] = modify_input(each['query']).strip()
89 | output['answer'] = format_metamath_response(each['response'], "The answer is:").strip()
90 | if len(output['question']) > 0 and len(output['answer']) > 0:
91 | outputs.append(output)
92 |
93 |
94 | math_instruct_dataset = load_dataset("TIGER-Lab/MathInstruct", "train")
95 | print(f"MathInstruct dataset size: {len(math_instruct_dataset['train'])}")
96 | print(f"Processing MathInstruct dataset..")
97 | for each in math_instruct_dataset["train"]:
98 | output = {}
99 | if each['instruction'].lower() not in all_QA:
100 | all_QA[each['instruction'].lower()] = [each['output'].lower()]
101 | elif max([similar(x, each['output'].lower()) for x in all_QA[each['instruction'].lower()]]) < 0.7:
102 | all_QA[each['instruction'].lower()].append(each['output'].lower())
103 | else:
104 | continue
105 |
106 | output['question'] = modify_input(each['instruction']).strip()
107 | output['answer'] = format_metamath_response(each['output'], "The answer is").strip()
108 | if len(output['question']) > 0 and len(output['answer']) > 0:
109 | outputs.append(output)
110 |
111 |
112 | lila_ood_dataset = load_dataset("allenai/lila", 'ood')
113 | lila_ood_dataset = concatenate_datasets([lila_ood_dataset['train'], lila_ood_dataset['validation'], lila_ood_dataset['test']])
114 | print(f"lila ood dataset size: {len(lila_ood_dataset)}")
115 | print(f"Processing lila ood dataset..")
116 | for instance in lila_ood_dataset:
117 | output = {}
118 | if instance['input'].lower() not in all_QA:
119 | all_QA[instance['input'].lower()] = [instance['output_program'].lower()]
120 | elif max([similar(x, instance['output_program'].lower()) for x in all_QA[instance['input'].lower()]]) < 0.7:
121 | all_QA[instance['input'].lower()].append(instance['output_program'].lower())
122 | else:
123 | continue
124 |
125 | output['question'] = add_python_prompt(instance['input']).strip()
126 | output['answer'] = instance['output_program'].strip()
127 | if len(output['question']) > 0 and len(output['answer']) > 0:
128 | outputs.append(output)
129 |
130 | print(f"Original datasets size: {len(metamath_dataset['train'])+len(math_instruct_dataset['train'])+len(lila_ood_dataset)}")
131 | print(f"Prepared dataset size: {len(outputs)}")
132 | random.shuffle(outputs)
133 |
134 | print(f"Assigning train/eval splits..")
135 | train_set = outputs[:int(0.98*len(outputs))]
136 | eval_set = outputs[int(0.98*len(outputs)):]
137 |
138 | print("Writing train/eval files..")
139 |
140 | with open('data/model_training/train.json', 'w') as f:
141 | json.dump(train_set, f, indent=1)
142 |
143 | with open('data/model_training/eval.json', 'w') as f:
144 | json.dump(eval_set, f, indent=1)
145 |
146 | print("DONE!")
147 |
--------------------------------------------------------------------------------
/eval/MATH/MATH_compute_metric_zero_shot_CoT.py:
--------------------------------------------------------------------------------
1 | import pprint
2 | import json
3 |
4 | incorrect_prediction_records = []
5 |
6 | def _fix_fracs(string):
7 | substrs = string.split("\\frac")
8 | new_str = substrs[0]
9 | if len(substrs) > 1:
10 | substrs = substrs[1:]
11 | for substr in substrs:
12 | new_str += "\\frac"
13 | if substr[0] == "{":
14 | new_str += substr
15 | else:
16 | try:
17 | assert len(substr) >= 2
18 | except:
19 | return string
20 | a = substr[0]
21 | b = substr[1]
22 | if b != "{":
23 | if len(substr) > 2:
24 | post_substr = substr[2:]
25 | new_str += "{" + a + "}{" + b + "}" + post_substr
26 | else:
27 | new_str += "{" + a + "}{" + b + "}"
28 | else:
29 | if len(substr) > 2:
30 | post_substr = substr[2:]
31 | new_str += "{" + a + "}" + b + post_substr
32 | else:
33 | new_str += "{" + a + "}" + b
34 | string = new_str
35 | return string
36 |
37 |
38 | def _fix_a_slash_b(string):
39 | if len(string.split("/")) != 2:
40 | return string
41 | a = string.split("/")[0]
42 | b = string.split("/")[1]
43 | try:
44 | a = int(a)
45 | b = int(b)
46 | assert string == "{}/{}".format(a, b)
47 | new_string = "\\frac{" + str(a) + "}{" + str(b) + "}"
48 | return new_string
49 | except:
50 | return string
51 |
52 |
53 | def _remove_right_units(string):
54 | # "\\text{ " only ever occurs (at least in the val set) when describing units
55 | if "\\text{ " in string:
56 | splits = string.split("\\text{ ")
57 | assert len(splits) == 2
58 | return splits[0]
59 | else:
60 | return string
61 |
62 |
63 | def _fix_sqrt(string):
64 | if "\\sqrt" not in string:
65 | return string
66 | splits = string.split("\\sqrt")
67 | new_string = splits[0]
68 | for split in splits[1:]:
69 | if split[0] != "{":
70 | a = split[0]
71 | new_substr = "\\sqrt{" + a + "}" + split[1:]
72 | else:
73 | new_substr = "\\sqrt" + split
74 | new_string += new_substr
75 | return new_string
76 |
77 |
78 | def _strip_string(string):
79 | # linebreaks
80 | string = string.replace("\n", "")
81 | # print(string)
82 |
83 | # remove inverse spaces
84 | string = string.replace("\\!", "")
85 | # print(string)
86 |
87 | # replace \\ with \
88 | string = string.replace("\\\\", "\\")
89 | # print(string)
90 |
91 | # replace tfrac and dfrac with frac
92 | string = string.replace("tfrac", "frac")
93 | string = string.replace("dfrac", "frac")
94 | # print(string)
95 |
96 | # remove \left and \right
97 | string = string.replace("\\left", "")
98 | string = string.replace("\\right", "")
99 | # print(string)
100 |
101 | # Remove circ (degrees)
102 | string = string.replace("^{\\circ}", "")
103 | string = string.replace("^\\circ", "")
104 |
105 | # remove dollar signs
106 | string = string.replace("\\$", "")
107 |
108 | # remove units (on the right)
109 | string = _remove_right_units(string)
110 |
111 | # remove percentage
112 | string = string.replace("\\%", "")
113 | string = string.replace("\%", "")
114 |
115 | # " 0." equivalent to " ." and "{0." equivalent to "{." Alternatively, add "0" if "." is the start of the string
116 | string = string.replace(" .", " 0.")
117 | string = string.replace("{.", "{0.")
118 | # if empty, return empty string
119 | if len(string) == 0:
120 | return string
121 | if string[0] == ".":
122 | string = "0" + string
123 |
124 | # to consider: get rid of e.g. "k = " or "q = " at beginning
125 | if len(string.split("=")) == 2:
126 | if len(string.split("=")[0]) <= 2:
127 | string = string.split("=")[1]
128 |
129 | # fix sqrt3 --> sqrt{3}
130 | string = _fix_sqrt(string)
131 |
132 | # remove spaces
133 | string = string.replace(" ", "")
134 |
135 | # \frac1b or \frac12 --> \frac{1}{b} and \frac{1}{2}, etc. Even works with \frac1{72} (but not \frac{72}1). Also does a/b --> \\frac{a}{b}
136 | string = _fix_fracs(string)
137 |
138 | # manually change 0.5 --> \frac{1}{2}
139 | if string == "0.5":
140 | string = "\\frac{1}{2}"
141 |
142 | # NOTE: X/Y changed to \frac{X}{Y} in dataset, but in simple cases fix in case the model output is X/Y
143 | string = _fix_a_slash_b(string)
144 |
145 | return string
146 |
147 |
148 | def is_equiv(str1, str2, verbose=False):
149 | if str1 is None and str2 is None:
150 | print("WARNING: Both None")
151 | return True
152 | if str1 is None or str2 is None:
153 | return False
154 |
155 | try:
156 | ss1 = _strip_string(str1)
157 | ss2 = _strip_string(str2)
158 | if verbose:
159 | print(ss1, ss2)
160 | return ss1 == ss2
161 | except:
162 | return str1 == str2
163 |
164 |
165 | def last_boxed_only_string(string):
166 | idx = string.rfind("\\boxed")
167 | if idx < 0:
168 | idx = string.rfind("\\fbox")
169 | if idx < 0:
170 | return None
171 |
172 | i = idx
173 | right_brace_idx = None
174 | num_left_braces_open = 0
175 | while i < len(string):
176 | if string[i] == "{":
177 | num_left_braces_open += 1
178 | if string[i] == "}":
179 | num_left_braces_open -= 1
180 | if num_left_braces_open == 0:
181 | right_brace_idx = i
182 | break
183 | i += 1
184 |
185 |     if right_brace_idx is None:
186 | retval = None
187 | else:
188 | retval = string[idx:right_brace_idx + 1]
189 |
190 | return retval
191 |
192 | def remove_boxed(s):
193 | left = "\\boxed{"
194 | try:
195 | assert s[:len(left)] == left
196 | assert s[-1] == "}"
197 | return s[len(left):-1]
198 | except:
199 | return None
200 |
201 | def get_result(ground_truth_solution, generated_text, question, pos):
202 | answer = remove_boxed(last_boxed_only_string(ground_truth_solution))
203 | if "The answer is:" in generated_text:
204 | predicted_answer = generated_text.rsplit("The answer is:")[-1].strip()
205 | elif "The answer is " in generated_text:
206 | predicted_answer = generated_text.rsplit("The answer is ")[-1].strip()
207 |     else: # TODO: most likely generation was truncated mid-answer; in rare cases the model doesn't produce the "The answer is" format.
208 | predicted_answer = "" # answer is missing
209 |
210 | try:
211 | equiv = is_equiv(predicted_answer, answer)
212 | except:
213 | equiv = False
214 | if not equiv:
215 | incorrect_prediction_record = {
216 | "Record#": pos+1,
217 | "question": question,
218 | "correct_answer": answer,
219 | "predicted_answer": predicted_answer,
220 | "correct_completion": ground_truth_solution,
221 | "predicted_completion": generated_text,
222 | }
223 | incorrect_prediction_records.append(incorrect_prediction_record)
224 | return equiv, predicted_answer == ""
225 |
226 |
227 | correct, total, missing_answer_count = 0, 0, 0
228 | file_path = "data/predictions/MATH/Arithmo-Mistral-7B/predictions_Arithmo_math_zero_shot_CoT.json"
229 |
230 | with open(file_path, 'r') as f:
231 | data = json.load(f)
232 | for i, d in enumerate(data):
233 | question = d["question"]
234 | ground_truth_gen = d["ground_truth"]
235 | predicted_gen = d["prediction"]
236 | is_correct, is_answer_missing = get_result(ground_truth_gen, predicted_gen, question, i)
237 | correct += is_correct
238 | total += 1
239 | missing_answer_count += is_answer_missing
240 |
241 | print(f"\nTotal Instances: {total}, Correct Count: {correct}, Accuracy (Correct Count/Total Instances): {correct/total}")
242 | print(f"\nOut of {total} instances, couldn't find answer for {missing_answer_count} instances.")
243 |
244 | with open('data/predictions/MATH/Arithmo-Mistral-7B/incorrect_predictions_Arithmo_math_zero_shot_CoT.json', 'w') as f:
245 | json.dump(incorrect_prediction_records, f, indent=1)
246 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/data/python_coding_prompts.txt:
--------------------------------------------------------------------------------
1 | Invent a Python code that dances through this mathematical maze with clever reasoning.
2 | Solve this mathematical enigma with a Python program that thinks logically.
3 | Pythonize the problem.
4 | Let's do Python coding.
5 | Utilize your programming finesse to fashion a Python solution for this intricate mathematical query.
6 | Pythonize your solution now.
7 | Solve it, Python way.
8 | Solve it with Python.
9 | Craft a Python program that adeptly reasons through this math puzzle, leading to a creative and elegant solution.
10 | I challenge you to write a program that employs logical reasoning to answer this math query, ensuring it compiles and provides the correct result.
11 | Pythonize this task.
12 | Can you write a Python solution for this math question that requires reasoning?
13 | Python it into reality.
14 | How would you write a Python program that applies logical thinking to this math challenge and produces a correct solution? Let's see your code and how it works.
15 | Channel your inner math wizard and Python your way to a creative solution for this problem.
16 | Write a Python program that can reason its way through this math puzzle.
17 | Python your way through this math conundrum, elegantly applying logical thinking to develop a successful program that computes the correct answer.
18 | Develop a Python program that employs Sherlock Holmes-like reasoning to unravel this math conundrum.
19 | Let's craft a Python solution that embraces logical reasoning to dissect this complex mathematical question.
20 | Your challenge is to write a program that employs logical reasoning to answer this math query, making sure it compiles and returns the correct result.
21 | Erect a Python solution that acts as a mathematical detective, uncovering the clues through logical reasoning.
22 | I'm in need of a Python program that can think its way to a solution for this math problem.
23 | Let's see how you can use Python to tackle this math challenge. Write a program that applies logical operations and arithmetic to find the correct answer.
24 | Guide Python through this mathematical labyrinth using sharp reasoning to decode the puzzle.
25 | We can turn this math problem into a Python program that excels in logical deduction and creative problem-solving. The code should be both functional and precise.
26 | I invite you to write a program that uses logical reasoning to answer this math query. It's important that the program compiles and delivers accurate results.
27 | Transform this math problem into a program that excels in logical deduction and creative problem-solving. Your Python code should be both functional and accurate.
28 | Write Python code here.
29 | How would you approach this math problem using Python? Demonstrate your programming logic and accuracy by writing a program that solves it correctly and efficiently.
30 | Pythonize the solution.
31 | I challenge you to write a Python program that uses logic to answer this math query.
32 | Let's create a Python program that navigates this mathematical labyrinth with precision and provides an accurate solution.
33 | Write a Python program that operates like a mathematical detective, using reasoning to solve this perplexing math question.
34 | Solve this using Python.
35 | Embark on a Python programming quest that harnesses the power of logical reasoning to conquer this mathematical challenge.
36 | Let's write a Python program that plays the role of a mathematical detective, cracking the case with logical deductions.
37 | I'm looking for a Python solution that uses logical thinking to crack this math problem.
38 | Can you conjure a Python program to unravel this mathematical enigma?
39 | Let's embark on a Python programming adventure to unravel this mathematical enigma using logical reasoning.
40 | Demonstrate your logical reasoning and programming prowess by writing a Python program that cracks this math riddle. Make sure your code is error-free and returns the expected output.
41 | Python programming time.
42 | Embark on a Python programming odyssey that explores this mathematical challenge through the lens of logical reasoning.
43 | Python-program your way to a solution for this math reasoning problem.
44 | Let's program in Python in the response.
45 | Python your way through this math conundrum, elegantly applying logical thinking to develop a successful program.
46 | Write a Python program now.
47 | Create a Python program.
48 | Let's write a Python program.
49 | Solve this mathematical conundrum with a Python program that thinks logically.
50 | Unleash the power of Python to dissect this mathematical puzzle through thoughtful reasoning.
51 | Take on the challenge of crafting a Python program that employs sharp reasoning to answer this math question.
52 | Invent a Python program that can reason through this math question effectively.
53 | This math problem is a good opportunity to showcase your Python skills. Write a program that uses logical reasoning and arithmetic to find the solution. Your program should be elegant, error-free, and correct.
54 | Let's tackle this math problem by developing a Python program that provides a precise solution.
55 | Craft Python code.
56 | Let's see if you can Python your way to a solution for this math conundrum.
57 | Python it up!
58 | Pythonize your code quickly.
59 | I'm looking for a Python program that can apply logical thinking to solve this math challenge.
60 | Let's write Python code.
61 | Create in Python language.
62 | Solve this problem in Python.
63 | Pythonize your approach.
64 | Write a Python program that channels the spirit of a mathematical detective, using reasoning to conquer this complex math question.
65 | This is a fun math problem that challenges your logical reasoning and programming skills. Write a Python program that can solve it. Make sure your program is clean, error-free, and correct.
66 | Sculpt a Python code that masterfully reasons through this math puzzle, guaranteeing it compiles without errors and offers an elegant solution to the problem.
67 | Let's try to solve this math problem using Python. Write a program that uses logical thinking and arithmetic to find the solution. Your program should be elegant, error-free, and correct.
68 | Code this in Python.
69 | Weave a Python program that navigates this mathematical labyrinth by relying on astute reasoning.
70 | Solve using Python now.
71 | I challenge you to write a Python program that reasons through this math query.
72 | Let's write a program.
73 | Initiate a Python programming endeavor that employs mathematical detective work, unraveling the math challenge through logical reasoning.
74 | Let's compose a Python solution that skillfully unravels this mathematical enigma using sharp deductive thinking.
75 | Python your way through this math conundrum, elegantly applying logical thinking to create a successful program that computes the correct answer.
76 | Create a Python program that uses reasoning to decipher this mathematical question.
77 | Pythonize and code it now.
78 | Let's write a Python program to solve it.
79 | Let's employ Python to untangle this math problem using logical thinking.
80 | This math problem requires some logical thinking and programming skills. Let's write a Python program that can handle it. Your program should run smoothly and produce the right answer.
81 | Let's Python this out.
82 | Write Python code now.
83 | Python your way through this mathematical enigma with a code that uses clever reasoning.
84 | Solve with Python.
85 | Time to don your coding hat and Python your way through this math puzzle.
86 | Pythonize your solution.
87 | Inscribe a Python code that navigates this mathematical labyrinth with mathematical precision.
88 | Could you craft a Python program to tackle this mathematical challenge?
89 | This math problem is a test of your logical reasoning and Python skills. Write a program that solves it using Python. Your program should be efficient, error-free, and correct.
90 | Write a Python program that showcases your mathematical reasoning and problem-solving abilities. Make sure your code is error-free and returns the expected output.
91 | Python code, go!
92 | Sculpt a Python code that masterfully reasons through this math puzzle to find an elegant solution.
93 | Let's code in Python.
94 | Sculpt a Python code that masterfully reasons through this math puzzle, ensuring it compiles without errors and offers an elegant solution to the problem.
95 | Solve this mathematical enigma with a Python program that employs elegant logical reasoning.
96 | Let's Pythonize the task.
97 | Pythonize your ideas.
98 | Craft a Python program that uses logic to answer this math question.
99 | Python craftmanship required.
100 | Use your programming prowess to create a Python solution for this mathematical question.
101 | Time to write Python.
102 | Let's see if you can Python your way to a solution for this mathematical riddle.
103 | Write a Python program quickly.
104 | Utilize your programming prowess to create a Python solution for this mathematical question.
105 | Begin coding in Python.
106 | Let's Python-script this.
107 | Let's script in Python.
108 | Python code needed here.
109 | Let's Pythonize this task.
110 | Transform this math problem into a Python code that excels in logical deduction and creative problem-solving.
111 | Write a Python program that can reason its way through this mathematical puzzle.
112 | Write Python code.
113 | Let's work on a Python program to solve this math problem with precision and accuracy.
114 | Let's create a Python program that takes a Sherlock Holmes approach to reason through this math riddle.
115 | Show me your Python skills by writing a program that tackles this math problem with logic and creativity. Your code should be clean, efficient, and accurate.
116 | Python your way through this math conundrum with a program that elegantly applies logical thinking.
117 | I challenge you to write a program that uses logical reasoning to answer this math query. Ensure that the program compiles and returns the correct result.
118 | Solve this with Python.
119 | Create a Python code that serves as a mathematical detective, solving this math riddle through logical reasoning.
120 | Time for Python solution.
121 | We can transform this math problem into a Python program that excels in logical deduction and creative problem-solving. Your code should be both functional and accurate.
122 | Utilize your programming skills to craft a solution for this mathematical question, ensuring the code not only compiles successfully but also returns the correct result.
123 | Python your way through this math conundrum, elegantly applying logical thinking to create a successful program that delivers the correct answer.
124 | Let's Python this task.
125 | Utilize your programming skills to craft a solution for this mathematical question that not only compiles but also returns the correct result.
126 | Let's use Python to solve this math problem. Write a program that demonstrates your logical thinking and programming skills. Your program should be neat, error-free, and precise.
127 | Craft a Python program that thinks like a mathematician, navigating this problem with logical precision.
128 | It's time to wear your coding thinking cap and code a Python solution for this math challenge.
129 | Let's dive into Python and code up a solution for this math problem.
130 | Transform this math problem into a Python program that excels in logical deduction and creative problem-solving. Your Python code should be both functional and accurate.
131 | Let's script a Python program that skillfully applies deductive reasoning to solve this intriguing math question.
132 | Let's embark on a Python coding journey to decrypt this mathematical enigma using sharp deductive thinking.
133 | Start coding with Python.
134 | Show me your Python prowess by writing a program that cracks this math riddle. Your program should be well-written, error-free, and accurate.
135 | Pythonize your thoughts.
136 | Time for Python magic.
137 | Python is a great language for mathematical reasoning. Can you write a program that solves this math problem using Python? Your code should be clear, concise, and correct.
138 | Sculpt a Python code that masterfully reasons through this math puzzle, ensuring it compiles without errors and provides an elegant solution.
139 | Harness Python's capabilities to traverse this mathematical maze with clever problem-solving and logical reasoning.
140 | Write a Python program immediately.
141 | Transform this math problem into a Python program that excels in logical deduction and creative problem-solving.
142 | Let's embark on a Python coding journey to tackle this mathematical reasoning problem.
143 | Utilize your programming skills to craft a solution for this mathematical question, guaranteeing the code compiles without errors and returns the correct result.
144 | This is a tricky math problem that tests your logical reasoning and programming skills. Write a Python program that can solve it. Make sure your program is flawless and returns the correct result.
145 | Solve it Python-style.
146 | Solve this mathematical enigma with a Python solution that employs elegant logical reasoning.
147 | Let's embark on the task of developing a Python program to solve this math problem with precision.
148 | Compose a Python program that thinks like a mathematical detective, uncovering the secrets hidden in this math puzzle.
149 | How good are you at mathematical reasoning and Python programming? Write a program that answers this math question using Python. Your program should be well-structured, error-free, and accurate.
150 | Code it with Python.
151 | Utilize Python's capabilities to navigate this mathematical maze with clever problem-solving and logical reasoning.
152 | Python your way to a solution for this mathematical riddle.
153 | Think like a coding mathematician and write a Python program for this problem.
154 | Let's see how you can use Python to reason through this math problem. Write a program that uses logical expressions and calculations to find the answer. Your program should be simple, error-free, and accurate.
155 | Think through this math problem and Python your way to a solution.
156 | Craft a Python program that reasons through this math puzzle.
157 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ## Updates
2 |
3 | ### [January 2024] New Model Release: Arithmo2-Mistral-7B
4 |
5 | The **Arithmo2-Mistral-7B** model improves on the initially released Arithmo-Mistral-7B model on both the GSM8K and MATH benchmarks, with absolute improvements of **+1.7% on GSM8K, +3.0% on GSM8K PoT, and +1.9% on MATH**. We release both the [merged model](https://huggingface.co/upaya07/Arithmo2-Mistral-7B) and the [LoRA adapter](https://huggingface.co/upaya07/Arithmo2-Mistral-7B-adapter).
6 | - Arithmo2-Mistral-7B is trained on the same data as Arithmo-Mistral-7B, except that we removed both the validation and test sets of the [lila ood subset](https://huggingface.co/datasets/allenai/lila/viewer/ood) to avoid any possibility of data leakage.
7 | - Added [NEFTune](https://arxiv.org/pdf/2310.05914.pdf) (see the sketch after this list).
8 | - Enabled sample packing for faster training.
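
As a hedged aside (this repo does not include the Arithmo2 training script), NEFTune simply adds noise to embedding vectors during fine-tuning, and recent `transformers` releases expose it as a single training argument. The snippet below is illustrative only; the alpha value and output path are assumptions, not the settings used for Arithmo2:

```python
# Illustrative only: enabling NEFTune via transformers' Trainer
# (requires transformers >= 4.36); not the actual Arithmo2 training setup.
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="arithmo2-checkpoints",  # hypothetical output path
    neftune_noise_alpha=5,  # assumed noise scale; a typical value from the NEFTune paper
)
```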
9 |
10 |
11 | # Arithmo Models
15 |
16 |
17 | Both [Arithmo2-Mistral-7B](https://huggingface.co/upaya07/Arithmo2-Mistral-7B) and [Arithmo-Mistral-7B](https://huggingface.co/akjindal53244/Arithmo-Mistral-7B) are trained to reason through and answer mathematical problems, and are also capable of writing a Python program that, upon execution, prints the answer to the question. We used [Mistral-7B](https://huggingface.co/mistralai/Mistral-7B-v0.1) as the base model and **QLoRA to fine-tune it on a single RTX 4090 GPU**.
18 |
19 |
20 | ## Benchmark Results
21 |
22 | The Arithmo2-Mistral-7B model is fine-tuned with 4-bit QLoRA on a single GPU and is competitive with state-of-the-art mathematical-reasoning models trained with full supervised fine-tuning. Refer to the [Comparing Arithmo models with other SFT LLM models](https://github.com/akjindal53244/Arithmo/tree/master?tab=readme-ov-file#comparing-arithmo-models-with-other-sft-llm-models) section for more details.
23 |
24 |
25 |
26 |
27 | | Model Name | Checkpoint | Training Approach | Prompt Approach | GSM8k | MATH | License |
28 | |------------|------------|-------------------|-----------------|-------|------|---------|
29 | | Arithmo-Mistral-7B | [🤗 Model](https://huggingface.co/akjindal53244/Arithmo-Mistral-7B) | 4-bit QLoRA Fine-tuning on 1x4090 | Zero-Shot CoT | 74.7 | 25.3 | Apache-2.0 |
30 | | Arithmo-Mistral-7B | [🤗 Model](https://huggingface.co/akjindal53244/Arithmo-Mistral-7B) | 4-bit QLoRA Fine-tuning on 1x4090 | Zero-Shot PoT | 71.2 | - | Apache-2.0 |
31 | | 🔥 Arithmo2-Mistral-7B | [🤗 Model](https://huggingface.co/upaya07/Arithmo2-Mistral-7B) · [🤗 LoRA Adapter](https://huggingface.co/upaya07/Arithmo2-Mistral-7B-adapter) | 4-bit QLoRA Fine-tuning on 1x4090 | Zero-Shot CoT | 76.4 | 27.2 | Apache-2.0 |
32 | | 🔥 Arithmo2-Mistral-7B | [🤗 Model](https://huggingface.co/upaya07/Arithmo2-Mistral-7B) · [🤗 LoRA Adapter](https://huggingface.co/upaya07/Arithmo2-Mistral-7B-adapter) | 4-bit QLoRA Fine-tuning on 1x4090 | Zero-Shot PoT | 74.2 | - | Apache-2.0 |
33 |
68 | - **Zero-Shot CoT**: Given a question as the prompt, the model generates reasoning steps to solve the question along with the answer. We check whether the answer matches the ground truth.
69 | - **Zero-Shot PoT**: We prompt the model to generate a Python program for the given question. During inference, we execute the generated Python program and check whether its output matches the ground-truth answer; a minimal execution sketch is shown below. Visit the [Model Card](https://huggingface.co/akjindal53244/Arithmo-Mistral-7B) to see a few PoT examples.
70 |
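For illustration, here is a minimal, hypothetical sketch of the PoT check: run a model-generated program in a subprocess and compare its printed output against the ground truth. The helper names and the timeout are our own assumptions, not the repo's evaluation code (see `eval/gsm8k/` for the actual scripts).

```
import subprocess

# Hypothetical helper (not the repo's API): execute a generated Python
# program in a subprocess and return whatever it prints on stdout.
def run_generated_program(program_text, timeout_s=10):
    result = subprocess.run(
        ["python", "-c", program_text],
        capture_output=True, text=True, timeout=timeout_s,
    )
    return result.stdout.strip()

# A generated program counts as correct when its printed output equals
# the ground-truth answer numerically.
def is_correct(program_text, ground_truth):
    try:
        return float(run_generated_program(program_text)) == float(ground_truth)
    except (subprocess.TimeoutExpired, ValueError):
        return False  # non-numeric output or a hanging program counts as incorrect
```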
71 |
72 | ## Installation
73 |
74 | ```
75 | pip install "transformers>=4.34.0"
76 | pip install accelerate
77 | pip install sentencepiece
78 | pip install protobuf
79 |
80 | # If you are GPU poor like me
81 | pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cpu
82 |
83 | # If you have a GPU.
84 | pip install --pre torch --index-url https://download.pytorch.org/whl/nightly/cu118
85 | pip install scipy
86 | pip install bitsandbytes
87 | ```
88 |
89 |
90 | ## How to query the model
91 |
92 | ```
93 | # Set `run_model_on_gpu` to `False` if you are running on CPU. The model will generate reasoning steps with the answer for your question.
94 | # If you want to generate a Python program instead, uncomment line 69 of the script, which adds a Python prompt.
95 |
96 | $ python query_model.py
97 | ```
98 | **Note:** The above script automatically does the formatting for you, so you just need to type the question (e.g., `What is 2+2?`) without any prefix like `Question:`. Check out `query_model.py` for more details.
99 |
100 | ##### Sample Input:
101 | ```
102 | Question: There are total 10 children. I have to give 1 apple to first child, 2 apples to second child, 3 apples to third child, and so on. How many apples do I need?
103 | ```
104 | ##### Model Output:
105 | ```
106 | Answer: The total number of apples needed is the sum of the first 10 positive integers.
107 | This can be calculated using the formula for the sum of an arithmetic series:
108 | \[S = \frac{n}{2}(a_1 + a_n),\]
109 | where $S$ is the sum, $n$ is the number of terms, $a_1$ is the first term, and $a_n$ is the last term.
110 | In this case, $n = 10$, $a_1 = 1$, and $a_n = 10$.
111 | Plugging these values into the formula, we get:
112 | \[S = \frac{10}{2}(1 + 10) = 5(11) = \boxed{55}.\]
113 | The answer is: 55
114 | ```
115 |
116 | Arithmo-Mistral-7B is trained with the following formats:
117 | #### CoT Format (generate reasoning steps with answer):
118 | ```
119 | Question: <question>
120 |
121 | Answer:
122 | ```
123 |
124 | #### PoT Format (generate a python program):
125 | ```
126 | Question: <question> <python_prompt>
127 |
128 | Answer:
129 | ```
130 | The model performs best when queried in these formats from your own script; a minimal querying sketch follows below.
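Below is a minimal sketch (not the repo's `query_model.py`) of querying the model in the CoT format with Hugging Face `transformers`; the generation settings are illustrative defaults:

```
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "akjindal53244/Arithmo-Mistral-7B"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto")

# Build the prompt in the CoT training format shown above.
question = "What is 2+2?"
prompt = f"Question: {question}\n\nAnswer:"

inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=256)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```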
131 |
132 |
133 | ## Model Finetuning Details
134 | Due to a limited compute budget, the Mistral-7B model was fine-tuned with QLoRA on a single RTX 4090 GPU. We plan to do a full fine-tuning of the Mistral-7B model on this dataset to further improve performance.
135 |
136 |
137 | ## Reproducing Results
138 |
139 | ### Model Training Data
140 | Model training data is prepared by combining the [MetaMathQA](https://huggingface.co/datasets/meta-math/MetaMathQA) (train split), [lila OOD](https://huggingface.co/datasets/allenai/lila/viewer/ood) (train, validation, and test splits), and [MathInstruct](https://huggingface.co/datasets/TIGER-Lab/MathInstruct) (train split) datasets. We have verified that our training data has no overlap with the GSM8K and MATH test sets. Further post-processing steps are applied, namely 1) deduplication, 2) randomly lower-casing x% of inputs, 3) adding a diverse set of Python prompts for PoT, and 4) standardizing the answer format (a sketch of the first two steps appears below). The final dataset has ~540,000 examples. Also, to train the Arithmo2-Mistral-7B model, we removed both the validation and test sets of the [lila ood subset](https://huggingface.co/datasets/allenai/lila/viewer/ood) to avoid the possibility of data leakage.
141 |
142 | ```
143 | # This script generates train and eval sets.
144 | $ python data_prep/prepare_model_traininig_data.py
145 | ```
146 |
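For illustration only, here is a minimal sketch of the deduplication and random lower-casing steps; the field names, fraction, and seed below are assumptions, not the values used in `data_prep/prepare_model_traininig_data.py`:

```
import random

# Hypothetical sketch of two of the post-processing steps described above:
# 1) exact deduplication and 2) lower-casing a random fraction of inputs.
def postprocess(examples, lowercase_frac=0.1, seed=0):
    rng = random.Random(seed)
    seen, out = set(), []
    for ex in examples:
        key = (ex["question"], ex["answer"])
        if key in seen:  # 1) drop exact duplicates
            continue
        seen.add(key)
        if rng.random() < lowercase_frac:  # 2) randomly lower-case the input
            ex = {**ex, "question": ex["question"].lower()}
        out.append(ex)
    return out
```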
147 | Here is [Huggingface link](https://huggingface.co/datasets/akjindal53244/Arithmo-Data) for our dataset.
148 |
149 | ### Answer/Response Generation
150 |
151 | #### Prediction on [GSM8K Test set](https://huggingface.co/datasets/gsm8k/viewer/main/test)
152 | ##### Zero-Shot with CoT:
153 | ```
154 | # This script saves its output to `data/predictions/gsm8k/Arithmo-Mistral-7B/predictions_Arithmo_gsm8k_zero_shot_CoT.json`.
155 | $ python eval/gsm8k/gsm8k_generate_response_zero_shot_CoT.py
156 | ```
157 |
158 | ##### Zero-Shot with PoT:
159 | ```
160 | # This script saves its output to `data/predictions/gsm8k/Arithmo-Mistral-7B/predictions_Arithmo_gsm8k_zero_shot_PoT.json`.
161 | $ python eval/gsm8k/gsm8k_generate_response_zero_shot_PoT.py
162 | ```
163 |
164 | #### Prediction on [MATH Test set](https://huggingface.co/datasets/competition_math/viewer/default/test)
165 | ##### Zero-Shot with CoT:
166 | ```
167 | # This script saves its output to `data/predictions/MATH/Arithmo-Mistral-7B/predictions_Arithmo_math_zero_shot_CoT.json`.
168 | $ python eval/MATH/MATH_generate_response_zero_shot_CoT.py
169 | ```
170 |
171 | **Zero-Shot with PoT**: Answers in the MATH test set consist of expressions like `(x+2)/5` rather than a single numeric value. Currently, Arithmo-Mistral-7B's PoT training data doesn't contain expressions as answers, so we don't run PoT-based inference on the MATH dataset.
172 |
173 |
174 | ### Metrics Computation
175 |
176 | #### [GSM8K Test set](https://huggingface.co/datasets/gsm8k/viewer/main/test)
177 | ##### Zero-Shot with CoT:
178 | ```
179 | $ python eval/gsm8k/gsm8k_compute_metric_zero_shot_CoT.py
180 | ```
181 | Expected output: `Total Instances: 1319, Correct Count: 985, Accuracy (Correct Count/Total Instances): 0.7467`
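As a rough illustration of the CoT check (the actual logic lives in `eval/gsm8k/gsm8k_compute_metric_zero_shot_CoT.py`), the final answer can be pulled from a response of the form shown in the sample model output above; the regex and helper name are assumptions:

```
import re

# Hypothetical helper: extract the value after "The answer is:" from a
# CoT response, for comparison against the ground-truth answer.
def extract_answer(response):
    match = re.search(r"The answer is:\s*(.+)", response)
    return match.group(1).strip().rstrip(".") if match else None

print(extract_answer("...\nThe answer is: 55"))  # prints: 55
```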
182 | ##### Zero-Shot with PoT:
183 | ```
184 | # Step-1: This script executes generated python programs and saves results into a file.
185 | $ python eval/gsm8k/gsm8k_write_zero_shot_PoT_outputs.py > data/predictions/gsm8k/Arithmo-Mistral-7B/gsm8k_zero_shot_PoT_results.txt
186 |
187 | # Step-2: This script computes accuracy by taking above file as input.
188 | $ python eval/gsm8k/gsm8k_compute_metric_zero_shot_PoT.py
189 | ```
190 | Expected output: `Total Instances: 1309, Correct Count: 932, Accuracy (Correct Count/Total Instances): 0.7119`
191 |
192 | #### [MATH Test set](https://huggingface.co/datasets/competition_math/viewer/default/test)
193 | ##### Zero-Shot with CoT:
194 | ```
195 | $ python eval/MATH/MATH_compute_metric_zero_shot_CoT.py
196 | ```
197 | The scoring script is borrowed from the official [math repository](https://github.com/hendrycks/math/blob/main/modeling/math_equivalence.py).
198 | Expected output: `Total Instances: 5000, Correct Count: 1266, Accuracy (Correct Count/Total Instances): 0.2532`
199 |
200 |
201 | ## Comparing Arithmo models with other SFT LLM models
202 | Results for all models except `Arithmo2-Mistral-7B` and `Arithmo-Mistral-7B` are taken from the [MetaMath](https://github.com/meta-math/MetaMath/blob/main/README.MD) repository.
203 |
204 | | Model | GSM8k Pass@1 | MATH Pass@1 | Model Training details |
205 | |---------------------|--------------|-------------|------------------------|
206 | | MPT-7B | 6.8 | 3.0 | -- |
207 | | Falcon-7B | 6.8 | 2.3 | -- |
208 | | LLaMA-1-7B | 11.0 | 2.9 | -- |
209 | | LLaMA-2-7B | 14.6 | 2.5 | -- |
210 | | MPT-30B | 15.2 | 3.1 | -- |
211 | | LLaMA-1-13B | 17.8 | 3.9 | -- |
212 | | GPT-Neo-2.7B | 19.5 | -- | -- |
213 | | Falcon-40B | 19.6 | 2.5 | -- |
214 | | Baichuan-chat-13B | 23.9 | -- | -- |
215 | | Vicuna-v1.3-13B | 27.6 | -- | -- |
216 | | LLaMA-2-13B | 28.7 | 3.9 | -- |
217 | | InternLM-7B | 31.2 | -- | -- |
218 | | ChatGLM-2-6B | 32.4 | -- | -- |
219 | | GPT-J-6B | 34.9 | -- | -- |
220 | | LLaMA-1-33B | 35.6 | 3.9 | -- |
221 | | LLaMA-2-34B | 42.2 | 6.24 | -- |
222 | | RFT-7B | 50.3 | -- | -- |
223 | | LLaMA-1-65B | 50.9 | 10.6 | -- |
224 | | Qwen-7B | 51.6 | -- | -- |
225 | | WizardMath-7B | 54.9 | 10.7 | -- |
226 | | LLaMA-2-70B | 56.8 | 13.5 | -- |
227 | | WizardMath-13B | 63.9 | 14.0 | -- |
228 | | MetaMath-7B | 66.5 | 19.8 | -- |
229 | | MetaMath-13B | 72.3 | 22.4 | -- |
230 | | Arithmo-Mistral-7B (PoT) | 71.2 | -- | SFT: 4-bit QLoRA |
231 | | Arithmo2-Mistral-7B (PoT) | 74.2 | -- | SFT: 4-bit QLoRA |
232 | | MetaMath-Mistral-7B | 77.7 | 28.2 | SFT: Full fine-tuned |
233 | | Arithmo-Mistral-7B | 74.7 | 25.3 | SFT: 4-bit QLoRA |
234 | | 🔥 **Arithmo2-Mistral-7B** | **76.4** | **27.2** | **SFT: 4-bit QLoRA** |
235 |
236 |
237 | ### Citation
238 | To cite Arithmo models:
239 | ```
240 | @misc{jindal_2023_arithmo,
241 | author = {Jindal, Ashvini},
242 | title = {Arithmo-Mistral-7B: Mathematical Reasoning Model},
243 | howpublished = {Hugging Face},
244 | month = {October},
245 | year = {2023},
246 | url = {https://huggingface.co/akjindal53244/Arithmo-Mistral-7B}
247 | }
248 | ```
249 |
250 | ### Support My Work
251 | Building LLMs takes time and resources; if you find my work interesting, your support would be epic!
252 |
253 |
254 |
255 | P.S.: If you are interested in providing compute support, please reach out to [Ashvini Jindal](https://www.linkedin.com/in/ashvini-jindal-26653262/)
256 |
257 |
258 | ### References
259 |
260 | ```
261 | @article{yu2023metamath,
262 | title={MetaMath: Bootstrap Your Own Mathematical Questions for Large Language Models},
263 | author={Yu, Longhui and Jiang, Weisen and Shi, Han and Yu, Jincheng and Liu, Zhengying and Zhang, Yu and Kwok, James T and Li, Zhenguo and Weller, Adrian and Liu, Weiyang},
264 | journal={arXiv preprint arXiv:2309.12284},
265 | year={2023}
266 | }
267 |
268 | @article{Yue2023mammoth,
269 | title={MAmmoTH: Building math generalist models through hybrid instruction tuning},
270 |   author={Yue, Xiang and Qu, Xingwei and Zhang, Ge and Fu, Yao and Huang, Wenhao and Sun, Huan and Su, Yu and Chen, Wenhu},
271 | journal={arXiv preprint arXiv:2309.05653},
272 | year={2023}
273 | }
274 |
275 | @article{mishra2022lila,
276 | title={Lila: A unified benchmark for mathematical reasoning},
277 |   author={Mishra, Swaroop and Finlayson, Matthew and Lu, Pan and Tang, Leonard and Welleck, Sean and Baral, Chitta and Rajpurohit, Tanmay and Tafjord, Oyvind and Sabharwal, Ashish and Clark, Peter and Kalyan, Ashwin},
278 | journal={arXiv preprint arXiv:2210.17517},
279 | year={2022}
280 | }
281 | ```
282 |
285 |
--------------------------------------------------------------------------------
/data/predictions/gsm8k/Arithmo-Mistral-7B/gsm8k_zero_shot_PoT_results.txt:
--------------------------------------------------------------------------------
1 | 18
2 | 18
3 | =========
4 | 3.0
5 | 3
6 | =========
7 | 65000.0
8 | 70000
9 | =========
10 | 540
11 | 540
12 | =========
13 | 140
14 | 20
15 | =========
16 | 40.0
17 | 64
18 | =========
19 | 260
20 | 260
21 | =========
22 | 120.0
23 | 160
24 | =========
25 | 300.0
26 | 45
27 | =========
28 | 460.0
29 | 460
30 | =========
31 | 366.0
32 | 366
33 | =========
34 | 694
35 | 694
36 | =========
37 | 12
38 | 13
39 | =========
40 | 1
41 | 18
42 | =========
43 | 60.0
44 | 60
45 | =========
46 | 125.0
47 | 125
48 | =========
49 | 310
50 | 230
51 | =========
52 | 57500
53 | 57500
54 | =========
55 | 7.0
56 | 7
57 | =========
58 | 4.0
59 | 6
60 | =========
61 | 0.27777777777777773
62 | 15
63 | =========
64 | 29
65 | 14
66 | =========
67 | 7
68 | 7
69 | =========
70 | 10
71 | 8
72 | =========
73 | 26.0
74 | 26
75 | =========
76 | 2
77 | 2
78 | =========
79 | 243.0
80 | 243
81 | =========
82 | 16.0
83 | 16
84 | =========
85 | 25
86 | 25
87 | =========
88 | 104
89 | 104
90 | =========
91 | 80.0
92 | 80
93 | =========
94 | 35.0
95 | 35
96 | =========
97 | 70
98 | 70
99 | =========
100 | 23
101 | 23
102 | =========
103 | 9.0
104 | 9
105 | =========
106 | 75.0
107 | 75
108 | =========
109 | 2
110 | 2
111 | =========
112 | 2.5
113 | 10
114 | =========
115 | 18.0
116 | 18
117 | =========
118 | 8
119 | 8
120 | =========
121 | -200
122 | 200
123 | =========
124 | 26
125 | 26
126 | =========
127 | 1875.0
128 | 48
129 | =========
130 | 20.0
131 | 20
132 | =========
133 | 44.0
134 | 104
135 | =========
136 | -117
137 | 163
138 | =========
139 | 200.0
140 | 800
141 | =========
142 | 8
143 | 8
144 | =========
145 | 30
146 | 30
147 | =========
148 | 294.0
149 | 294
150 | =========
151 | 0.8
152 | 5
153 | =========
154 | 15
155 | 15
156 | =========
157 | 40
158 | 40
159 | =========
160 | 40
161 | 40
162 | =========
163 | 14
164 | 14
165 | =========
166 | 3.0
167 | 3
168 | =========
169 | 83
170 | 83
171 | =========
172 | 57.0
173 | 57
174 | =========
175 | 187
176 | 187
177 | =========
178 | 17
179 | 17
180 | =========
181 | 1430.0
182 | 1430
183 | =========
184 | 216097.11875753338
185 | 25000
186 | =========
187 | 1596.0
188 | 1596
189 | =========
190 | 300.0
191 | 300
192 | =========
193 | 36.0
194 | 36
195 | =========
196 | 48
197 | 48
198 | =========
199 | 595
200 | 595
201 | =========
202 | 36
203 | 36
204 | =========
205 | 60
206 | 60
207 | =========
208 | 20475.0
209 | 7425
210 | =========
211 | 60
212 | 60
213 | =========
214 | 221
215 | 221
216 | =========
217 | 255.0
218 | 255
219 | =========
220 | 86
221 | 88
222 | =========
223 | 7.5
224 | 60
225 | =========
226 | 5
227 | 5
228 | =========
229 | 100.0
230 | 100
231 | =========
232 | -6.0
233 | 6
234 | =========
235 | 70.0
236 | 70
237 | =========
238 | 10.0
239 | 10
240 | =========
241 | 17
242 | 17
243 | =========
244 | 623
245 | 623
246 | =========
247 | 600
248 | 600
249 | =========
250 | 15
251 | 15
252 | =========
253 | 44
254 | 44
255 | =========
256 | 22.0
257 | 22
258 | =========
259 | 1286.153286000001
260 | 9360
261 | =========
262 | 8000
263 | 8000
264 | =========
265 | 24.0
266 | 24
267 | =========
268 | 225.0
269 | 225
270 | =========
271 | 28
272 | 28
273 | =========
274 | 3
275 | 4
276 | =========
277 | -1.8181818181818181
278 | 36
279 | =========
280 | 348
281 | 348
282 | =========
283 | 40
284 | 40
285 | =========
286 | 3.0
287 | 3
288 | =========
289 | 12
290 | 12
291 | =========
292 | 5
293 | 5
294 | =========
295 | 58
296 | 58
297 | =========
298 | 175.0
299 | 175
300 | =========
301 | 9.75
302 | 6
303 | =========
304 | 20
305 | 26
306 | =========
307 | 140.0
308 | 140
309 | =========
310 | 500
311 | 500
312 | =========
313 | 20
314 | 20
315 | =========
316 | 72
317 | 72
318 | =========
319 | 3
320 | 3
321 | =========
322 | 50
323 | 50
324 | =========
325 | 28.0
326 | 28
327 | =========
328 | 45.0
329 | 45
330 | =========
331 | 16.0
332 | 16
333 | =========
334 | 24
335 | 24
336 | =========
337 | 25
338 | 25
339 | =========
340 | 6
341 | 6
342 | =========
343 | 40.0
344 | 90
345 | =========
346 | 120 - y
347 | 42
348 | =========
349 | 360
350 | 360
351 | =========
352 | 4
353 | 4
354 | =========
355 | 99076.92307692308
356 | 95200
357 | =========
358 | 240
359 | 240
360 | =========
361 | 27.0
362 | 27
363 | =========
364 | 29
365 | 48
366 | =========
367 | 50
368 | 50
369 | =========
370 | 50
371 | 10
372 | =========
373 | 4
374 | 10
375 | =========
376 | 82
377 | 82
378 | =========
379 | 120
380 | 120
381 | =========
382 | 880
383 | 880
384 | =========
385 | 70000.0
386 | 10000
387 | =========
388 | 30
389 | 30
390 | =========
391 | 940
392 | 940
393 | =========
394 | 10
395 | 60
396 | =========
397 | 13
398 | 13
399 | =========
400 | 720
401 | 720
402 | =========
403 | 40
404 | 40
405 | =========
406 | 6
407 | 6
408 | =========
409 | 25.2
410 | 29
411 | =========
412 | 105
413 | 105
414 | =========
415 | 70
416 | 70
417 | =========
418 | 20.0
419 | 20
420 | =========
421 | 700.0
422 | 400
423 | =========
424 | 140
425 | 140
426 | =========
427 | 16.0
428 | 16
429 | =========
430 | 20
431 | 20
432 | =========
433 | 4000.0
434 | 4000
435 | =========
436 | 2125.0
437 | 2125
438 | =========
439 | 75
440 | 75
441 | =========
442 | 14
443 | 30
444 | =========
445 | 16
446 | 16
447 | =========
448 | 4.0
449 | 4
450 | =========
451 | 5.0
452 | 5
453 | =========
454 | 4.0
455 | 4
456 | =========
457 | -1
458 | 48
459 | =========
460 | 272
461 | 272
462 | =========
463 | 280.0
464 | 280
465 | =========
466 | 1400
467 | 1400
468 | =========
469 | 1440
470 | 80
471 | =========
472 | 34.0
473 | 34
474 | =========
475 | 15
476 | 15
477 | =========
478 | 16.0
479 | 16
480 | =========
481 | 32
482 | 32
483 | =========
484 | 532.4666656177045
485 | 92
486 | =========
487 | 50
488 | 50
489 | =========
490 | 15
491 | 15
492 | =========
493 | 77
494 | 77
495 | =========
496 | 5
497 | 5
498 | =========
499 | 13
500 | 16
501 | =========
502 | 18
503 | 18
504 | =========
505 | 120
506 | 120
507 | =========
508 | 150.0
509 | 150
510 | =========
511 | 1210
512 | 1210
513 | =========
514 | 51
515 | 51
516 | =========
517 | 18000.0
518 | 18000
519 | =========
520 | 95.0
521 | 95
522 | =========
523 | 25.0
524 | 15
525 | =========
526 | 100
527 | 100
528 | =========
529 | 350.0
530 | 350
531 | =========
532 | 122
533 | 122
534 | =========
535 | 130
536 | 130
537 | =========
538 | 79
539 | 20
540 | =========
541 | 160
542 | 160
543 | =========
544 | 17.0
545 | 23
546 | =========
547 | 2
548 | 2
549 | =========
550 | 0.0
551 | 25
552 | =========
553 | 30.0
554 | 30
555 | =========
556 | 5
557 | 5
558 | =========
559 | 106.12080000000002
560 | 106
561 | =========
562 | 13
563 | 50
564 | =========
565 | 34.0
566 | 34
567 | =========
568 | 360
569 | 360
570 | =========
571 | 0.05
572 | 5
573 | =========
574 | 91
575 | 91
576 | =========
577 | 18
578 | 24
579 | =========
580 | 10
581 | 10
582 | =========
583 | 12.0
584 | 12
585 | =========
586 | 120
587 | 120
588 | =========
589 | 6277
590 | 6277
591 | =========
592 | 320
593 | 320
594 | =========
595 | 7500.0
596 | 7500
597 | =========
598 | 55
599 | 55
600 | =========
601 | 114200.0
602 | 114200
603 | =========
604 | 100
605 | 100
606 | =========
607 | 31
608 | 31
609 | =========
610 | 98
611 | 98
612 | =========
613 | 120
614 | 98
615 | =========
616 | 860
617 | 860
618 | =========
619 | 2600.0
620 | 2600
621 | =========
622 | 76
623 | 76
624 | =========
625 | 150.0
626 | 145
627 | =========
628 | 15
629 | 10
630 | =========
631 | 26/3
632 | 4
633 | =========
634 | 5.0
635 | 5
636 | =========
637 | 250
638 | 250
639 | =========
640 | 2.0
641 | 8
642 | =========
643 | 44.0
644 | 44
645 | =========
646 | 220.0
647 | 220
648 | =========
649 | 15
650 | 15
651 | =========
652 | 45.0
653 | 45
654 | =========
655 | 54.0
656 | 54
657 | =========
658 | 70
659 | 70
660 | =========
661 | 90
662 | 90
663 | =========
664 | 140
665 | 140
666 | =========
667 | 20000.0
668 | 20000
669 | =========
670 | 180
671 | 180
672 | =========
673 | 9
674 | 9
675 | =========
676 | 33
677 | 33
678 | =========
679 | 9.0
680 | 9
681 | =========
682 | 1.0
683 | 1
684 | =========
685 | 21
686 | 21
687 | =========
688 | 276000.0
689 | 276000
690 | =========
691 | 50
692 | 50
693 | =========
694 | 75.0
695 | 75
696 | =========
697 | 12
698 | 12
699 | =========
700 | 21
701 | 21
702 | =========
703 | 10
704 | 10
705 | =========
706 | 150
707 | 31
708 | =========
709 | 90
710 | 90
711 | =========
712 | 68
713 | 68
714 | =========
715 | 280
716 | 280
717 | =========
718 | 21.0
719 | 21
720 | =========
721 | 5
722 | 6
723 | =========
724 | 3.0
725 | 3
726 | =========
727 | 250
728 | 250
729 | =========
730 | 7
731 | 20
732 | =========
733 | 7.0
734 | 7
735 | =========
736 | 27000.0
737 | 27000
738 | =========
739 | 32.0
740 | 32
741 | =========
742 | 300.0
743 | 300
744 | =========
745 | 5600
746 | 5600
747 | =========
748 | 6
749 | 17
750 | =========
751 | 70
752 | 70
753 | =========
754 | 82
755 | 73
756 | =========
757 | 18
758 | 18
759 | =========
760 | 84
761 | 84
762 | =========
763 | 192
764 | 192
765 | =========
766 | 45.0
767 | 45
768 | =========
769 | 5600.0
770 | 5600
771 | =========
772 | 6.0
773 | 6
774 | =========
775 | 144
776 | 168
777 | =========
778 | 11.0
779 | 11
780 | =========
781 | 3100.0
782 | 62
783 | =========
784 | 270
785 | 270
786 | =========
787 | 8
788 | 8
789 | =========
790 | 400
791 | 400
792 | =========
793 | 9500.0
794 | 9500
795 | =========
796 | 118000.0
797 | 118000
798 | =========
799 | 51.0
800 | 91
801 | =========
802 | 1375
803 | 1375
804 | =========
805 | 4
806 | 4
807 | =========
808 | 762.0
809 | 762
810 | =========
811 | 20
812 | 20
813 | =========
814 | 5.000000000000002
815 | 5
816 | =========
817 | 315.0
818 | 315
819 | =========
820 | 3200
821 | 3200
822 | =========
823 | 138
824 | 138
825 | =========
826 | 9
827 | 9
828 | =========
829 | 4.0
830 | 4
831 | =========
832 | 40
833 | 40
834 | =========
835 | 3
836 | 6
837 | =========
838 | 7.0
839 | 7
840 | =========
841 | 2450.0
842 | 2450
843 | =========
844 | 195
845 | 195
846 | =========
847 | 1.0
848 | 68
849 | =========
850 | 360
851 | 360
852 | =========
853 | 21
854 | 21
855 | =========
856 | 90
857 | 90
858 | =========
859 | 5
860 | 8
861 | =========
862 | 3.0
863 | 3
864 | =========
865 | 16.0
866 | 16
867 | =========
868 | 390
869 | 390
870 | =========
871 | 2.0
872 | 2
873 | =========
874 | 75.0
875 | 75
876 | =========
877 | 83
878 | 83
879 | =========
880 | 3
881 | 3
882 | =========
883 | 370
884 | 370
885 | =========
886 | 3.0
887 | 3
888 | =========
889 | 55
890 | 55
891 | =========
892 | 350
893 | 500
894 | =========
895 | 37500
896 | 31800
897 | =========
898 | 78
899 | 78
900 | =========
901 | 64
902 | 8
903 | =========
904 | 15.0
905 | 15
906 | =========
907 | 5800
908 | 1300
909 | =========
910 | 3200
911 | 3200
912 | =========
913 | 4
914 | 4
915 | =========
916 | 10
917 | 10
918 | =========
919 | 8.0
920 | 16
921 | =========
922 | 4.0
923 | 6
924 | =========
925 | 8
926 | 8
927 | =========
928 | 2050
929 | 2050
930 | =========
931 | 105
932 | 91
933 | =========
934 | 32
935 | 32
936 | =========
937 | 2029.4117647058824
938 | 120000
939 | =========
940 | 35
941 | 30
942 | =========
943 | 18
944 | 14
945 | =========
946 | 156.0
947 | 156
948 | =========
949 | 12.0
950 | 12
951 | =========
952 | 123
953 | 123
954 | =========
955 | 135
956 | 15
957 | =========
958 | 15
959 | 8
960 | =========
961 | 1.0
962 | 1
963 | =========
964 | 24
965 | 9
966 | =========
967 | 75.0
968 | 75
969 | =========
970 | 14
971 | 14
972 | =========
973 | 224000.0
974 | 224000
975 | =========
976 | 7
977 | 14
978 | =========
979 | 31
980 | 31
981 | =========
982 | 2.0
983 | 2
984 | =========
985 | 14
986 | 14
987 | =========
988 | 44.85
989 | 31
990 | =========
991 | 7800.0
992 | 8400
993 | =========
994 | 44
995 | 44
996 | =========
997 | 100
998 | 100
999 | =========
1000 | 6.0
1001 | 6
1002 | =========
1003 | 310
1004 | 310
1005 | =========
1006 | 72.0
1007 | 72
1008 | =========
1009 | 1
1010 | 1
1011 | =========
1012 | 90
1013 | 60
1014 | =========
1015 | 160.0
1016 | 160
1017 | =========
1018 | 5
1019 | 4
1020 | =========
1021 | 280
1022 | 260
1023 | =========
1024 | 87
1025 | 87
1026 | =========
1027 | 180000.0
1028 | 180000
1029 | =========
1030 | 2
1031 | 2
1032 | =========
1033 | 310
1034 | 310
1035 | =========
1036 | 9
1037 | 9
1038 | =========
1039 | 36.0
1040 | 36
1041 | =========
1042 | 2640
1043 | 2640
1044 | =========
1045 | 8.0
1046 | 8
1047 | =========
1048 | 10
1049 | 10
1050 | =========
1051 | 21
1052 | 21
1053 | =========
1054 | 24.390243902439025
1055 | 20
1056 | =========
1057 | 45
1058 | 45
1059 | =========
1060 | 34
1061 | 34
1062 | =========
1063 | 21
1064 | 21
1065 | =========
1066 | 3
1067 | 2
1068 | =========
1069 | 20.0
1070 | 20
1071 | =========
1072 | 5.333333333333333
1073 | 4
1074 | =========
1075 | 25
1076 | 25
1077 | =========
1078 | 0.05
1079 | 20
1080 | =========
1081 | 23.0
1082 | 23
1083 | =========
1084 | 12.053571428571429
1085 | 6
1086 | =========
1087 | 49
1088 | 49
1089 | =========
1090 | 18
1091 | 18
1092 | =========
1093 | 9
1094 | 9
1095 | =========
1096 | 19
1097 | 19
1098 | =========
1099 | -10
1100 | 18
1101 | =========
1102 | 1198.0
1103 | 1198
1104 | =========
1105 | 320.0
1106 | 320
1107 | =========
1108 | 50
1109 | 50
1110 | =========
1111 | 5
1112 | 5
1113 | =========
1114 | 240000.0
1115 | 240000
1116 | =========
1117 | 60.0
1118 | 45
1119 | =========
1120 | 48
1121 | 48
1122 | =========
1123 | 15
1124 | 15
1125 | =========
1126 | 50.0
1127 | 50
1128 | =========
1129 | 15
1130 | 15
1131 | =========
1132 | 21
1133 | 21
1134 | =========
1135 | 73.0
1136 | 803
1137 | =========
1138 | 67
1139 | 67
1140 | =========
1141 | 350
1142 | 350
1143 | =========
1144 | 4.5
1145 | 2
1146 | =========
1147 | 32.0
1148 | 32
1149 | =========
1150 | 16
1151 | 16
1152 | =========
1153 | 80.0
1154 | 80
1155 | =========
1156 | 36
1157 | 36
1158 | =========
1159 | 88.0
1160 | 88
1161 | =========
1162 | -12
1163 | 6
1164 | =========
1165 | 12.0
1166 | 12
1167 | =========
1168 | 15
1169 | 15
1170 | =========
1171 | 34.0
1172 | 34
1173 | =========
1174 | 27.27272727272727
1175 | 20
1176 | =========
1177 | 56
1178 | 92
1179 | =========
1180 | 38
1181 | 38
1182 | =========
1183 | 3
1184 | 3
1185 | =========
1186 | 25
1187 | 25
1188 | =========
1189 | 168
1190 | 168
1191 | =========
1192 | 12
1193 | 12
1194 | =========
1195 | 48
1196 | 48
1197 | =========
1198 | 14400
1199 | 14400
1200 | =========
1201 | 4
1202 | 4
1203 | =========
1204 | 135000
1205 | 81
1206 | =========
1207 | 22.0
1208 | 22
1209 | =========
1210 | 200.0
1211 | 50
1212 | =========
1213 | 150.0
1214 | 200
1215 | =========
1216 | 2000.0
1217 | 2000
1218 | =========
1219 | -40.0
1220 | 20
1221 | =========
1222 | 42000.0
1223 | 168000
1224 | =========
1225 | 1.5
1226 | 3
1227 | =========
1228 | 1080.0
1229 | 1110
1230 | =========
1231 | 5
1232 | 5
1233 | =========
1234 | 2500
1235 | 25
1236 | =========
1237 | 24.0
1238 | 56
1239 | =========
1240 | 350.0
1241 | 350
1242 | =========
1243 | 3140.0
1244 | 3140
1245 | =========
1246 | 40.0
1247 | 40
1248 | =========
1249 | 3000
1250 | 3000
1251 | =========
1252 | 17000
1253 | 17000
1254 | =========
1255 | 12.0
1256 | 12
1257 | =========
1258 | 312
1259 | 284
1260 | =========
1261 | -6.0
1262 | 8
1263 | =========
1264 | 570
1265 | 570
1266 | =========
1267 | 150
1268 | 150
1269 | =========
1270 | 11.0
1271 | 11
1272 | =========
1273 | 300
1274 | 150
1275 | =========
1276 | 22
1277 | 26
1278 | =========
1279 | 13
1280 | 13
1281 | =========
1282 | 132.0
1283 | 132
1284 | =========
1285 | 1
1286 | 1
1287 | =========
1288 | 30
1289 | 30
1290 | =========
1291 | 6.0
1292 | 6
1293 | =========
1294 | 5.0
1295 | 5
1296 | =========
1297 | 15.0
1298 | 15
1299 | =========
1300 | 7.0
1301 | 7
1302 | =========
1303 | 8
1304 | 2
1305 | =========
1306 | 11
1307 | 17
1308 | =========
1309 | 98.0
1310 | 98
1311 | =========
1312 | 80
1313 | 80
1314 | =========
1315 | 49.0
1316 | 49
1317 | =========
1318 | 0.0
1319 | 59
1320 | =========
1321 | 20.0
1322 | 20
1323 | =========
1324 | 6.0
1325 | 6
1326 | =========
1327 | 10
1328 | 2
1329 | =========
1330 | 5
1331 | 5
1332 | =========
1333 | 539
1334 | 539
1335 | =========
1336 | 112
1337 | 112
1338 | =========
1339 | 6
1340 | 4
1341 | =========
1342 | 11050.0
1343 | 11050
1344 | =========
1345 | 50.0
1346 | 50
1347 | =========
1348 | 6400
1349 | 6400
1350 | =========
1351 | 240
1352 | 150
1353 | =========
1354 | 1920.0
1355 | 1920
1356 | =========
1357 | 78
1358 | 78
1359 | =========
1360 | 45
1361 | 45
1362 | =========
1363 | 35.0
1364 | 35
1365 | =========
1366 | 2
1367 | 2
1368 | =========
1369 | 84
1370 | 84
1371 | =========
1372 | 9
1373 | 9
1374 | =========
1375 | 71.0
1376 | 71
1377 | =========
1378 | 18.0
1379 | 18
1380 | =========
1381 | -4
1382 | 6
1383 | =========
1384 | 30
1385 | 30
1386 | =========
1387 | 100.89999999999998
1388 | 1
1389 | =========
1390 | 1200
1391 | 1200
1392 | =========
1393 | 70
1394 | 120
1395 | =========
1396 | 4
1397 | 4
1398 | =========
1399 | 3
1400 | 3
1401 | =========
1402 | 80
1403 | 80
1404 | =========
1405 | 1
1406 | 6
1407 | =========
1408 | 10.0
1409 | 10
1410 | =========
1411 | 80
1412 | 80
1413 | =========
1414 | 80.0
1415 | 20
1416 | =========
1417 | 5
1418 | 5
1419 | =========
1420 | 20
1421 | 20
1422 | =========
1423 | 621
1424 | 621
1425 | =========
1426 | 15400.0
1427 | 15400
1428 | =========
1429 | 11.0
1430 | 11
1431 | =========
1432 | 84.0
1433 | 84
1434 | =========
1435 | 26
1436 | 26
1437 | =========
1438 | 40
1439 | 40
1440 | =========
1441 | 240.0
1442 | 240
1443 | =========
1444 | 220
1445 | 220
1446 | =========
1447 | 6.0
1448 | 6
1449 | =========
1450 | 4
1451 | 4
1452 | =========
1453 | 6
1454 | 6
1455 | =========
1456 | -10.0
1457 | -10
1458 | =========
1459 | -4
1460 | 4
1461 | =========
1462 | 16
1463 | 16
1464 | =========
1465 | 32
1466 | 32
1467 | =========
1468 | 100.0
1469 | 25
1470 | =========
1471 | 47.666666666666664
1472 | 21
1473 | =========
1474 | 200
1475 | 200
1476 | =========
1477 | 38
1478 | 38
1479 | =========
1480 | 224
1481 | 112
1482 | =========
1483 | 40
1484 | 40
1485 | =========
1486 | -9
1487 | 10
1488 | =========
1489 | 43
1490 | 16
1491 | =========
1492 | 273.0
1493 | 273
1494 | =========
1495 | 45
1496 | 26
1497 | =========
1498 | 18
1499 | 18
1500 | =========
1501 | 2800
1502 | 1600
1503 | =========
1504 | 144.0
1505 | 144
1506 | =========
1507 | 2.0
1508 | 2
1509 | =========
1510 | 120
1511 | 120
1512 | =========
1513 | 4
1514 | 4
1515 | =========
1516 | 1875.0
1517 | 525
1518 | =========
1519 | 110
1520 | 110
1521 | =========
1522 | 120.0
1523 | 120
1524 | =========
1525 | 300.0
1526 | 300
1527 | =========
1528 | 30000
1529 | 90000
1530 | =========
1531 | 288
1532 | 160
1533 | =========
1534 | 375
1535 | 375
1536 | =========
1537 | 18
1538 | 18
1539 | =========
1540 | 32.0
1541 | 32
1542 | =========
1543 | 280
1544 | 280
1545 | =========
1546 | 63
1547 | 63
1548 | =========
1549 | 39
1550 | 39
1551 | =========
1552 | 29
1553 | 29
1554 | =========
1555 | 74
1556 | 74
1557 | =========
1558 | 9
1559 | 9
1560 | =========
1561 | 36
1562 | 12
1563 | =========
1564 | -21.0
1565 | 21
1566 | =========
1567 | 2.0
1568 | 48
1569 | =========
1570 | 78
1571 | 172
1572 | =========
1573 | 4
1574 | 11
1575 | =========
1576 | 48
1577 | 36
1578 | =========
1579 | 122
1580 | 66
1581 | =========
1582 | 19
1583 | 25
1584 | =========
1585 | 300.0
1586 | 300
1587 | =========
1588 | 60.0
1589 | 300
1590 | =========
1591 | 16
1592 | 16
1593 | =========
1594 | 8
1595 | 8
1596 | =========
1597 | 188
1598 | 188
1599 | =========
1600 | 18
1601 | 18
1602 | =========
1603 | 60.0
1604 | 35
1605 | =========
1606 | 39.0
1607 | 39
1608 | =========
1609 | 150.0
1610 | 50
1611 | =========
1612 | 7.0
1613 | 7
1614 | =========
1615 | 6.0
1616 | 6
1617 | =========
1618 | 20
1619 | 80
1620 | =========
1621 | 30
1622 | 30
1623 | =========
1624 | 130.0
1625 | 130
1626 | =========
1627 | 81
1628 | 81
1629 | =========
1630 | 100
1631 | 100
1632 | =========
1633 | 400
1634 | 398
1635 | =========
1636 | 27.0
1637 | 27
1638 | =========
1639 | 17.0
1640 | 17
1641 | =========
1642 | 550.0
1643 | 450
1644 | =========
1645 | 92
1646 | 92
1647 | =========
1648 | 54
1649 | 54
1650 | =========
1651 | 2.0
1652 | 2
1653 | =========
1654 | 160
1655 | 160
1656 | =========
1657 | 70
1658 | 70
1659 | =========
1660 | 3.0
1661 | 3
1662 | =========
1663 | 1.0
1664 | 16
1665 | =========
1666 | 90
1667 | 45
1668 | =========
1669 | 180.0
1670 | 180
1671 | =========
1672 | 82
1673 | 82
1674 | =========
1675 | 12
1676 | 12
1677 | =========
1678 | 120.0
1679 | 240
1680 | =========
1681 | 5
1682 | 5
1683 | =========
1684 | 59.0
1685 | 10
1686 | =========
1687 | 8.5
1688 | 9
1689 | =========
1690 | 175
1691 | 175
1692 | =========
1693 | 21
1694 | 21
1695 | =========
1696 | 15
1697 | 23
1698 | =========
1699 | 308
1700 | 308
1701 | =========
1702 | 220
1703 | 100
1704 | =========
1705 | 600
1706 | 600
1707 | =========
1708 | 37.0
1709 | 37
1710 | =========
1711 | 36.0
1712 | 36
1713 | =========
1714 | 11232
1715 | 11232
1716 | =========
1717 | 40.0
1718 | 40
1719 | =========
1720 | 48.0
1721 | 48
1722 | =========
1723 | 7
1724 | 7
1725 | =========
1726 | 500.0
1727 | 500
1728 | =========
1729 | 215
1730 | 215
1731 | =========
1732 | 129200.0
1733 | 129200
1734 | =========
1735 | 136
1736 | 120
1737 | =========
1738 | 1.0
1739 | 2
1740 | =========
1741 | 40
1742 | 40
1743 | =========
1744 | 800.0
1745 | 800
1746 | =========
1747 | 450
1748 | 30
1749 | =========
1750 | 52.0
1751 | 52
1752 | =========
1753 | 15.0
1754 | 15
1755 | =========
1756 | 318
1757 | 319
1758 | =========
1759 | 220.0
1760 | 220
1761 | =========
1762 | 1
1763 | 1
1764 | =========
1765 | 3
1766 | 3
1767 | =========
1768 | 42.0
1769 | 42
1770 | =========
1771 | 19
1772 | 13
1773 | =========
1774 | 293.3333333333333
1775 | 260
1776 | =========
1777 | 30.0
1778 | 90
1779 | =========
1780 | 69
1781 | 69
1782 | =========
1783 | 48
1784 | 48
1785 | =========
1786 | 10.0
1787 | 10
1788 | =========
1789 | 104
1790 | 104
1791 | =========
1792 | 5.0
1793 | 5
1794 | =========
1795 | 1260.0
1796 | 1800
1797 | =========
1798 | 12
1799 | 12
1800 | =========
1801 | 42.0
1802 | 42
1803 | =========
1804 | 3.0
1805 | 6
1806 | =========
1807 | 10.0
1808 | 10
1809 | =========
1810 | 8.0
1811 | 8
1812 | =========
1813 | 84.0
1814 | 7
1815 | =========
1816 | 65960
1817 | 65960
1818 | =========
1819 | 5500000
1820 | 1450000
1821 | =========
1822 | 30
1823 | 30
1824 | =========
1825 | 93000
1826 | 93000
1827 | =========
1828 | 312.0
1829 | 312
1830 | =========
1831 | 33.0
1832 | 33
1833 | =========
1834 | 10
1835 | 10
1836 | =========
1837 | 5.0
1838 | 5
1839 | =========
1840 | 36
1841 | 36
1842 | =========
1843 | 76
1844 | 76
1845 | =========
1846 | 509
1847 | 1509
1848 | =========
1849 | 3000
1850 | 3000
1851 | =========
1852 | 4
1853 | 7
1854 | =========
1855 | 8
1856 | 8
1857 | =========
1858 | 85
1859 | 85
1860 | =========
1861 | 160
1862 | 160
1863 | =========
1864 | 72.0
1865 | 72
1866 | =========
1867 | 50
1868 | 54
1869 | =========
1870 | 4
1871 | 4
1872 | =========
1873 | 17500.0
1874 | 17500
1875 | =========
1876 | 15
1877 | 10
1878 | =========
1879 | 4800.0
1880 | 4800
1881 | =========
1882 | 45
1883 | 45
1884 | =========
1885 | 5.0
1886 | 5
1887 | =========
1888 | 14
1889 | 14
1890 | =========
1891 | 4
1892 | 4
1893 | =========
1894 | 525.0
1895 | 1050
1896 | =========
1897 | 20
1898 | 17
1899 | =========
1900 | 12.0
1901 | 12
1902 | =========
1903 | 216
1904 | 216
1905 | =========
1906 | 43500.0
1907 | 43500
1908 | =========
1909 | 272000.0
1910 | 262500
1911 | =========
1912 | 10800.0
1913 | 10800
1914 | =========
1915 | 840
1916 | 840
1917 | =========
1918 | 29.0
1919 | 29
1920 | =========
1921 | 48
1922 | 48
1923 | =========
1924 | 79.0
1925 | 79
1926 | =========
1927 | 10.0
1928 | 10
1929 | =========
1930 | 156
1931 | 54
1932 | =========
1933 | 162000
1934 | 162000
1935 | =========
1936 | 140.0
1937 | 142
1938 | =========
1939 | 2100.0
1940 | 2100
1941 | =========
1942 | 75
1943 | 75
1944 | =========
1945 | 80
1946 | 80
1947 | =========
1948 | 2.0
1949 | 2
1950 | =========
1951 | 10
1952 | 10
1953 | =========
1954 | -10.0
1955 | 10
1956 | =========
1957 | 330000
1958 | 330000
1959 | =========
1960 | 120.0
1961 | 120
1962 | =========
1963 | 40.0
1964 | 3
1965 | =========
1966 | 7
1967 | 15
1968 | =========
1969 | 44.0
1970 | 44
1971 | =========
1972 | 7.0
1973 | 7
1974 | =========
1975 | 193
1976 | 193
1977 | =========
1978 | 32
1979 | 32
1980 | =========
1981 | 360.0
1982 | 360
1983 | =========
1984 | 120.0
1985 | 120
1986 | =========
1987 | 41
1988 | 53
1989 | =========
1990 | 3
1991 | 3
1992 | =========
1993 | 132.0
1994 | 132
1995 | =========
1996 | 4.0
1997 | 4
1998 | =========
1999 | 4.0
2000 | 4
2001 | =========
2002 | -5
2003 | 2
2004 | =========
2005 | 9.0
2006 | 9
2007 | =========
2008 | 12.0
2009 | 12
2010 | =========
2011 | 25
2012 | 33
2013 | =========
2014 | 240.0
2015 | 240
2016 | =========
2017 | 36
2018 | 36
2019 | =========
2020 | 120
2021 | 120
2022 | =========
2023 | 576.0
2024 | 576
2025 | =========
2026 | 20
2027 | 20
2028 | =========
2029 | 298
2030 | 298
2031 | =========
2032 | 80.0
2033 | 80
2034 | =========
2035 | 50.0
2036 | 50
2037 | =========
2038 | 11
2039 | 11
2040 | =========
2041 | 14
2042 | 14
2043 | =========
2044 | 80
2045 | 80
2046 | =========
2047 | 13
2048 | 13
2049 | =========
2050 | 100
2051 | 100
2052 | =========
2053 | 36.0
2054 | 7
2055 | =========
2056 | 3360.0
2057 | 5760
2058 | =========
2059 | 49.0
2060 | 25
2061 | =========
2062 | 32.0
2063 | 32
2064 | =========
2065 | 68
2066 | 68
2067 | =========
2068 | 2
2069 | 9
2070 | =========
2071 | 5
2072 | 5
2073 | =========
2074 | 145.0
2075 | 145
2076 | =========
2077 | 27
2078 | 27
2079 | =========
2080 | 720.0
2081 | 720
2082 | =========
2083 | 8.0
2084 | 8
2085 | =========
2086 | 135
2087 | 135
2088 | =========
2089 | 130
2090 | 200
2091 | =========
2092 | 2800
2093 | 2800
2094 | =========
2095 | 42
2096 | 50
2097 | =========
2098 | 50.0
2099 | 50
2100 | =========
2101 | 120.0
2102 | 120
2103 | =========
2104 | 1400.0
2105 | 9
2106 | =========
2107 | 8.0
2108 | 8
2109 | =========
2110 | 168
2111 | 168
2112 | =========
2113 | 3000.0
2114 | 3000
2115 | =========
2116 | 45.0
2117 | 45
2118 | =========
2119 | 3.0
2120 | 6
2121 | =========
2122 | 14
2123 | 14
2124 | =========
2125 | 576
2126 | 576
2127 | =========
2128 | 10.0
2129 | 10
2130 | =========
2131 | 385000.0
2132 | 385000
2133 | =========
2134 | 770
2135 | 770
2136 | =========
2137 | 5.0
2138 | 5
2139 | =========
2140 | 2
2141 | 2
2142 | =========
2143 | 175
2144 | 175
2145 | =========
2146 | 4
2147 | 4
2148 | =========
2149 | 1250
2150 | 2450
2151 | =========
2152 | 255
2153 | 255
2154 | =========
2155 | 160
2156 | 160
2157 | =========
2158 | 56.5
2159 | 18
2160 | =========
2161 | 50.0
2162 | 25
2163 | =========
2164 | 10.0
2165 | 10
2166 | =========
2167 | 140
2168 | 112
2169 | =========
2170 | 40.0
2171 | 40
2172 | =========
2173 | 1000
2174 | 1000
2175 | =========
2176 | 8.0
2177 | 8
2178 | =========
2179 | 10
2180 | 1
2181 | =========
2182 | 87
2183 | 87
2184 | =========
2185 | 6.666666666666667
2186 | 5
2187 | =========
2188 | 17
2189 | 17
2190 | =========
2191 | 50
2192 | 50
2193 | =========
2194 | 3
2195 | 3
2196 | =========
2197 | 2.0
2198 | 2
2199 | =========
2200 | 98
2201 | 98
2202 | =========
2203 | 23
2204 | 25
2205 | =========
2206 | 28
2207 | 28
2208 | =========
2209 | 24
2210 | 24
2211 | =========
2212 | 8
2213 | 8
2214 | =========
2215 | 4.0
2216 | 4
2217 | =========
2218 | 1100
2219 | 1100
2220 | =========
2221 | 28
2222 | 28
2223 | =========
2224 | 349.9999999999999
2225 | 350
2226 | =========
2227 | 336
2228 | 336
2229 | =========
2230 | 0
2231 | 3
2232 | =========
2233 | 1000
2234 | 4000
2235 | =========
2236 | 43
2237 | 43
2238 | =========
2239 | 300.0
2240 | 240
2241 | =========
2242 | 10
2243 | 6
2244 | 128
2245 | =========
2246 | 117.0
2247 | 89
2248 | =========
2249 | 7
2250 | 7
2251 | =========
2252 | 22
2253 | 22
2254 | =========
2255 | 75
2256 | 75
2257 | =========
2258 | 133.0
2259 | 133
2260 | =========
2261 | 60000
2262 | 60000
2263 | =========
2264 | 16
2265 | 16
2266 | =========
2267 | 27
2268 | 27
2269 | =========
2270 | 85.0
2271 | 85
2272 | =========
2273 | 125.0
2274 | 100
2275 | =========
2276 | 14
2277 | 14
2278 | =========
2279 | 490
2280 | 490
2281 | =========
2282 | 12
2283 | 12
2284 | =========
2285 | 60
2286 | 60
2287 | =========
2288 | 600.0
2289 | 675
2290 | =========
2291 | 90
2292 | 110
2293 | =========
2294 | 3
2295 | 4
2296 | =========
2297 | 50.0
2298 | 50
2299 | =========
2300 | 10
2301 | 10
2302 | =========
2303 | 10.0
2304 | 10
2305 | =========
2306 | 276
2307 | 276
2308 | =========
2309 | 800.0
2310 | 800
2311 | =========
2312 | 4400.0
2313 | 4400
2314 | =========
2315 | 38.0
2316 | 38
2317 | =========
2318 | 330.0
2319 | 255
2320 | =========
2321 | 215
2322 | 25
2323 | =========
2324 | 2.0
2325 | 17
2326 | =========
2327 | 35
2328 | 54
2329 | =========
2330 | -15
2331 | 4
2332 | =========
2333 | 15.0
2334 | 15
2335 | =========
2336 | 260.0
2337 | 155
2338 | =========
2339 | 142
2340 | 142
2341 | =========
2342 | 25
2343 | 25
2344 | =========
2345 | 100.0
2346 | 100
2347 | =========
2348 | -4
2349 | 4
2350 | =========
2351 | 12
2352 | 108
2353 | =========
2354 | 100
2355 | 100
2356 | =========
2357 | 75.0
2358 | 75
2359 | =========
2360 | 250.0
2361 | 250
2362 | =========
2363 | 32
2364 | 32
2365 | =========
2366 | 20.0
2367 | 20
2368 | =========
2369 | 1040000
2370 | 2880000
2371 | =========
2372 | 540
2373 | 540
2374 | =========
2375 | 20.0
2376 | 20
2377 | =========
2378 | 4
2379 | 4
2380 | =========
2381 | 428.0
2382 | 428
2383 | =========
2384 | 3360
2385 | 1240
2386 | =========
2387 | 4
2388 | 6
2389 | =========
2390 | 9.0
2391 | 9
2392 | =========
2393 | 20
2394 | 20
2395 | =========
2396 | 1170.0
2397 | 1170
2398 | =========
2399 | 0.46
2400 | 70
2401 | =========
2402 | 4.0
2403 | 4
2404 | =========
2405 | 12.0
2406 | 12
2407 | =========
2408 | 50
2409 | 50
2410 | =========
2411 | 310
2412 | 310
2413 | =========
2414 | 60.0
2415 | 60
2416 | =========
2417 | 83
2418 | 79
2419 | =========
2420 | 7
2421 | 7
2422 | =========
2423 | 11.0
2424 | 11
2425 | =========
2426 | 5.714285714285714
2427 | 4
2428 | =========
2429 | 4500
2430 | 4500
2431 | =========
2432 | 15
2433 | 15
2434 | =========
2435 | 16
2436 | 16
2437 | =========
2438 | 6250.0
2439 | 6250
2440 | =========
2441 | 720.0
2442 | 720
2443 | =========
2444 | 35
2445 | 35
2446 | =========
2447 | 1260.0
2448 | 1260
2449 | =========
2450 | 18
2451 | 14
2452 | =========
2453 | 52
2454 | 52
2455 | =========
2456 | 12
2457 | 153
2458 | =========
2459 | 27
2460 | 27
2461 | =========
2462 | 11.0
2463 | 11
2464 | =========
2465 | 60
2466 | 60
2467 | =========
2468 | 14000.0
2469 | 14000
2470 | =========
2471 | 1128
2472 | 1128
2473 | =========
2474 | 294
2475 | 324
2476 | =========
2477 | 42.0
2478 | 42
2479 | =========
2480 | 2.5
2481 | 40
2482 | =========
2483 | 80
2484 | 80
2485 | =========
2486 | 0.1111111111111111
2487 | 48
2488 | =========
2489 | 140
2490 | 140
2491 | =========
2492 | 580
2493 | 120
2494 | =========
2495 | 15
2496 | 15
2497 | =========
2498 | 2
2499 | 2
2500 | =========
2501 | -4.0
2502 | 16
2503 | =========
2504 | 5600
2505 | 5600
2506 | =========
2507 | 10
2508 | 10
2509 | =========
2510 | 19
2511 | 19
2512 | =========
2513 | 210.0
2514 | 180
2515 | =========
2516 | 7
2517 | 12
2518 | =========
2519 | 11
2520 | 11
2521 | =========
2522 | 975.0
2523 | 975
2524 | =========
2525 | 10
2526 | 10
2527 | =========
2528 | 75.0
2529 | 75
2530 | =========
2531 | 70
2532 | 70
2533 | =========
2534 | 110.0
2535 | 110
2536 | =========
2537 | 75
2538 | 123
2539 | =========
2540 | 15.0
2541 | 15
2542 | =========
2543 | 144
2544 | 144
2545 | =========
2546 | 85/3
2547 | 13
2548 | =========
2549 | 6
2550 | 7
2551 | =========
2552 | 14000
2553 | 14000
2554 | =========
2555 | 2160
2556 | 3430
2557 | =========
2558 | 1520.0
2559 | 1520
2560 | =========
2561 | 3
2562 | 3
2563 | =========
2564 | 12
2565 | 30
2566 | =========
2567 | 40.0
2568 | 40
2569 | =========
2570 | 110
2571 | 110
2572 | =========
2573 | 80
2574 | 80
2575 | =========
2576 | 23
2577 | 23
2578 | =========
2579 | 15
2580 | 28
2581 | =========
2582 | 7
2583 | 7
2584 | =========
2585 | 15.0
2586 | 15
2587 | =========
2588 | 500
2589 | 500
2590 | =========
2591 | 40.0
2592 | 40
2593 | =========
2594 | 48
2595 | 48
2596 | =========
2597 | 13.0
2598 | 13
2599 | =========
2600 | 12.0
2601 | 12
2602 | =========
2603 | 132.0
2604 | 132
2605 | =========
2606 | 60
2607 | 60
2608 | =========
2609 | 41.0
2610 | 41
2611 | =========
2612 | 38.888888888888886
2613 | 7000
2614 | =========
2615 | 5.0
2616 | 5
2617 | =========
2618 | 575.0
2619 | 575
2620 | =========
2621 | 10.0
2622 | 10
2623 | =========
2624 | 24.0
2625 | 16
2626 | =========
2627 | 5.0
2628 | 5
2629 | =========
2630 | 25.0
2631 | 25
2632 | =========
2633 | 50
2634 | 50
2635 | =========
2636 | 750
2637 | 500
2638 | =========
2639 | 20.0
2640 | 20
2641 | =========
2642 | 34
2643 | 34
2644 | =========
2645 | 10.0
2646 | 10
2647 | =========
2648 | 15
2649 | 15
2650 | =========
2651 | 25.0
2652 | 25
2653 | =========
2654 | 55.0
2655 | 55
2656 | =========
2657 | 1.5
2658 | 1
2659 | =========
2660 | 480
2661 | 480
2662 | =========
2663 | 32
2664 | 26
2665 | =========
2666 | 74
2667 | 74
2668 | =========
2669 | 250
2670 | 250
2671 | =========
2672 | -4
2673 | 1
2674 | =========
2675 | 110.0
2676 | 110
2677 | =========
2678 | 16.0
2679 | 16
2680 | =========
2681 | 15
2682 | 15
2683 | =========
2684 | 0
2685 | 1
2686 | =========
2687 | 8.0
2688 | 8
2689 | =========
2690 | 7.666666666666666
2691 | 16
2692 | =========
2693 | 8.0
2694 | 8
2695 | =========
2696 | 5
2697 | 5
2698 | =========
2699 | 4 6
2700 | 10
2701 | =========
2702 | 16
2703 | 16
2704 | =========
2705 | 14
2706 | 14
2707 | =========
2708 | 38.0
2709 | 38
2710 | =========
2711 | 700
2712 | 700
2713 | =========
2714 | 64.0
2715 | 64
2716 | =========
2717 | 7
2718 | 6
2719 | =========
2720 | 6
2721 | 6
2722 | =========
2723 | 3
2724 | 3
2725 | =========
2726 | 23
2727 | 23
2728 | =========
2729 | 14.0
2730 | 14
2731 | =========
2732 | 12
2733 | 12
2734 | =========
2735 | 56
2736 | 56
2737 | =========
2738 | 90.00000000000001
2739 | 90
2740 | =========
2741 | 47
2742 | 47
2743 | =========
2744 | 4.0
2745 | 4
2746 | =========
2747 | 120
2748 | 60
2749 | =========
2750 | 2
2751 | 2
2752 | =========
2753 | 12
2754 | 12
2755 | =========
2756 | 2000.0
2757 | 2000
2758 | =========
2759 | -1
2760 | 1
2761 | =========
2762 | 85000.0
2763 | 85000
2764 | =========
2765 | 60.0
2766 | 60
2767 | =========
2768 | 60
2769 | 60
2770 | =========
2771 | 14
2772 | 14
2773 | =========
2774 | 21
2775 | 24
2776 | =========
2777 | 30
2778 | 15
2779 | =========
2780 | 410.0
2781 | 410
2782 | =========
2783 | 64800
2784 | 64800
2785 | =========
2786 | 250
2787 | 250
2788 | =========
2789 | 159
2790 | 159
2791 | =========
2792 | 4
2793 | 4
2794 | =========
2795 | 650
2796 | 650
2797 | =========
2798 | 280
2799 | 280
2800 | =========
2801 | -26042
2802 | 842
2803 | =========
2804 | 178.75
2805 | 205
2806 | =========
2807 | -110.0
2808 | 50
2809 | =========
2810 | 34.0
2811 | 34
2812 | =========
2813 | 17.0
2814 | 17
2815 | =========
2816 | 450
2817 | 450
2818 | =========
2819 | 13
2820 | 13
2821 | =========
2822 | 15.0
2823 | 15
2824 | =========
2825 | 42
2826 | 42
2827 | =========
2828 | -5
2829 | 5
2830 | =========
2831 | 150.0
2832 | 300
2833 | =========
2834 | 1440.0
2835 | 360
2836 | =========
2837 | 92
2838 | 452
2839 | =========
2840 | 34.0
2841 | 34
2842 | =========
2843 | 100.0
2844 | 100
2845 | =========
2846 | 3
2847 | 1
2848 | =========
2849 | 45
2850 | 45
2851 | =========
2852 | 38
2853 | 40
2854 | =========
2855 | 7.0
2856 | 7
2857 | =========
2858 | 87
2859 | 11
2860 | =========
2861 | 155
2862 | 225
2863 | =========
2864 | 600.0
2865 | 1000
2866 | =========
2867 | 200.0
2868 | 200
2869 | =========
2870 | 486
2871 | 374
2872 | =========
2873 | 48.0
2874 | 48
2875 | =========
2876 | 30.0
2877 | 30
2878 | =========
2879 | 227
2880 | 227
2881 | =========
2882 | 2100.0
2883 | 1800
2884 | =========
2885 | 33
2886 | 33
2887 | =========
2888 | 300
2889 | 100
2890 | =========
2891 | 195.0
2892 | 120
2893 | =========
2894 | 79
2895 | 79
2896 | =========
2897 | 5.0
2898 | 5
2899 | =========
2900 | 10
2901 | 20
2902 | =========
2903 | 465
2904 | 540
2905 | =========
2906 | 32
2907 | 4
2908 | =========
2909 | 160.0
2910 | 160
2911 | =========
2912 | 12.0
2913 | 50
2914 | =========
2915 | 90
2916 | 90
2917 | =========
2918 | 7.0
2919 | 7
2920 | =========
2921 | 12
2922 | 12
2923 | =========
2924 | 15.0
2925 | 15
2926 | =========
2927 | 376.0
2928 | 342
2929 | =========
2930 | 63.0
2931 | 63
2932 | =========
2933 | 2.8
2934 | 70
2935 | =========
2936 | 3.0
2937 | 3
2938 | =========
2939 | 6
2940 | 6
2941 | =========
2942 | 2.5
2943 | 45
2944 | =========
2945 | 22
2946 | 14
2947 | =========
2948 | -90.0
2949 | 15
2950 | =========
2951 | 96
2952 | 52
2953 | =========
2954 | 11
2955 | 11
2956 | =========
2957 | 2.0
2958 | 2
2959 | =========
2960 | 12
2961 | 12
2962 | =========
2963 | -13
2964 | 3
2965 | =========
2966 | 6000
2967 | 6600
2968 | =========
2969 | 50
2970 | 50
2971 | =========
2972 | 66.66666666666666
2973 | 25
2974 | =========
2975 | -32.25
2976 | 1
2977 | =========
2978 | 120
2979 | 2
2980 | =========
2981 | 8.0
2982 | 8
2983 | =========
2984 | 480.0
2985 | 480
2986 | =========
2987 | -8/3
2988 | 8
2989 | =========
2990 | 1490
2991 | 1490
2992 | =========
2993 | 826.0
2994 | 826
2995 | =========
2996 | 34
2997 | 34
2998 | =========
2999 | 230
3000 | 230
3001 | =========
3002 | 1875
3003 | 1875
3004 | =========
3005 | 5.0
3006 | 5
3007 | =========
3008 | 6000.0
3009 | 6000
3010 | =========
3011 | 36.14001293780645
3012 | 94
3013 | =========
3014 | 50.0
3015 | 2
3016 | =========
3017 | 0
3018 | 3
3019 | =========
3020 | 78.0
3021 | 78
3022 | =========
3023 | 139
3024 | 138
3025 | =========
3026 | 27 18
3027 | 45
3028 | =========
3029 | 60.0
3030 | 60
3031 | =========
3032 | 98
3033 | 98
3034 | =========
3035 | 22
3036 | 22
3037 | =========
3038 | 36
3039 | 36
3040 | =========
3041 | 15
3042 | 12
3043 | =========
3044 | 77
3045 | 77
3046 | =========
3047 | 300
3048 | 300
3049 | =========
3050 | 30
3051 | 30
3052 | =========
3053 | 43200.0
3054 | 43200
3055 | =========
3056 | 6.0
3057 | 12
3058 | =========
3059 | 200.0
3060 | 200
3061 | =========
3062 | 34
3063 | 34
3064 | =========
3065 | -10.000000000000009
3066 | 24
3067 | =========
3068 | 5.0
3069 | 5
3070 | =========
3071 | 450
3072 | 450
3073 | =========
3074 | 12.0
3075 | 2
3076 | =========
3077 | 134
3078 | 66
3079 | =========
3080 | 40.0
3081 | 35
3082 | =========
3083 | 10
3084 | 10
3085 | =========
3086 | 10
3087 | 10
3088 | =========
3089 | 1.0
3090 | 4
3091 | =========
3092 | 80
3093 | 160
3094 | =========
3095 | 736.0
3096 | 736
3097 | =========
3098 | 101.0
3099 | 101
3100 | =========
3101 | -9.166666666666666
3102 | 3
3103 | =========
3104 | 130000
3105 | 130000
3106 | =========
3107 | 2.666666666666667
3108 | 1
3109 | =========
3110 | 1840/3
3111 | 420
3112 | =========
3113 | 1.96875
3114 | 189
3115 | =========
3116 | 46/5
3117 | 10
3118 | =========
3119 | 6154.112798000001
3120 | 7400
3121 | =========
3122 | 20
3123 | 20
3124 | =========
3125 | 655
3126 | 655
3127 | =========
3128 | 15.0
3129 | 15
3130 | =========
3131 | 110
3132 | 110
3133 | =========
3134 | 55.0
3135 | 55
3136 | =========
3137 | 2400.0
3138 | 2400
3139 | =========
3140 | 2304.0
3141 | 2304
3142 | =========
3143 | 3
3144 | 156
3145 | =========
3146 | 24.0
3147 | 24
3148 | =========
3149 | 250
3150 | 250
3151 | =========
3152 | 2.0
3153 | 2
3154 | =========
3155 | 31
3156 | 31
3157 | =========
3158 | 58.0
3159 | 58
3160 | =========
3161 | 482.0
3162 | 482
3163 | =========
3164 | 320
3165 | 320
3166 | =========
3167 | 247
3168 | 247
3169 | =========
3170 | 95.0
3171 | 95
3172 | =========
3173 | 14.0
3174 | 14
3175 | =========
3176 | 245.0
3177 | 245
3178 | =========
3179 | 24
3180 | 24
3181 | =========
3182 | 300.0
3183 | 300
3184 | =========
3185 | 12.0
3186 | 18
3187 | =========
3188 | 251
3189 | 251
3190 | =========
3191 | 25.0
3192 | 85
3193 | =========
3194 | 30.0
3195 | 21
3196 | =========
3197 | 750.0
3198 | 750
3199 | =========
3200 | -20
3201 | 16
3202 | =========
3203 | 162.0
3204 | 162
3205 | =========
3206 | 160.0
3207 | 145
3208 | =========
3209 | 8.0
3210 | 8
3211 | =========
3212 | 10.0
3213 | 10
3214 | =========
3215 | 1200
3216 | 72000
3217 | =========
3218 | 195
3219 | 195
3220 | =========
3221 | 2.0
3222 | 2
3223 | =========
3224 | 2.0
3225 | 2
3226 | =========
3227 | 60
3228 | 20
3229 | =========
3230 | 26.0
3231 | 26
3232 | =========
3233 | 131250.0
3234 | 131250
3235 | =========
3236 | 48
3237 | 12
3238 | =========
3239 | 22.833333333333332
3240 | 30
3241 | =========
3242 | 32.0
3243 | 32
3244 | =========
3245 | 72
3246 | 72
3247 | =========
3248 | 1000.0
3249 | 1000
3250 | =========
3251 | 2160
3252 | 1080
3253 | =========
3254 | 144.0
3255 | 144
3256 | =========
3257 | 33.33333333333333
3258 | 25
3259 | =========
3260 | 270.0
3261 | 270
3262 | =========
3263 | 148.8
3264 | 240
3265 | =========
3266 | 480
3267 | 480
3268 | =========
3269 | 30
3270 | 30
3271 | =========
3272 | 2.0
3273 | 2
3274 | =========
3275 | 3.75
3276 | 5
3277 | =========
3278 | 16.0
3279 | 16
3280 | =========
3281 | 77.0
3282 | 113
3283 | =========
3284 | 90.0
3285 | 90
3286 | =========
3287 | 24
3288 | 24
3289 | =========
3290 | 60.0
3291 | 40
3292 | =========
3293 | 5.0
3294 | 5
3295 | =========
3296 | 400.0
3297 | 360
3298 | =========
3299 | 38
3300 | 38
3301 | =========
3302 | 3.0
3303 | 3
3304 | =========
3305 | 185.0
3306 | 60
3307 | =========
3308 | 157
3309 | 157
3310 | =========
3311 | 5.0
3312 | 5
3313 | =========
3314 | -3
3315 | -3
3316 | =========
3317 | 8
3318 | 8
3319 | =========
3320 | 5.0
3321 | 5
3322 | =========
3323 | 60.0
3324 | 60
3325 | =========
3326 | 9.0
3327 | 9
3328 | =========
3329 | 10
3330 | 5
3331 | =========
3332 | 18
3333 | 18
3334 | =========
3335 | 560
3336 | 560
3337 | =========
3338 | 35.0
3339 | 35
3340 | =========
3341 | 20.0
3342 | 18
3343 | =========
3344 | 105.0
3345 | 105
3346 | =========
3347 | 64
3348 | 64
3349 | =========
3350 | 90
3351 | 90
3352 | =========
3353 | 50.0
3354 | 50
3355 | =========
3356 | 750.0
3357 | 750
3358 | =========
3359 | 9
3360 | 9
3361 | =========
3362 | 25.0
3363 | 25
3364 | =========
3365 | 96.0
3366 | 96
3367 | =========
3368 | 45000
3369 | 45000
3370 | =========
3371 | 2.5
3372 | 50
3373 | =========
3374 | 7.0
3375 | 7
3376 | =========
3377 | 32
3378 | 32
3379 | =========
3380 | 26
3381 | 26
3382 | =========
3383 | 40
3384 | 68
3385 | =========
3386 | 700
3387 | 700
3388 | =========
3389 | 1.0
3390 | 1
3391 | =========
3392 | 56
3393 | 27
3394 | =========
3395 | 15.0
3396 | 20
3397 | =========
3398 | 9
3399 | 9
3400 | =========
3401 | 300
3402 | 300
3403 | =========
3404 | 17
3405 | 34
3406 | =========
3407 | 60.0
3408 | 291
3409 | =========
3410 | 16.0
3411 | 16
3412 | =========
3413 | 7.222222222222222
3414 | 22
3415 | =========
3416 | 9.0
3417 | 9
3418 | =========
3419 | 93
3420 | 93
3421 | =========
3422 | 21
3423 | 21
3424 | =========
3425 | 50.0
3426 | 50
3427 | =========
3428 | 12.000000000000002
3429 | 12
3430 | =========
3431 | 20.0
3432 | 20
3433 | =========
3434 | 30
3435 | 30
3436 | =========
3437 | 13.0
3438 | 13
3439 | =========
3440 | 120
3441 | 120
3442 | =========
3443 | 3.0
3444 | 3
3445 | =========
3446 | 22995800
3447 | 7300
3448 | =========
3449 | 50.0
3450 | 50
3451 | =========
3452 | 1125
3453 | 1125
3454 | =========
3455 | 2660.0
3456 | 170
3457 | =========
3458 | 3.0
3459 | 3
3460 | =========
3461 | 12
3462 | 12
3463 | =========
3464 | 9
3465 | 9
3466 | =========
3467 | 1248.0
3468 | 1248
3469 | =========
3470 | 2750
3471 | 2350
3472 | =========
3473 | 120.0
3474 | 120
3475 | =========
3476 | 148/9
3477 | 20
3478 | =========
3479 | 3.0
3480 | 2
3481 | =========
3482 | 3.0
3483 | 2
3484 | =========
3485 | 3160
3486 | 3160
3487 | =========
3488 | 48
3489 | 93
3490 | =========
3491 | 10.0
3492 | 10
3493 | =========
3494 | 240
3495 | 240
3496 | =========
3497 | 16.0
3498 | 16
3499 | =========
3500 | 13
3501 | 2
3502 | =========
3503 | 17
3504 | 17
3505 | =========
3506 | 17
3507 | 17
3508 | =========
3509 | 50.0
3510 | 50
3511 | =========
3512 | 5600.0
3513 | 5600
3514 | =========
3515 | 18
3516 | 1800
3517 | =========
3518 | 13
3519 | 11
3520 | =========
3521 | 306.0
3522 | 306
3523 | =========
3524 | 12
3525 | 6
3526 | =========
3527 | 19
3528 | 19
3529 | =========
3530 | 108.0
3531 | 5
3532 | =========
3533 | 14.0
3534 | 24
3535 | =========
3536 | 6
3537 | 6
3538 | =========
3539 | -8.5
3540 | 19
3541 | =========
3542 | -20
3543 | 100
3544 | =========
3545 | 280.0
3546 | 280
3547 | =========
3548 | 9.0
3549 | 9
3550 | =========
3551 | 1200
3552 | 1200
3553 | =========
3554 | 320
3555 | 320
3556 | =========
3557 | 75
3558 | 75
3559 | =========
3560 | 780.0
3561 | 2400
3562 | =========
3563 | 140.0
3564 | 140
3565 | =========
3566 | 2.6666666666666665
3567 | 2
3568 | =========
3569 | 8
3570 | 8
3571 | =========
3572 | 42
3573 | 42
3574 | =========
3575 | 19
3576 | 19
3577 | =========
3578 | 240
3579 | 240
3580 | =========
3581 | 168
3582 | 168
3583 | =========
3584 | 4
3585 | 4
3586 | =========
3587 | 40000.0
3588 | 40000
3589 | =========
3590 | 64.0
3591 | 64
3592 | =========
3593 | 27.0
3594 | 27
3595 | =========
3596 | 29
3597 | 29
3598 | =========
3599 | 288
3600 | 288
3601 | =========
3602 | 448
3603 | 448
3604 | =========
3605 | 150.0
3606 | 150
3607 | =========
3608 | 281.0
3609 | 31
3610 | =========
3611 | 5
3612 | 5
3613 | =========
3614 | 450.0
3615 | 36
3616 | =========
3617 | 110.0
3618 | 20
3619 | =========
3620 | 50.7
3621 | 75
3622 | =========
3623 | 225
3624 | 225
3625 | =========
3626 | 100
3627 | 100
3628 | =========
3629 | 8
3630 | 32
3631 | =========
3632 | 10.0
3633 | 10
3634 | =========
3635 | 350
3636 | 350
3637 | =========
3638 | 7.2
3639 | 8
3640 | =========
3641 | 5
3642 | 5
3643 | =========
3644 | 2
3645 | 3
3646 | =========
3647 | 84.0
3648 | 90
3649 | =========
3650 | 66
3651 | 66
3652 | =========
3653 | 31
3654 | 31
3655 | =========
3656 | 36.0
3657 | 36
3658 | =========
3659 | 440.0
3660 | 440
3661 | =========
3662 | 70.0
3663 | 70
3664 | =========
3665 | 15
3666 | 15
3667 | =========
3668 | 81.0
3669 | 81
3670 | =========
3671 | 12
3672 | 12
3673 | =========
3674 | 2.5
3675 | 60
3676 | =========
3677 | 3.0
3678 | 84
3679 | =========
3680 | 78
3681 | 78
3682 | =========
3683 | 520.0
3684 | 520
3685 | =========
3686 | 3000.0
3687 | 50
3688 | =========
3689 | 2
3690 | 2
3691 | =========
3692 | 8.0
3693 | 8
3694 | =========
3695 | 20
3696 | 20
3697 | =========
3698 | 50.0
3699 | 50
3700 | =========
3701 | 35
3702 | 35
3703 | =========
3704 | 68/3
3705 | 96
3706 | =========
3707 | 3360
3708 | 3360
3709 | =========
3710 | 7
3711 | 7
3712 | =========
3713 | 750
3714 | 750
3715 | =========
3716 | 40.0
3717 | 56
3718 | =========
3719 | 22.0
3720 | 22
3721 | =========
3722 | 30.0
3723 | 30
3724 | =========
3725 | 40.0
3726 | 70
3727 | =========
3728 | 120
3729 | 120
3730 | =========
3731 | 30
3732 | 30
3733 | =========
3734 | 12.0
3735 | 12
3736 | =========
3737 | 15.0
3738 | 15
3739 | =========
3740 | 14.0
3741 | 14
3742 | =========
3743 | 60.0
3744 | 60
3745 | =========
3746 | 600
3747 | 7200
3748 | =========
3749 | 5
3750 | 5
3751 | =========
3752 | 235.0
3753 | 235
3754 | =========
3755 | 12.0
3756 | 12
3757 | =========
3758 | 500
3759 | 500
3760 | =========
3761 | 210
3762 | 210
3763 | =========
3764 | 30
3765 | 36
3766 | =========
3767 | 147
3768 | 147
3769 | =========
3770 | 80
3771 | 40
3772 | =========
3773 | 20
3774 | 20
3775 | =========
3776 | 234
3777 | 54
3778 | =========
3779 | 1800.0
3780 | 3528
3781 | =========
3782 | 43
3783 | 43
3784 | =========
3785 | 136.0
3786 | 296
3787 | =========
3788 | 31
3789 | 27
3790 | =========
3791 | 26
3792 | 38
3793 | =========
3794 | 16.0
3795 | 16
3796 | =========
3797 | 70
3798 | 70
3799 | =========
3800 | 48.0
3801 | 48
3802 | =========
3803 | 665
3804 | 665
3805 | =========
3806 | 180.0
3807 | 180
3808 | =========
3809 | 7.0
3810 | 7
3811 | =========
3812 | 20
3813 | 20
3814 | =========
3815 | 12.0
3816 | 12
3817 | =========
3818 | 60
3819 | 60
3820 | =========
3821 | 25
3822 | 25
3823 | =========
3824 | 1218
3825 | 1218
3826 | =========
3827 | 105
3828 | 105
3829 | =========
3830 | 84
3831 | 84
3832 | =========
3833 | 1800
3834 | 34
3835 | =========
3836 | 101
3837 | 101
3838 | =========
3839 | 90.0
3840 | 90
3841 | =========
3842 | 27
3843 | 27
3844 | =========
3845 | 67
3846 | 67
3847 | =========
3848 | 140000
3849 | 140000
3850 | =========
3851 | 32.0
3852 | 36
3853 | =========
3854 | 2.0
3855 | 2
3856 | =========
3857 | 335
3858 | 335
3859 | =========
3860 | 60.0
3861 | 60
3862 | =========
3863 | 31.0
3864 | 31
3865 | =========
3866 | 13.0
3867 | 13
3868 | =========
3869 | 120
3870 | 120
3871 | =========
3872 | 23
3873 | 23
3874 | =========
3875 | 72.0
3876 | 72
3877 | =========
3878 | -2.0
3879 | 4
3880 | =========
3881 | 1000.0
3882 | 1000
3883 | =========
3884 | 2325
3885 | 2325
3886 | =========
3887 | 2
3888 | 2
3889 | =========
3890 | 8
3891 | 8
3892 | =========
3893 | 30
3894 | 30
3895 | =========
3896 | 2180
3897 | 2280
3898 | =========
3899 | 4.0
3900 | 64
3901 | =========
3902 | 594
3903 | 594
3904 | =========
3905 | 180
3906 | 180
3907 | =========
3908 | 2.0
3909 | 2
3910 | =========
3911 | 35
3912 | 8
3913 | =========
3914 | 5
3915 | 5
3916 | =========
3917 | 230.0
3918 | 230
3919 | =========
3920 | 10.0
3921 | 5
3922 | =========
3923 | 14.0
3924 | 14
3925 | =========
3926 |
--------------------------------------------------------------------------------