Merge pull request #70 from ananyas168/remove-stale-replica

removed the TRL-neurips folder
josepablocam 2023-11-15 12:33:33 -05:00 committed by GitHub
Parents 400f610ede 0dd42d6aea
Commit 0c5c19d51b
No key found matching this signature
GPG key ID: 4AEE18F83AFDEB23
28 changed files with 0 additions and 16739 deletions

View File

@@ -1,119 +0,0 @@
ArbitraryColumnNames
| | a | G8DkqkuE | b8T | H |
|---:|:--------|-----------:|:------------|:----|
| 0 | Alice | 25 | New York | F |
| 1 | Bob | 30 | Los Angeles | M |
| 2 | Charlie | 22 | Chicago | M |
ShuffleRows
| | Name | Age | City | Sex |
|---:|:--------|------:|:------------|:------|
| 0 | Alice | 25 | New York | F |
| 1 | Bob | 30 | Los Angeles | M |
| 2 | Charlie | 22 | Chicago | M |
TransposeTable
| | 0 | 1 | 2 |
|:-----|:---------|:------------|:--------|
| Name | Alice | Bob | Charlie |
| Age | 25 | 30 | 22 |
| City | New York | Los Angeles | Chicago |
| Sex | F | M | M |
ColumnCluster
| | Name-----Age | City-----Sex |
|---:|:---------------|:------------------|
| 0 | Alice-----25 | New York-----F |
| 1 | Bob-----30 | Los Angeles-----M |
| 2 | Charlie-----22 | Chicago-----M |
SerializeTable
Name:Alice,Age:25,City:New York,Sex:F
Name:Bob,Age:30,City:Los Angeles,Sex:M
Name:Charlie,Age:22,City:Chicago,Sex:M
ShuffleColumns
| | Age | Sex | Name | City |
|---:|------:|:------|:--------|:------------|
| 0 | 25 | F | Alice | New York |
| 1 | 30 | M | Bob | Los Angeles |
| 2 | 22 | M | Charlie | Chicago |
ShuffleColumnNames
| | Age | Sex | Name | City |
|---:|:--------|------:|:------------|:-------|
| 0 | Alice | 25 | New York | F |
| 1 | Bob | 30 | Los Angeles | M |
| 2 | Charlie | 22 | Chicago | M |
OriginalData
| | Name | Age | City | Sex |
|---:|:--------|------:|:------------|:------|
| 0 | Alice | 25 | New York | F |
| 1 | Bob | 30 | Los Angeles | M |
| 2 | Charlie | 22 | Chicago | M |
SequentialColumnNames
| | col_0 | col_1 | col_2 | col_3 |
|---:|:--------|--------:|:------------|:--------|
| 0 | Alice | 25 | New York | F |
| 1 | Bob | 30 | Los Angeles | M |
| 2 | Charlie | 22 | Chicago | M |
| | |
|---:|:---------------------------------------|
| 0 | Name:Alice,Age:25,City:New York,Sex:F |
| 1 | Name:Bob,Age:30,City:Los Angeles,Sex:M |
| 2 | Name:Charlie,Age:22,City:Chicago,Sex:M |
<table border="1" class="dataframe">
<thead>
<tr style="text-align: right;">
<th></th>
<th>Name</th>
<th>Age</th>
<th>City</th>
<th>Sex</th>
</tr>
</thead>
<tbody>
<tr>
<th>0</th>
<td>Alice</td>
<td>25</td>
<td>New York</td>
<td>F</td>
</tr>
<tr>
<th>1</th>
<td>Bob</td>
<td>30</td>
<td>Los Angeles</td>
<td>M</td>
</tr>
<tr>
<th>2</th>
<td>Charlie</td>
<td>22</td>
<td>Chicago</td>
<td>M</td>
</tr>
</tbody>
</table><table border="1" class="dataframe"> <thead> <tr style="text-align: right;"><th></th><th>Name</th><th>Age</th><th>City</th><th>Sex</th> </tr> </thead> <tbody> <tr><th>0</th><td>Alice</td><td>25</td><td>New York</td><td>F</td> </tr> <tr><th>1</th><td>Bob</td><td>30</td><td>Los Angeles</td><td>M</td> </tr> <tr><th>2</th><td>Charlie</td><td>22</td><td>Chicago</td><td>M</td> </tr> </tbody></table>


View File

@@ -1,63 +0,0 @@
| | Name | Age | City | Sex |
|---:|:----------|------:|:--------------|:------|
| 0 | Alice | 25 | New York | F |
| 1 | Bob | 30 | Los Angeles | M |
| 2 | Charlie | 22 | Chicago | M |
| 3 | David | 28 | Boston | M |
| 4 | Emily | 35 | San Francisco | F |
| 5 | Frank | 29 | Dallas | M |
| 6 | Grace | 27 | Miami | F |
| 7 | Henry | 32 | Seattle | M |
| 8 | Ivy | 24 | Denver | F |
| 9 | Jack | 33 | Houston | M |
| 10 | Katherine | 26 | Atlanta | F |
| 11 | Liam | 31 | Phoenix | M |
| 12 | Mia | 36 | Philadelphia | F |
| 13 | Noah | 23 | San Diego | M |
| 14 | Olivia | 29 | Austin | F |
NavigationTests
What value is at row 14 and column Name?
What value is at row 7 and column City?
What value is at row 4 and column Name?
ColumnLookupTests
What column is the Olivia in?
What column is the Seattle in?
What column is the Emily in?
RowLookupTests
What row is the Olivia in?
What row is the Seattle in?
What row is the Emily in?
DataTypeLookupTests
What type (using Pandas datatype notation) is column Age?
What type (using Pandas datatype notation) is column City?
What type (using Pandas datatype notation) is column Sex?
TableColumnReorderTests
Can you reorder the table such that the column are in this new order ['Sex', 'City', 'Age', 'Name']? Make sure to return the complete reordered table.
Can you reorder the table such that the column are in this new order ['Name', 'Sex', 'City', 'Age']? Make sure to return the complete reordered table.
Can you reorder the table such that the column are in this new order ['Sex', 'Name', 'Age', 'City']? Make sure to return the complete reordered table.
TableReconstructionTests
Can you reconstruct the table by deserializing the table above?
TableTransposeTests
Can you transpose the table?

View File

@@ -1,60 +0,0 @@
# General Guidelines
This folder contains the evaluation code and results associated with the paper [**Tabular Representation, Noisy Operators, and Impacts on Table Structure Understanding Tasks in LLMs**](https://openreview.net/forum?id=Ld5UCpiT07), to appear at [TRL@NeurIPS 2023](https://table-representation-learning.github.io/).
## Setup
The following assumes you want to set up inside a Python virtual environment.
```powershell
python -m virtualenv venv/
venv/scripts/activate
pip install -r requirements.txt
```
You should have your OpenAI API key set as an environment variable, because we use the key to access the GPT-3.5 `text-davinci-003` model:
```powershell
$env:OPENAI_API_KEY=...
```
## Resources
Inside the `resources` folder you will find the `\BenchmarkDataset` subfolder, which contains seven datasets from Kaggle. These seven datasets are used as benchmarks to evaluate the LLM's understanding through our self-supervised evaluation setup.
To access the benchmark results that we obtained, download `BenchmarkResults.zip` from [TodO]() and unzip it at the root of the `TRL-neurips\resources\` folder so that the code runs successfully.
## Code Usage Guideline
In the paper we introduce self-supervised structural tasks to evaluate the performance of the LLM.
For the fact-finding tasks, refer to `.\code_\tableTestingMicroScript.py`. The notebook `.\notebooks\MicroTableTesting.ipynb` also demonstrates how our fact-finding evaluation suite is used.
For the table-transformation tasks, refer to `code_\tableTestingMacroScript.py`. The notebook `.\notebooks\MacroTableTesting.ipynb` also demonstrates how our table-transformation evaluation suite is used.
## Metrics
### Fact-finding tasks
We report the average pass@1 and p-value for the fact-finding tasks.
`.\notebooks\MicroTestMetricsCalculation.ipynb` takes the log file generated by the `.\notebooks\MicroTableTesting.ipynb` notebook, performs the prerequisite metric calculations, and generates the final CSV files.
The final CSV files are then used by `.\notebooks\MicroTestResults.ipynb` to report the pass@1 scores on the original data across all the fact-finding tasks, along with p-values and the average pass@1 delta from the original to each noisy operation.
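For reference, here is a minimal illustrative sketch (not part of the released notebooks) of how pass@k can be computed for a single question from several sampled completions; it mirrors the combination-based `metric_pass_k` helper included in this folder:
```python
from itertools import combinations

def pass_at_k(expected_values, sampled_answers, k=1):
    # One boolean per sampled completion: did it match any accepted answer?
    hits = [ans in expected_values for ans in sampled_answers]
    # Percentage of k-sized draws that contain at least one correct completion.
    combos = list(combinations(hits, k))
    return 100.0 * sum(any(c) for c in combos) / len(combos)

# Two of three completions are correct, so pass@1 = 66.7; scores are then averaged over questions.
print(pass_at_k(["AMZN"], ["AMZN", "GOOG", "AMZN"], k=1))
```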
### Table-transformation tasks
We report the average F1 score and p-value for the table-transformation tasks.
`.\notebooks\MacroTestMetricsCalculation.ipynb` takes the log file generated by the `.\notebooks\MacroTableTesting.ipynb` notebook, performs the prerequisite metric calculations, and generates the final CSV files.
The final CSV files are then used by `.\notebooks\MacroTestResults_RQ1.ipynb` to report the F1 scores on the original data across all the table-transformation tasks, along with p-values, while `.\notebooks\MacroTestResults_RQ2.ipynb` reports the average F1 score delta from original to noisy for the transformation tasks, along with the p-values.
# Citation
If you find our work useful in your research, please consider citing the paper:
```bibtex
@inproceedings{singha2023tabular,
  title={Tabular Representation, Noisy Operators, and Impacts on Table Structure Understanding Tasks in LLMs},
  author={Singha, Ananya and Cambronero, Jos{\'e} and Gulwani, Sumit and Le, Vu and Parnin, Chris},
  booktitle={NeurIPS 2023 Second Table Representation Learning Workshop},
  year={2023}
}
```
# Contact
For any questions or issues, please open an issue on the repository or reach us via email at `t-asingha@microsoft.com`.

View File

@@ -1,57 +0,0 @@
import openai
import json
import time
import re
import os
from typing import List, Dict
from openai.embeddings_utils import get_embedding
DEFAULT_KEY = os.environ.get("OPENAI_API_KEY")
def set_openai_key_attribute(open_api_key: str):
openai.api_type = "open_ai"
openai.api_base = "https://api.openai.com/v1"
openai.api_key = open_api_key
openai.api_version = None
def openapi_call_completions(prompt: str, modelName="text-davinci-003", temp=0.7, maxTok=500, num_n=1, open_api_key: str = DEFAULT_KEY):
set_openai_key_attribute(open_api_key)
max_attempts = 100000
attempt = 1
while attempt <= max_attempts:
try:
response = openai.Completion.create(
model=modelName,
prompt=prompt,
temperature=temp,
max_tokens=maxTok,
top_p=1,
frequency_penalty=0,
presence_penalty=0,
n=num_n,
logprobs=1
)
generation_list_all = []
response = json.loads(str(response))
meta_info = {"prompt": prompt, "response": response}
for i in range(len(response["choices"])):
output = response["choices"][i]["text"].strip()
generation_list_all.append(output)
meta_info["generations"] = generation_list_all
return generation_list_all, meta_info
except openai.error.RateLimitError as e:
# Rate limit error occurred, wait for a while before retrying
wait_duration = 1  # seconds to wait before the next retry
print(
f"Rate limit exceeded. Retrying in {wait_duration} seconds...")
time.sleep(wait_duration)
attempt += 1
return
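# --- Illustrative usage sketch (not part of the original module) ---
# Assumes OPENAI_API_KEY is set and the legacy openai<1.0 Completion API targeted above is installed.
if __name__ == "__main__":
    generations, meta_info = openapi_call_completions(
        "Q: What value is at row 3 and column Ticker?\nA:",
        modelName="text-davinci-003", temp=0.0, maxTok=16, num_n=1)
    print(generations[0])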

View File

@@ -1,178 +0,0 @@
from typing import Any, Union
import pandas as pd
import numpy as np
import datetime
def parse_cell_value(reference_cell: Any, other_cell: Any) -> Any:
"""
Parses other_cell as reference_cell type if other_cell is string
"""
if not isinstance(other_cell, str):
return other_cell
try:
if isinstance(reference_cell, bool):
# check bool first since bool isinstance of int as well
# tolerate capitalization differences for boolean
return other_cell.lower() == "true"
elif isinstance(reference_cell, int) or isinstance(reference_cell, float):
# always parse as float to be safe with decimal points
return float(other_cell)
elif isinstance(reference_cell, datetime.date):
# best guess parse
return pd.to_datetime(other_cell).date()
elif isinstance(reference_cell, datetime.datetime):
# best guess parse
return pd.to_datetime(other_cell).to_pydatetime()
else:
# fall back to str
return other_cell
except ValueError:
# fall back to str if fails to parse
return other_cell
def cells_are_equal(cell1: Any, cell2: Any) -> int:
try:
if isinstance(cell1, (bool, str, datetime.date, datetime.datetime, pd.Timestamp)):
return cell1 == cell2
elif isinstance(cell1, (float, int, np.number)):
return np.allclose(cell1, cell2)
elif np.isnan(cell1):
return np.isnan(cell2)
elif np.isinf(cell1):
return np.isinf(cell2) and (np.sign(cell1) == np.sign(cell2))
else:
# fall back
return cell1 == cell2
except:
# try again in case it was numpy issues
try:
return cell1 == cell2
except:
return False
def make_two_dim(obj):
if obj.ndim == 2:
return obj
if obj.ndim == 1:
return obj.reshape(-1, 1)
else:
raise Exception("Expect object with 1 or 2 dimensions")
def compare_per_cell(
reference_df: pd.DataFrame,
other_df: pd.DataFrame,
type_reference_df: pd.DataFrame = None,
count_header_and_index: bool = True,
return_fraction: bool = True,
):
"""
Per-cell comparison with respect to reference_df (i.e.
(other_df matches) / (reference_df cells)
).
We cast other_df cells to match the type in type_reference_df (default to
reference_df if not provided)
We treat header row and column indices as just other cells.
"""
if type_reference_df is None:
type_reference_df = reference_df
as_dataframe = isinstance(reference_df, pd.DataFrame)
if as_dataframe:
ref_vals = reference_df.reset_index(drop=True)
other_vals = other_df.reset_index(drop=True)
type_ref_vals = type_reference_df.reset_index(drop=True)
# iterate over reference dimensions
rows_range = range(ref_vals.shape[0])
cols_range = ref_vals.columns
def lookup_ij(df, i, j): return df.loc[i, j]
else:
ref_vals = make_two_dim(reference_df.values)
other_vals = make_two_dim(other_df.values)
type_ref_vals = make_two_dim(type_reference_df.values)
nrows, ncols = ref_vals.shape
rows_range = range(nrows)
cols_range = range(ncols)
def lookup_ij(mat, i, j): return mat[i, j]
success_ct = 0
total_ct = 0
for i in rows_range:
for j in cols_range:
# print(i,j)
# print(ref_vals.index)
ref_cell = lookup_ij(ref_vals, i, j)
total_ct += 1
try:
other_cell = lookup_ij(other_vals, i, j)
type_ref_cell = lookup_ij(type_ref_vals, i, j)
except (IndexError, KeyError):
# failed since out of bounds
# print("failed")
continue
other_cell_parsed = parse_cell_value(type_ref_cell, other_cell)
success_ct += int(cells_are_equal(ref_cell, other_cell_parsed))
if count_header_and_index:
# compare header
ref_header_row = reference_df.columns
other_header_row = other_df.columns
add_to_success, add_to_total = compare_per_cell(
ref_header_row, other_header_row, count_header_and_index=False, return_fraction=False)
total_ct += add_to_total
success_ct += add_to_success
# compare column index
ref_index_col = reference_df.index
other_index_col = other_df.index
add_to_success, add_to_total = compare_per_cell(
ref_index_col, other_index_col, count_header_and_index=False, return_fraction=False)
total_ct += add_to_total
success_ct += add_to_success
if return_fraction:
return float(success_ct) / float(total_ct)
else:
return success_ct, total_ct
if __name__ == "__main__":
data1 = {'A': [1, 2, 3],
'B': [True, False, True],
'C': ['apple', 'banana', 'cherry'],
'D': [datetime.date(2023, 1, 1), datetime.date(2023, 2, 2), datetime.date(2023, 3, 3)]}
df1 = pd.DataFrame(data1)
# Create the second DataFrame with overlapping values
data2 = {'A': [1, 5, 3],
'B': [True, True, False],
'C': ['apple', 'elephant', 'cherry'],
'D': [datetime.date(2023, 1, 1), datetime.date(2023, 2, 2), datetime.date(2023, 3, 3)]}
df2 = pd.DataFrame(data2)
# equal to self
assert compare_per_cell(df1, df1) == 1.0
assert compare_per_cell(df2, df2) == 1.0
# empty
assert compare_per_cell(df1, pd.DataFrame()) == 0.0
assert compare_per_cell(
df1, df1.iloc[:0], count_header_and_index=False) == 0.0
assert compare_per_cell(
df1.iloc[:0], df1, count_header_and_index=False, return_fraction=False) == (0, 0)
# both directions
assert compare_per_cell(df1, df2, type_reference_df=df1,
count_header_and_index=False, return_fraction=False) == (8, 12)
assert compare_per_cell(df2, df1, type_reference_df=df2,
count_header_and_index=False, return_fraction=False) == (8, 12)
# subsets
df1_subset = df1[["C", "D"]]
assert compare_per_cell(df1_subset, df2, type_reference_df=df1_subset,
count_header_and_index=False, return_fraction=False) == (5, 6)
assert compare_per_cell(df2, df1_subset, type_reference_df=df2,
count_header_and_index=False, return_fraction=False) == (5, 12)

View File

@@ -1,57 +0,0 @@
from typing import Literal
import os
import matplotlib.pyplot as plt
import numpy as np
def compute_ecdf(data):
"""Compute ECDF for a one-dimensional array of measurements."""
n = len(data)
x = np.sort(data)
y = np.arange(1, n+1) / n
return x, y
def ecdf_scatter_plot(attributes, save_path, pivot_table, name_save, type: Literal["dots", "step"] = "dots", figsize: tuple = (30, 20), k: int = 1, temperature: float = 0.1):
"""With table formats as columns and test cases as row. ecdf plot across table manipulation for a specific temperature and metric """
# Define the number of subplots and their arrangement
x_list = [[test, "", temperature ] for test in attributes["TestCase"] ]
num_rows = len(x_list)
num_cols = 6
# Create a figure and a grid of subplots
fig, axes = plt.subplots(num_rows, num_cols, figsize=figsize)
# Loop through each subplot and plot the scatter plot
for i in range(num_rows):
for j in range(num_cols):
TableFormat = attributes["tableFormat"][j]
ax = axes[i][j]
# Filter data for each subplot based on data_labels
for manipulation in attributes["TableManipulation"]:
x= x_list[i]
x[1] = manipulation
x = tuple(x)
y = ( f'pass_{k}', TableFormat)
scores = pivot_table.loc[x][y]
scores_x, scores_y = compute_ecdf(scores)
if type == "dots":
ax.plot(scores_x, scores_y, label=f'{manipulation}',marker = ".", linestyle = "none")
else:
ax.step(scores_x, scores_y, label=f'{manipulation}')
name = f'Test: {x[0]}, metric: pass@{k}\nTemperature: {x[2]}, Format: {y[1]}'
ax.legend()
ax.set_title(name)
ax.set_xlabel('score (%)')
ax.set_ylabel('ecdf')
# Adjust layout to prevent overlapping labels
plt.tight_layout()
plt.savefig(os.path.join(save_path, name_save.replace(".csv", "_") +f"ecdf_{type}_plot.pdf"))
# Show the plot
plt.show()
plt.clf()

View File

@@ -1,26 +0,0 @@
import pandas
import os
import numpy as np
from itertools import combinations
from typing import Any
def metric_pass_k(expected_answer, answer: Any, k: int) -> float:
if not isinstance(answer, list):
raise ValueError("Answer should be a list for pass@k metric.")
# Convert the test.expect to a list if it's not already
expected_values = expected_answer if isinstance(
expected_answer, list) else [expected_answer]
expected_values = list(map(str, expected_values))
boolean_answers = [
answer[i] in expected_values for i in range(len(answer))]
combinations_k = list(combinations(boolean_answers, k))
passed_at_k = 0
# Calculate the pass@k metric
for comb in combinations_k:
if any(comb):
passed_at_k += 1
pass_at_k_percentage = (passed_at_k / len(combinations_k))*100
return pass_at_k_percentage
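# --- Illustrative usage sketch (not part of the original module) ---
# For a question whose expected answer is "Sell", three sampled completions of which
# two are correct give pass@1 = 2/3 of the single-sample draws, reported as a percentage.
if __name__ == "__main__":
    print(metric_pass_k("Sell", ["Sell", "Buy", "Sell"], k=1))  # ~66.67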

View File

@@ -1,87 +0,0 @@
import os
import matplotlib.pyplot as plt
import numpy as np
from collections import Counter
def scatter_plot(attributes, save_path,pivot_table,name_save):
x_list = [(test, manipulation, temp ) for test in attributes["TestCase"] for manipulation in attributes["TableManipulation"] for temp in attributes["temperature"]]
# Define the number of subplots and their arrangement
num_rows = len(x_list)
num_cols = 6
# Create a figure and a grid of subplots
fig, axes = plt.subplots(num_rows, num_cols, figsize=(50,500))
# Loop through each subplot and plot the scatter plot
for i in range(num_rows):
for j in range(num_cols):
TableFormat = attributes["tableFormat"][j]
ax = axes[i][j]
markerss= [".", "x", "p", "^","o","s"]
# Filter data for each subplot based on data_labels
for j, k in enumerate([1,3,5,10,15]):
x= x_list[i]
y = ( f'pass_{k}', TableFormat)
scores = pivot_table.loc[x][y]
ax.scatter(range(len(scores)), scores, label=f'pass@{k}', s=int(300/k), marker =markerss[j] )
name = f'Test: {x[0]}, TableManipulation: {x[1]}\nTemperature: {x[2]}, Format: {y[1]}'
ax.set_title(name)
ax.set_xlabel('data-points')
ax.set_ylabel('score(%)')
ax.legend()
# Adjust layout to prevent overlapping labels
plt.tight_layout()
plt.savefig(os.path.join(save_path, name_save.replace(".csv", "_") +"_scatter_plot.pdf"))
# Show the plot
plt.show()
plt.clf()
def density_scatter_plot(attributes, save_path,pivot_table,name_save):
x_list = [(test, manipulation, temp ) for test in attributes["TestCase"] for manipulation in attributes["TableManipulation"] for temp in attributes["temperature"]]
# Define the number of subplots and their arrangement
num_rows = len(x_list)
num_cols = 6
# Create a figure and a grid of subplots
fig, axes = plt.subplots(num_rows, num_cols, figsize=(50,500))
# Loop through each subplot and plot the scatter plot
for i in range(num_rows):
for j in range(num_cols):
TableFormat = attributes["tableFormat"][j]
ax = axes[i][j]
markerss= [".", "x", "p", "^","o","s"]
# Filter data for each subplot based on data_labels
for j, k in enumerate([1,3,5,10,15]):
x= x_list[i]
y = ( f'pass_{k}', TableFormat)
scores = pivot_table.loc[x][y]
counter_scores = Counter(scores)
scores_x= counter_scores.keys()
scores_y= counter_scores.values()
ax.scatter(scores_x, scores_y, label=f'pass@{k}', s=int(300/k), marker =markerss[j] )
name = f'Test: {x[0]}, TableManipulation: {x[1]}\nTemperature: {x[2]}, Format: {y[1]}'
ax.set_xlim([-1,101])
ax.set_title(name)
ax.set_xlabel('score (%)')
ax.set_ylabel('frequency')
ax.legend()
# Adjust layout to prevent overlapping labels
plt.tight_layout()
plt.savefig(os.path.join(save_path, name_save.replace(".csv", "_") +"density_scatter_plot.pdf"))
# Show the plot
plt.show()
plt.clf()

View File

@@ -1,195 +0,0 @@
import random
import pandas as pd
from utils import stringify_serialzed_df
data = {
'Date': ['2023-01-01 00:00:00',
'2023-01-02 00:00:00',
'2023-01-03 00:00:00',
'2023-01-04 00:00:00',
'2023-01-05 00:00:00'],
'Ticker': ['AAPL', 'GOOG', 'MSFT', 'AMZN', 'FB'],
'Price': [150.20, 2700.50, 330.80, 3200.00, 350.75],
'Shares': [100, 50, 200, 30, 80],
'Transaction': ['Buy', 'Sell', 'Buy', 'Sell', 'Buy']
}
df_EX1 = pd.DataFrame(data)
data2 = {'Name': ['Alice', 'Bob', 'Charlie'],
'Age': [25, 30, 22],
'City': ['New York', 'Los Angeles', 'Chicago'],
"Sex": ['F', "M", "M"]}
df_EX2 = pd.DataFrame(data2)
df_EX1 = pd.DataFrame(data)
DATA_QUES_INSTRUCTION = """Given the tabular data your job is to provide answer to the question asked over the table.
[Example]
Data:
[Data_format]
Questions: [Ques]
Answer:
"""
EXAMPLES = """Data:
[Data_format_example1]
[QA1]
Data:
[Data_format_example2]
[QA2]
"""
###### table formats: #########
TF_EX1_MarkdownFormat = '| | Date | Ticker | Price | Shares | Transaction |\n|---:|:--------------------|:---------|--------:|---------:|:--------------|\n| 0 | 2023-01-01 00:00:00 | AAPL | 150.2 | 100 | Buy |\n| 1 | 2023-01-02 00:00:00 | GOOG | 2700.5 | 50 | Sell |\n| 2 | 2023-01-03 00:00:00 | MSFT | 330.8 | 200 | Buy |\n| 3 | 2023-01-04 00:00:00 | AMZN | 3200 | 30 | Sell |\n| 4 | 2023-01-05 00:00:00 | FB | 350.75 | 80 | Buy |'
TF_EX1_DataMatrixFormat = """[['', 'Date', 'Ticker', 'Price', 'Shares', 'Transaction'],
[0, '2023-01-01 00:00:00', 'AAPL', 150.2, 100, 'Buy'],
[1, '2023-01-02 00:00:00', 'GOOG', 2700.5, 50, 'Sell'],
[2, '2023-01-03 00:00:00', 'MSFT', 330.8, 200, 'Buy'],
[3, '2023-01-04 00:00:00', 'AMZN', 3200.0, 30, 'Sell'],
[4, '2023-01-05 00:00:00', 'FB', 350.75, 80, 'Buy']]
"""
TF_EX1_JsonFormat = '{"0":{"Date":2023-01-01 00:00:00,"Ticker":"AAPL","Price":150.2,"Shares":100,"Transaction":"Buy"},"1":{"Date":2023-01-02 00:00:00,"Ticker":"GOOG","Price":2700.5,"Shares":50,"Transaction":"Sell"},"2":{"Date":2023-01-03 00:00:00,"Ticker":"MSFT","Price":330.8,"Shares":200,"Transaction":"Buy"},"3":{"Date":2023-01-04 00:00:00,"Ticker":"AMZN","Price":3200.0,"Shares":30,"Transaction":"Sell"},"4":{"Date":2023-01-05 00:00:00,"Ticker":"FB","Price":350.75,"Shares":80,"Transaction":"Buy"}}'
TF_EX1_DFloaderFormat = "pd.DataFrame({Date : ['2023-01-01 00:00:00', '2023-01-02 00:00:00', '2023-01-03 00:00:00', '2023-01-04 00:00:00', '2023-01-05 00:00:00'], Ticker : ['AAPL', 'GOOG', 'MSFT', 'AMZN', 'FB'], Price : [150.2, 2700.5, 330.8, 3200.0, 350.75], Shares : [100, 50, 200, 30, 80], Transaction : ['Buy', 'Sell', 'Buy', 'Sell', 'Buy']}, index=[0, 1, 2, 3, 4])"
TF_EX1_HTMLFormat = '<table border="1" class="dataframe">\n <thead>\n <tr style="text-align: right;">\n <th></th>\n <th>Date</th>\n <th>Ticker</th>\n <th>Price</th>\n <th>Shares</th>\n <th>Transaction</th>\n </tr>\n </thead>\n <tbody>\n <tr>\n <th>0</th>\n <td>2023-01-01 00:00:00</td>\n <td>AAPL</td>\n <td>150.20</td>\n <td>100</td>\n <td>Buy</td>\n </tr>\n <tr>\n <th>1</th>\n <td>2023-01-02 00:00:00</td>\n <td>GOOG</td>\n <td>2700.50</td>\n <td>50</td>\n <td>Sell</td>\n </tr>\n <tr>\n <th>2</th>\n <td>2023-01-03 00:00:00</td>\n <td>MSFT</td>\n <td>330.80</td>\n <td>200</td>\n <td>Buy</td>\n </tr>\n <tr>\n <th>3</th>\n <td>2023-01-04 00:00:00</td>\n <td>AMZN</td>\n <td>3200.00</td>\n <td>30</td>\n <td>Sell</td>\n </tr>\n <tr>\n <th>4</th>\n <td>2023-01-05 00:00:00</td>\n <td>FB</td>\n <td>350.75</td>\n <td>80</td>\n <td>Buy</td>\n </tr>\n </tbody>\n</table>'
TF_EX1_HTMLNoSpaceFormat = '<table border="1" class="dataframe"> <thead><tr style="text-align: right;"> <th></th> <th>Date</th> <th>Ticker</th> <th>Price</th> <th>Shares</th> <th>Transaction</th></tr> </thead> <tbody><tr> <th>0</th> <td>2023-01-01 00:00:00</td> <td>AAPL</td> <td>150.20</td> <td>100</td> <td>Buy</td></tr><tr> <th>1</th> <td>2023-01-02 00:00:00</td> <td>GOOG</td> <td>2700.50</td> <td>50</td> <td>Sell</td></tr><tr> <th>2</th> <td>2023-01-03 00:00:00</td> <td>MSFT</td> <td>330.80</td> <td>200</td> <td>Buy</td></tr><tr> <th>3</th> <td>2023-01-04 00:00:00</td> <td>AMZN</td> <td>3200.00</td> <td>30</td> <td>Sell</td></tr><tr> <th>4</th> <td>2023-01-05 00:00:00</td> <td>FB</td> <td>350.75</td> <td>80</td> <td>Buy</td></tr> </tbody></table>'
TF_EX1_TabSeparatedFormat = """ Date Ticker Price Shares Transaction
0 2023-01-01 AAPL 150.2 100 Buy
1 2023-01-02 GOOG 2700.5 50 Sell
2 2023-01-03 MSFT 330.8 200 Buy
3 2023-01-04 AMZN 3200.0 30 Sell
4 2023-01-05 FB 350.75 80 Buy"""
TF_EX1_CommaSeparatedFormat = """,Date,Ticker,Price,Shares,Transaction
0,2023-01-01,AAPL,150.2,100,Buy
1,2023-01-02,GOOG,2700.5,50,Sell
2,2023-01-03,MSFT,330.8,200,Buy
3,2023-01-04,AMZN,3200.0,30,Sell
4,2023-01-05,FB,350.75,80,Buy"""
######################################
###### Table ops Questionaire: #########
def Example_TransposeTests(FormatName, TransposeTable, SerializeTable, ColumnCluster, df_EX1=df_EX1, df_EX2=df_EX2):
Recontruction_Example_str = ""
Recontruction_Example_str += f"Data:\n{FormatName.formatting(df_EX1)}\n\n"
Recontruction_Example_str += f"""Question: Can you transpose the table?
Answer:
{FormatName.formatting(next(TransposeTable().modify(df_EX1)))}\n\n\n"""
Recontruction_Example_str += f"Data:\n{FormatName.formatting(df_EX2)}\n\n"
Recontruction_Example_str += f"""Question: Can you transpose the table?
Answer:
{FormatName.formatting(next(TransposeTable().modify(df_EX2)))}\n\n"""
return Recontruction_Example_str
def Example_ColumnReorder(FormatName, TransposeTable, SerializeTable, ColumnCluster, df_EX1=df_EX1, df_EX2=df_EX2):
Reordering_Example_str = ""
col_suffled1 = df_EX1.columns.to_list()
random.shuffle(col_suffled1)
Reordering_Example_str += f"Data:\n{FormatName.formatting(df_EX1)}\n\n"
Reordering_Example_str += f"""Question: Can you reorder the table such that the column are in this new order {str(col_suffled1)}?
Answer:
{FormatName.formatting(df_EX1[col_suffled1])}\n\n\n"""
col_suffled2 = df_EX2.columns.to_list()
random.shuffle(col_suffled2)
Reordering_Example_str += f"Data:\n{FormatName.formatting(df_EX2)}\n\n"
Reordering_Example_str += f"""Question: Can you reorder the table such that the column are in this new order {str(col_suffled2)} ?
Answer:
{FormatName.formatting(df_EX2[col_suffled2])}\n"""
return Reordering_Example_str
def Example_Reconstruction(FormatName, TransposeTable, SerializeTable, ColumnCluster, df_EX1=df_EX1, df_EX2=df_EX2):
Recontruction_Example_str = ""
Recontruction_Example_str += f"Data:\n{FormatName.formatting(next(SerializeTable().modify(df_EX1)))}\n\n"
Recontruction_Example_str += f"""Question: Can you reconstruct the table by deserializing the table above?
Answer:
{FormatName.formatting(df_EX1)}\n\n\n"""
Recontruction_Example_str += f"Data:\n{FormatName.formatting(next(ColumnCluster().modify(df_EX2)))}\n\n"
Recontruction_Example_str += f"""Question: Can you reconstruct the table by deserializing the table above?
Answer:
{FormatName.formatting(df_EX2)}\n\n"""
return Recontruction_Example_str
def Example_Reconstruction1(FormatName, TransposeTable, SerializeTable, ColumnCluster, df_EX1=df_EX1, df_EX2=df_EX2):
""" Similar to reconstruction but the input table is just string"""
Recontruction_Example_str = ""
Recontruction_Example_str += f"Data:\n{stringify_serialzed_df(next(SerializeTable().modify(df_EX1)))}\n\n"
Recontruction_Example_str += f"""Question: Can you reconstruct the table by deserializing the table above?
Answer:
{FormatName.formatting(df_EX1)}\n\n\n"""
Recontruction_Example_str += f"Data:\n{stringify_serialzed_df(next(ColumnCluster().modify(df_EX2)))}\n\n"
Recontruction_Example_str += f"""Question: Can you reconstruct the table by deserializing the table above?
Answer:
{FormatName.formatting(df_EX2)}\n\n"""
return Recontruction_Example_str
####################################################################
###### Test case Questionaire: #########
Ex1_QA1_NavigationTests = """Question: What value is at row 3 and column Ticker?
Answer:
AMZN"""
Ex1_QA2_NavigationTests = """Question: What value is at row 1 and column Transaction?
Answer:
Sell"""
Ex1_QA1_ColumnLookupTests = """Question: What column is the FB in?
Answer:
Ticker"""
Ex1_QA2_ColumnLookupTests = """Question: What column is the '2023-01-02 00:00:00' in?
Answer:
Date"""
Ex1_QA1_RowLookupTests = """Question: What row is the FB in?
Answer:
4"""
Ex1_QA2_RowLookupTests = """Question: What row is the '2023-01-02 00:00:00' in?
Answer:
1"""
Ex1_QA1_DataTypeLookupTests = """Question: What type (using Pandas datatype notation) is column Shares?
Answer:
int64"""
Ex1_QA2_DataTypeLookupTests = """Question: What type (using Pandas datatype notation) is column Price?
Answer:
float64"""
######################################
###### Example Dictionary defination: #########
EXAMPLE_Dictionary = {"TF_EX1_MarkdownFormat": TF_EX1_MarkdownFormat,
"TF_EX1_DataMatrixFormat": TF_EX1_DataMatrixFormat,
"TF_EX1_JsonFormat": TF_EX1_JsonFormat,
"TF_EX1_DFloaderFormat": TF_EX1_DFloaderFormat,
"TF_EX1_HTMLFormat": TF_EX1_HTMLFormat,
"TF_EX1_HTMLNoSpaceFormat": TF_EX1_HTMLNoSpaceFormat,
"TF_EX1_TabSeparatedFormat": TF_EX1_TabSeparatedFormat,
"TF_EX1_CommaSeparatedFormat": TF_EX1_CommaSeparatedFormat,
"Ex1_QA1_NavigationTests": Ex1_QA1_NavigationTests,
"Ex1_QA2_NavigationTests": Ex1_QA2_NavigationTests,
"Ex1_QA1_ColumnLookupTests": Ex1_QA1_ColumnLookupTests,
"Ex1_QA2_ColumnLookupTests": Ex1_QA2_ColumnLookupTests,
"Ex1_QA1_RowLookupTests": Ex1_QA1_RowLookupTests,
"Ex1_QA2_RowLookupTests": Ex1_QA2_RowLookupTests,
"Ex1_QA1_DataTypeLookupTests": Ex1_QA1_DataTypeLookupTests,
"Ex1_QA2_DataTypeLookupTests": Ex1_QA2_DataTypeLookupTests,
"Ex_TableReconstructionTests": Example_Reconstruction,
"Ex_TableTransposeTests": Example_TransposeTests,
"Ex_TableColumnReorderTests": Example_ColumnReorder,
"Ex_TableReconstructionTests1": Example_Reconstruction1
}
######################################

View File

@@ -1,703 +0,0 @@
import pandas as pd
from abc import ABC, abstractmethod
import itertools
from typing import Any, Callable, Iterator, List, Optional
from dataclasses import dataclass
import numpy as np
import json
import jsonlines
import os
import datetime
from prompts import *
from LLMCall import openapi_call_completions
from tabulate import tabulate
import random
import string
from collections import Counter
from itertools import combinations
from utils import Convert_back_to_df, num_tokens_from_string, stringify_serialized_df
from tqdm.auto import tqdm
class CustomJSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, np.integer):
return int(obj)
elif isinstance(obj, np.floating):
return float(obj)
elif isinstance(obj, np.ndarray):
return obj.tolist()
if isinstance(obj, pd.Series):
return obj.tolist()
elif isinstance(obj, (datetime.date, datetime.datetime)):
# Handle datetime objects
return obj.isoformat()
elif isinstance(obj, set):
# Handle sets
return list(obj)
return super().default(obj)
@dataclass
class TestCase:
question: str
expect: Any
TestType: str
RNG_SEED = 42
def get_random_state(random_state=None):
if random_state is None:
random_state = np.random.RandomState(seed=RNG_SEED)
return random_state
def create_combinations(entries: List) -> List:
result = []
i = 0
while i < len(entries):
if (len(entries) - i)not in [1, 2]:
if i < int(len(entries)/2):
lower_bound = 2
else:
lower_bound = 1
tuple_size = min(4, np.random.randint(
lower_bound, (len(entries) - i)))
else:
tuple_size = len(entries) - i
result.append(tuple(entries[i:i + tuple_size]))
i += tuple_size
return result
def combine_columns(df, combinations_list):
new_df = pd.DataFrame()
column_names = df.columns.to_list()
for en in combinations_list:
column_subset_names = list(np.array(column_names)[list(en)])
new_df[str("-----".join(column_subset_names))] = df[column_subset_names].apply(
lambda row: '-----'.join(map(str, row)), axis=1)
return new_df
class TestCaseGenerator(ABC):
def generate(
self,
df: pd.DataFrame,
random_state: Optional[np.random.RandomState] = None
) -> Iterator[TestCase]:
raise NotImplementedError()
def check(self, test: TestCase, answer: pd.DataFrame) -> bool:
expected_df = Convert_back_to_df("JsonFormat", str(test.expect))
expected_df = expected_df.astype(str)
if expected_df.shape == answer.shape:
return expected_df.equals(answer)
else:
return False
def metric_pass_k(self, test: TestCase, answer: List[pd.DataFrame], k: int) -> float:
expected_df = Convert_back_to_df("JsonFormat", str(test.expect))
expected_df = expected_df.astype(str)
boolean_answers = [expected_df.equals(
answer[i]) if expected_df.shape == answer[i].shape else False for i in range(len(answer))]
combinations_k = list(combinations(boolean_answers, k))
passed_at_k = 0
# Calculate the pass@k metric
for comb in combinations_k:
if any(comb):
passed_at_k += 1
pass_at_k_percentage = (passed_at_k / len(combinations_k))*100
return pass_at_k_percentage
def per_cell_accuracies(self, test: TestCase, answer: List[pd.DataFrame]) -> List[float]:
score_list = []
for ans in answer:
expected_df = Convert_back_to_df("JsonFormat", str(test.expect))
expected_df = expected_df.astype(str)
if expected_df.shape == ans.shape:
matching_cells = (expected_df == ans).sum().sum()
total_cells = expected_df.shape[0]*expected_df.shape[1]
per_cell_score = matching_cells/total_cells
else:
per_cell_score = 0.0
score_list.append(np.round(per_cell_score, 2))
return score_list
class NavigationTests(TestCaseGenerator):
def generate(self, df, random_state=None):
random_state = get_random_state(random_state)
cols = df.columns
indexes = df.index.to_list()
nrows = df.shape[0]
while True:
col_idx = random_state.choice(cols)
row_idx = random_state.randint(0, nrows)
question = f"What value is at row {indexes[row_idx]} and column {col_idx}?"
yield TestCase(question, df.iloc[row_idx][col_idx], "NavigationTests")
class ColumnLookupTests(TestCaseGenerator):
def generate(self, df, random_state=None):
random_state = get_random_state(random_state)
cols = df.columns
nrows = df.shape[0]
while True:
col_idx = random_state.choice(cols)
row_idx = random_state.randint(0, nrows)
value = df.iloc[row_idx][col_idx]
question = f"What column is the {value} in?"
_, col_indices = np.where(df.to_numpy() == value)
col_indices = list(set([cols[i] for i in col_indices.tolist()]))
yield TestCase(question, col_indices, "ColumnLookupTests")
class RowLookupTests(TestCaseGenerator):
def generate(self, df, random_state=None):
random_state = get_random_state(random_state)
cols = df.columns
indexes = df.index.to_list()
nrows = df.shape[0]
while True:
col_idx = random_state.choice(cols)
row_idx = random_state.randint(0, nrows)
value = df.iloc[row_idx][col_idx]
question = f"What row is the {value} in?"
row_indices, _ = np.where(df.to_numpy() == value)
row_indices = list(set(row_indices.tolist()))
row_indices_ = [indexes[v] for v in row_indices]
yield TestCase(question, row_indices_, "RowLookupTests")
class DataTypeLookupTests(TestCaseGenerator):
def generate(self, df, random_state=None):
df = df.copy(deep=True)
random_state = get_random_state(random_state)
indices = list(df.index)
transpose_bool = all(isinstance(item, str) for item in indices)
header = "column"
if transpose_bool:
df = df.T
header = "row"
cols = list(df.columns)
for col in cols*100:
question = f"What type (using Pandas datatype notation) is {header} {col}?"
answer = str(df.dtypes[col])
yield TestCase(question, answer, "DataTypeLookupTests")
class TableReconstructionTests(TestCaseGenerator):
def generate(self, df, random_state=None):
question = f"Can you reconstruct the table by deserializing the table above?"
answer = df.to_json(orient='index', index=True)
yield TestCase(question, answer, "TableReconstructionTests")
class TableReconstructionTests1(TestCaseGenerator):
def generate(self, df, random_state=None):
question = f"Can you reconstruct the table by deserializing the table above?"
answer = df.to_json(orient='index', index=True)
yield TestCase(question, answer, "TableReconstructionTests1")
class TableColumnReorderTests(TestCaseGenerator):
def generate(self, df, random_state=None):
random_state = get_random_state(random_state)
cols = list(df.columns)
while True:
random_state.shuffle(cols)
new_df = df[cols]
new_column_order = new_df.columns.to_list()
question = f"""Can you reorder the table such that the column are in this new order {str(new_column_order)}? Make sure to return the complete reordered table."""
answer = new_df.to_json(orient='index', index=True)
yield TestCase(question, answer, "ColumnShuffleTests")
class TableTransposeTests(TestCaseGenerator):
def generate(self, df, random_state=None):
question = f"""Can you transpose the table?"""
answer = df.T.to_json(orient='index', index=True)
yield TestCase(question, answer, "ColumnShuffleTests")
class TableOperation(ABC):
@abstractmethod
def modify(
self, df: pd.DataFrame, random_state: Optional[np.random.RandomState]
) -> Iterator[pd.DataFrame]:
raise NotImplementedError()
class ShuffleRows(TableOperation):
def modify(self, df, random_state=None):
df = df.copy(deep=True)
random_state = get_random_state(random_state)
while True:
yield df.sample(frac=1.0, random_state=random_state, replace=False)
class ShuffleColumns(TableOperation):
def modify(self, df, random_state=None):
df = df.copy(deep=True)
random_state = get_random_state(random_state)
cols = list(df.columns)
while True:
random_state.shuffle(cols)
yield df[cols]
class ArbitraryColumnNames(TableOperation):
def __init__(self, get_column_name: Optional[Callable[[int], str]] = None):
if get_column_name is None:
get_column_name = []
while len(get_column_name) <= 200:
arb_name = ''.join(random.choices(
string.ascii_letters + string.digits, k=np.random.randint(1, 11)))
if arb_name not in get_column_name:
get_column_name.append(arb_name)
self.get_column_name = get_column_name
def modify(self, df, random_state=None):
df = df.copy(deep=True)
while True:
new_columns = random.sample(self.get_column_name, len(df.columns))
df.columns = new_columns
yield df
class SequentialColumnNames(TableOperation):
def __init__(self, get_column_name: Optional[Callable[[int], str]] = None):
if get_column_name is None:
def get_column_name(col_idx): return f"col_{col_idx}"
self.get_column_name = get_column_name
def modify(self, df, random_state=None):
df = df.copy(deep=True)
random_state = get_random_state(random_state)
new_columns = [
self.get_column_name(idx) for idx in range(len(df.columns))
]
df.columns = new_columns
while True:
yield df.sample(frac=0.7,
random_state=random_state,
replace=False,
)
class OriginalData(TableOperation):
def modify(self, df, random_state=None):
df = df.copy(deep=True)
yield df
class ShuffleColumnNames(TableOperation):
def modify(self, df, random_state=None):
df = df.copy(deep=True)
random_state = get_random_state(random_state)
cols = list(df.columns)
while True:
random_state.shuffle(cols)
df.columns = cols
yield df
class SampleRows(TableOperation):
def __init__(self, fraction: float = 0.5, replace: bool = True):
self.fraction = fraction
self.replace = replace
def modify(self, df, random_state=None):
random_state = get_random_state(random_state)
while True:
yield df.sample(frac=self.fraction,
random_state=random_state,
replace=self.replace,
ignore_index=True)
class TransposeTable(TableOperation):
def modify(self, df, random_state=None):
df = df.copy(deep=True)
df = df.T
random_state = get_random_state(random_state)
while True:
yield df.sample(frac=0.7,
random_state=random_state,
replace=False,
)
class ColumnCluster(TableOperation):
def __init__(self, get_column_combination: Optional[Callable[[int], str]] = None):
if get_column_combination is None:
get_column_combination = create_combinations
self.get_column_combination = get_column_combination
def modify(self, df, random_state=None):
df = df.copy(deep=True)
while True:
combinations_list = self.get_column_combination(range(df.shape[1]))
new_df = pd.DataFrame()
column_names = df.columns.to_list()
for en in combinations_list:
column_subset_names = list(np.array(column_names)[list(en)])
new_df[str("-----".join(column_subset_names))] = df[column_subset_names].apply(
lambda row: '-----'.join(map(str, row)), axis=1)
yield new_df
class SerializeTable(TableOperation):
def modify(self, df, random_state=None):
df = df.copy(deep=True)
new_df = pd.DataFrame()
new_df[""] = df.apply(lambda row: ','.join(
[f'{col}:{value}' for col, value in row.items()]), axis=1)
random_state = get_random_state(random_state)
while True:
yield new_df.sample(frac=0.7,
random_state=random_state,
replace=False,
)
class CompositeTableOperation(object):
def __init__(self, table_ops: List[TableOperation]):
self.table_ops = table_ops
def modify(self, df, random_state=None):
random_state = get_random_state(random_state)
def recursive_app(val, table_ops, random_state):
if len(table_ops) == 0:
yield val
else:
table_op = table_ops[0]
for new_val in table_op.modify(val, random_state=random_state):
for recursed_val in recursive_app(new_val, table_ops[1:],
random_state):
yield recursed_val
for df_variant in recursive_app(df, self.table_ops, random_state):
yield df_variant
class TableFormats(ABC):
@abstractmethod
def formatting(
self, df: pd.DataFrame) -> pd.DataFrame:
raise NotImplementedError()
class MarkdownFormat(TableFormats):
def formatting(self, df):
return tabulate(df, headers='keys', tablefmt='pipe', showindex=True)
class DataMatrixFormat(TableFormats):
def formatting(self, df):
df_ = pd.DataFrame()
df_[""] = df.index.to_list()
df_[df.columns] = df.values.tolist()
matrix = df_.columns.tolist() # Get column headers as a list
matrix_data = df_.values.tolist() # Get data as a list of lists
matrix_data.insert(0, matrix)
return matrix_data
class JsonFormat(TableFormats):
def formatting(self, df):
return df.to_json(orient='index', index=True)
class DFloaderFormat(TableFormats):
def formatting(self, df):
str_df_loader = "pd.DataFrame({[data]}, index=[indices])"
data = ""
indi = str(df.index.to_list())
for d in df.columns:
data += f"'{str(d)}'" + " : " + str(df[d].to_list()) + ", "
data = data[:-2]
str_df_loader = str_df_loader.replace(
"[data]", data).replace("[indices]", indi)
return str_df_loader
class HTMLFormat(TableFormats):
def formatting(self, df):
html = df.to_html(index=True)
return html
class HTMLNoSpaceFormat(TableFormats):
def formatting(self, df):
html = df.to_html(index=True)
return str(html).replace("\t", "").replace("\n", "").replace(" ", "")
class TabSeparatedFormat(TableFormats):
def formatting(self, df: pd.DataFrame) -> str:
return df.to_csv(sep='\t', index=True)
class CommaSeparatedFormat(TableFormats):
def formatting(self, df: pd.DataFrame) -> str:
return df.to_csv(index=True)
class SQLQueryFormat(TableFormats):
def formatting(self, df):
return
def gather_Examples_Prompt(tab_format, test_case):
TF, TC = tab_format.__class__.__name__, test_case.__class__.__name__
if TC in ["TableColumnReorderTests", "TableReconstructionTests", "TableTransposeTests", "TableReconstructionTests1"]:
Examples = EXAMPLE_Dictionary[f"Ex_{TC}"](
tab_format, TransposeTable, SerializeTable, ColumnCluster)
else:
Examples = EXAMPLES.replace("[Data_format_example1]", EXAMPLE_Dictionary[f"TF_EX1_{TF}"])\
.replace("[QA1]", EXAMPLE_Dictionary[f"Ex1_QA1_{TC}"])\
.replace("[Data_format_example2]", EXAMPLE_Dictionary[f"TF_EX1_{TF}"])\
.replace("[QA2]", EXAMPLE_Dictionary[f"Ex1_QA2_{TC}"])
return Examples
class LLMTableLearner():
def get_prompt(self, examples, df_format, question: str, temperature: float) -> Any:
prompt = DATA_QUES_INSTRUCTION.replace("[Ques]", str(question)).replace(
"[Data_format]", str(df_format)).replace("[Example]", examples)
total_tokens = num_tokens_from_string(prompt)
no_of_token_left = 4051-total_tokens
expected_token = num_tokens_from_string(str(df_format))+51
maxTok = min(total_tokens, no_of_token_left)
num_n = 3
modelName = "text-davinci-003"
prompt_cache = {"model": modelName,
"prompt": prompt,
"temperature": temperature,
"max_tokens": maxTok,
"top_p": 1,
"frequency_penalty": 0,
"presence_penalty": 0,
"n": num_n,
"logprobs": 1}
return prompt_cache
def get_answer(self, examples, df_format, question: str, temperature: float, open_api_key: str) -> Any:
prompt = DATA_QUES_INSTRUCTION.replace("[Ques]", str(question)).replace(
"[Data_format]", str(df_format)).replace("[Example]", examples)
total_tokens = num_tokens_from_string(prompt)
no_of_token_left = 4051-total_tokens
expected_token = num_tokens_from_string(str(df_format))+51
maxTok = min(total_tokens, no_of_token_left)
num_n = 3
modelName = "text-davinci-003"
answer, cache = openapi_call_completions(
prompt, modelName=modelName, temp=temperature, maxTok=maxTok, num_n=num_n, open_api_key=open_api_key)
return answer, cache
class TableExperimentSuite(object):
def __init__(self, llm_learner: LLMTableLearner,
table_formats: List[TableFormats],
table_ops: List[TableOperation],
test_gens: List[TestCaseGenerator],
cache_save_path: str,
open_api_key: str):
self.llm = llm_learner
self.table_formats = table_formats
self.table_ops = table_ops
self.test_gens = test_gens
self.cache_save_path = cache_save_path
self.open_api_key = open_api_key
def run_experiment(self,
df,
per_table_op: int = 10,
per_test_gen: int = 10,
save_cache=True):
results = []
cacahe_all_dict = []
print("OPEN AI KEY USED: ", self.open_api_key)
if save_cache:
current_datetime = datetime.datetime.now()
formatted_datetime = current_datetime.strftime("%Y_%m_%d_%H_%M_%S")
cache_path = os.path.join(self.cache_save_path, f"cache_logger")
if not os.path.exists(cache_path):
os.makedirs(cache_path)
cache_path2 = os.path.join(
self.cache_save_path, f"cache_logger_all")
if not os.path.exists(cache_path2):
os.makedirs(cache_path2)
save_catch_file = os.path.join(
cache_path, f"macro_test_log_{formatted_datetime}.json")
save_catch_file2 = os.path.join(
cache_path, f"macro_test_log_{formatted_datetime}.json")
with open(save_catch_file, mode="w") as writer:
pass
table_ops = [OriginalData(), SampleRows(),
ColumnCluster(), SerializeTable()]
table_ops_test = {"ArbitraryColumnNames": {"TableColumnReorderTests": [5, 10],
"TableTransposeTests": [50, 1],
"TableReconstructionTests1": [50, 1]},
"ShuffleRows": {"TableColumnReorderTests": [5, 10],
"TableTransposeTests": [50, 1],
"TableReconstructionTests1": [50, 1]},
"ColumnCluster": {"TableColumnReorderTests": [5, 10],
"TableTransposeTests": [50, 1],
"TableReconstructionTests1": [50, 1]},
"ShuffleColumns": {"TableColumnReorderTests": [5, 10],
"TableTransposeTests": [50, 1],
"TableReconstructionTests1": [50, 1]},
"ShuffleColumnNames": {"TableColumnReorderTests": [5, 10],
"TableTransposeTests": [50, 1],
"TableReconstructionTests1": [50, 1]},
"TransposeTable": {"TableColumnReorderTests": [5, 10],
"TableTransposeTests": [50, 1],
"TableReconstructionTests1": [50, 1]},
"SequentialColumnNames": {"TableColumnReorderTests": [5, 10],
"TableTransposeTests": [50, 1],
"TableReconstructionTests1": [50, 1]}}
table_ops_test = {"OriginalData": ["TableColumnReorderTests"],
"SampleRows": ["TableTransposeTests"],
"ColumnCluster": ["TableReconstructionTests"],
"SerializeTable": ["TableReconstructionTests", "TableReconstructionTests1"]}
for table_op in tqdm(self.table_ops):
table_op_name = table_op.__class__.__name__
count_df_variant = 0
for df_variant in tqdm(itertools.islice(table_op.modify(df),
per_table_op)):
df_feed_test = df_variant
count_df_variant += 1
for gen in tqdm(self.test_gens):
if table_op_name in ["ColumnCluster", "SerializeTable"] and gen.__class__.__name__ == "TableReconstructionTests1":
df_feed_test = df
for test in itertools.islice(gen.generate(df_feed_test),
per_test_gen):
for tab_format in tqdm(self.table_formats):
temperature_list = [0]
for temp in temperature_list:
try:
examples = gather_Examples_Prompt(
tab_format, gen)
df_in_desired_format = tab_format.formatting(
df_variant)
prompt_cache_per_call = self.llm.get_prompt(
examples, df_in_desired_format, test.question, temp)
if gen.__class__.__name__ == "TableReconstructionTests1":
df_in_desired_format = stringify_serialized_df(
df_variant)
answer, cache_per_call = self.llm.get_answer(
examples, df_in_desired_format, test.question, temp, self.open_api_key)
error = None
try:
answer_changed_format = [Convert_back_to_df(
tab_format.__class__.__name__, a) for a in answer]
result = gen.check(
test, answer_changed_format[0])
pass_1 = gen.metric_pass_k(
test, answer_changed_format, 1)
pass_3 = gen.metric_pass_k(
test, answer_changed_format, 3)
pass_5 = gen.metric_pass_k(
test, answer_changed_format, 5)
pass_10 = gen.metric_pass_k(
test, answer, 10)
pass_15 = gen.metric_pass_k(
test, answer, 15)
per_cell_accracies = gen.per_cell_accuracies(
test, answer_changed_format)
per_cell_accracies_top1 = per_cell_accracies[0]
error = None
except Exception as Err:
result = None
pass_1 = pass_3 = pass_5 = pass_10 = pass_15 = None
error = str(Err)
per_cell_accracies = []
per_cell_accracies_top1 = None
print(f"error {Err} encountered")
except Exception as Err:
answer, cache_per_call = None, None
error = str(Err)
print(f"error {Err} encountered")
cache_all = {"model-Temperature": str(temp),
"tab_format": str(tab_format.__class__.__name__),
"table_op": str(table_op_name),
"gen": str(gen.__class__.__name__),
"test-Question": test.question,
"test-expected-answer": test.expect,
"test-TestType": str(test.TestType),
"test": str(test),
"answer": answer,
"result_match_top1": result,
"error": str(error),
"pass_1": str(pass_1),
"pass_3": str(pass_3),
"pass_5": str(pass_5),
"pass_10": str(pass_10),
"pass_15": str(pass_15),
"per_cell_accracies": str(per_cell_accracies),
"prompt_cache_per_call": prompt_cache_per_call,
"LLMOutput": cache_per_call}
results.append((temp, tab_format.__class__.__name__, table_op.__class__.__name__, gen.__class__.__name__, test.question, test.expect,
test.TestType, pass_1, pass_3, pass_5, pass_10, pass_15, test, answer, answer, per_cell_accracies_top1, per_cell_accracies, result, error))
# cacahe_all_dict.append(cache_all)
if save_cache:
with open(save_catch_file, mode="a") as file:
serializable_entry = json.dumps(
cache_all, cls=CustomJSONEncoder)
# Add a newline separator
file.write(serializable_entry + '\n')
return results

View File

@@ -1,612 +0,0 @@
import pandas as pd
from abc import ABC, abstractmethod
import itertools
from typing import Any, Callable, Iterator, List, Optional
from dataclasses import dataclass
import numpy as np
import json
import jsonlines
import os
import datetime
from prompts import *
from LLMCall import openapi_call_completions
from tabulate import tabulate
import random
import string
from collections import Counter
from itertools import combinations
from utils import num_tokens_from_string
class CustomJSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, np.integer):
return int(obj)
elif isinstance(obj, np.floating):
return float(obj)
elif isinstance(obj, np.ndarray):
return obj.tolist()
if isinstance(obj, pd.Series):
return obj.tolist()
elif isinstance(obj, (datetime.date, datetime.datetime)):
# Handle datetime objects
return obj.isoformat()
elif isinstance(obj, set):
# Handle sets
return list(obj)
return super().default(obj)
@dataclass
class TestCase:
question: str
expect: Any
TestType: str
RNG_SEED = 42
def get_random_state(random_state=None):
if random_state is None:
random_state = np.random.RandomState(seed=RNG_SEED)
return random_state
def create_combinations(entries: List) -> List:
result = []
i = 0
while i < len(entries):
if (len(entries) - i)not in [1, 2]:
if i < int(len(entries)/2):
lower_bound = 2
else:
lower_bound = 1
tuple_size = min(4, np.random.randint(
lower_bound, (len(entries) - i)))
else:
tuple_size = len(entries) - i
result.append(tuple(entries[i:i + tuple_size]))
i += tuple_size
return result
def combine_columns(df, combinations_list):
new_df = pd.DataFrame()
column_names = df.columns.to_list()
for en in combinations_list:
column_subset_names = list(np.array(column_names)[list(en)])
new_df[str("-----".join(column_subset_names))] = df[column_subset_names].apply(
lambda row: '-----'.join(map(str, row)), axis=1)
return new_df
class TestCaseGenerator(ABC):
def generate(
self,
df: pd.DataFrame,
random_state: Optional[np.random.RandomState] = None
) -> Iterator[TestCase]:
raise NotImplementedError()
def check(self, test: TestCase, answer: Any) -> bool:
if test.TestType in ["ColumnLookupTests", "RowLookupTests"]:
if not isinstance(test.expect, list):
print("Error: the expected answer should be a list, not a string")
return False
expected_values = list(map(str, test.expect))
matches = set(expected_values).intersection({answer})
return len(matches) > 0
return str(test.expect) == answer
def metric_pass_k(self, test: TestCase, answer: Any, k: int) -> float:
if not isinstance(answer, list):
raise ValueError("Answer should be a list for pass@k metric.")
# Convert the test.expect to a list if it's not already
expected_values = test.expect if isinstance(
test.expect, list) else [test.expect]
expected_values = list(map(str, expected_values))
boolean_answers = [
True if answer[i] in expected_values else False for i in range(len(answer))]
combinations_k = list(combinations(boolean_answers, k))
passed_at_k = 0
# Calculate the pass@k metric
for comb in combinations_k:
if any(comb):
passed_at_k += 1
pass_at_k_percentage = (passed_at_k / len(combinations_k))*100
return pass_at_k_percentage
class NavigationTests(TestCaseGenerator):
def generate(self, df, random_state=None):
random_state = get_random_state(random_state)
cols = df.columns
indexes = df.index.to_list()
nrows = df.shape[0]
while True:
col_idx = random_state.choice(cols)
row_idx = random_state.randint(0, nrows)
question = f"What value is at row {indexes[row_idx]} and column {col_idx}?"
yield TestCase(question, df.iloc[row_idx][col_idx], "NavigationTests")
class ColumnLookupTests(TestCaseGenerator):
def generate(self, df, random_state=None):
random_state = get_random_state(random_state)
cols = df.columns
nrows = df.shape[0]
while True:
col_idx = random_state.choice(cols)
row_idx = random_state.randint(0, nrows)
value = df.iloc[row_idx][col_idx]
question = f"What column is the {value} in?"
_, col_indices = np.where(df.to_numpy() == value)
col_indices = list(set([cols[i] for i in col_indices.tolist()]))
yield TestCase(question, col_indices, "ColumnLookupTests")
class RowLookupTests(TestCaseGenerator):
def generate(self, df, random_state=None):
random_state = get_random_state(random_state)
cols = df.columns
indexes = df.index.to_list()
nrows = df.shape[0]
while True:
col_idx = random_state.choice(cols)
row_idx = random_state.randint(0, nrows)
value = df.iloc[row_idx][col_idx]
question = f"What row is the {value} in?"
row_indices, _ = np.where(df.to_numpy() == value)
row_indices = list(set(row_indices.tolist()))
row_indices_ = [indexes[v] for v in row_indices]
yield TestCase(question, row_indices_, "RowLookupTests")
class DataTypeLookupTests(TestCaseGenerator):
def generate(self, df, random_state=None):
df = df.copy(deep=True)
random_state = get_random_state(random_state)
indices = list(df.index)
transpose_bool = all(isinstance(item, str) for item in indices)
header = "column"
if transpose_bool:
df = df.T
header = "row"
cols = list(df.columns)
for col in cols*100:
question = f"What type (using Pandas datatype notation) is {header} {col}?"
answer = str(df.dtypes[col])
yield TestCase(question, answer, "DataTypeLookupTests")
class TableReconstructionTests(TestCaseGenerator):
def generate(self, df, random_state=None):
question = f"Can you reconstruct the table in a json 'index' format by deserializing the table above?"
answer = df.to_json(orient='index', index=True)
yield TestCase(question, answer, "TableReconstructionTest")
class TableColumnReorderTests(TestCaseGenerator):
def generate(self, df, random_state=None):
random_state = get_random_state(random_state)
cols = list(df.columns)
while True:
random_state.shuffle(cols)
new_df = df[cols]
new_column_order = str(new_df.columns.to_list())
question = f"""Can you reorder the table such that the column are in this new order {new_column_order}?
Return the reordered table in json 'index' format. """
answer = new_df.to_json(orient='index', index=True)
yield TestCase(question, answer, "ColumnShuffleTests")
class TableTransposeTests(TestCaseGenerator):
def generate(self, df, random_state=None):
question = f"""Can you transpose the table? Return the transposed table in json 'index' format."""
        # The expected answer is the transposed table.
        answer = df.T.to_json(orient='index', index=True)
        yield TestCase(question, answer, "TableTransposeTests")
class TableOperation(ABC):
@abstractmethod
def modify(
self, df: pd.DataFrame, random_state: Optional[np.random.RandomState]
) -> Iterator[pd.DataFrame]:
raise NotImplementedError()
class ShuffleRows(TableOperation):
def modify(self, df, random_state=None):
df = df.copy(deep=True)
random_state = get_random_state(random_state)
while True:
yield df.sample(frac=1.0, random_state=random_state, replace=False)
class ShuffleColumns(TableOperation):
def modify(self, df, random_state=None):
df = df.copy(deep=True)
random_state = get_random_state(random_state)
cols = list(df.columns)
while True:
random_state.shuffle(cols)
yield df[cols]
class ArbitraryColumnNames(TableOperation):
    def __init__(self, get_column_name: Optional[List[str]] = None):
        if get_column_name is None:
            # Pool of random alphanumeric names (1 to 10 characters each) to sample from.
            get_column_name = [''.join(random.choices(
                string.ascii_letters + string.digits, k=np.random.randint(1, 11))) for _ in range(200)]
        self.get_column_name = get_column_name
def modify(self, df, random_state=None):
df = df.copy(deep=True)
while True:
new_columns = random.sample(self.get_column_name, len(df.columns))
df.columns = new_columns
yield df
class SequentialColumnNames(TableOperation):
def __init__(self, get_column_name: Optional[Callable[[int], str]] = None):
if get_column_name is None:
def get_column_name(col_idx): return f"col_{col_idx}"
self.get_column_name = get_column_name
def modify(self, df, random_state=None):
df = df.copy(deep=True)
random_state = get_random_state(random_state)
new_columns = [
self.get_column_name(idx) for idx in range(len(df.columns))
]
df.columns = new_columns
yield df
class OriginalData(TableOperation):
def modify(self, df, random_state=None):
df = df.copy(deep=True)
yield df
class ShuffleColumnNames(TableOperation):
def modify(self, df, random_state=None):
df = df.copy(deep=True)
random_state = get_random_state(random_state)
cols = list(df.columns)
while True:
random_state.shuffle(cols)
df.columns = cols
yield df
class SampleRows(TableOperation):
def __init__(self, fraction: float = 0.5, replace: bool = True):
self.fraction = fraction
self.replace = replace
def modify(self, df, random_state=None):
random_state = get_random_state(random_state)
while True:
yield df.sample(frac=self.fraction,
random_state=random_state,
replace=self.replace)
class TransposeTable(TableOperation):
def modify(self, df, random_state=None):
df = df.copy(deep=True)
yield df.T
class ColumnCluster(TableOperation):
    def __init__(self, get_column_combination: Optional[Callable[..., List[tuple]]] = None):
if get_column_combination is None:
get_column_combination = create_combinations
self.get_column_combination = get_column_combination
def modify(self, df, random_state=None):
df = df.copy(deep=True)
while True:
combinations_list = self.get_column_combination(range(df.shape[1]))
new_df = pd.DataFrame()
column_names = df.columns.to_list()
for en in combinations_list:
column_subset_names = list(np.array(column_names)[list(en)])
new_df[str("-----".join(column_subset_names))] = df[column_subset_names].apply(
lambda row: '-----'.join(map(str, row)), axis=1)
yield new_df
class SerializeTable(TableOperation):
def modify(self, df, random_state=None):
df = df.copy(deep=True)
new_df = pd.DataFrame()
new_df[""] = df.apply(lambda row: ','.join(
[f'{col}:{value}' for col, value in row.items()]), axis=1)
yield new_df
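# Illustrative note (not in the original code): SerializeTable flattens every row into a single
# "col:value" string, e.g. a row with Name "Alice" and Age 25 becomes "Name:Alice,Age:25".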
class CompositeTableOperation(object):
def __init__(self, table_ops: List[TableOperation]):
self.table_ops = table_ops
def modify(self, df, random_state=None):
random_state = get_random_state(random_state)
def recursive_app(val, table_ops, random_state):
if len(table_ops) == 0:
yield val
else:
table_op = table_ops[0]
for new_val in table_op.modify(val, random_state=random_state):
for recursed_val in recursive_app(new_val, table_ops[1:],
random_state):
yield recursed_val
for df_variant in recursive_app(df, self.table_ops, random_state):
yield df_variant
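# Illustrative usage sketch (not in the original code):
# CompositeTableOperation([ShuffleRows(), SequentialColumnNames()]).modify(df) yields
# row-shuffled variants whose columns are then renamed col_0, col_1, ...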
class TableFormats(ABC):
@abstractmethod
def formatting(
self, df: pd.DataFrame) -> pd.DataFrame:
raise NotImplementedError()
class MarkdownFormat(TableFormats):
def formatting(self, df):
return tabulate(df, headers='keys', tablefmt='pipe', showindex=True)
class DataMatrixFormat(TableFormats):
def formatting(self, df):
df_ = pd.DataFrame()
df_[""] = df.index.to_list()
df_[df.columns] = df.values.tolist()
matrix = df_.columns.tolist() # Get column headers as a list
matrix_data = df_.values.tolist() # Get data as a list of lists
matrix_data.insert(0, matrix)
return matrix_data
class JsonFormat(TableFormats):
def formatting(self, df):
return df.to_json(orient='index', index=True)
class DFloaderFormat(TableFormats):
def formatting(self, df):
str_df_loader = "pd.DataFrame({[data]}, index=[indices])"
data = ""
indi = str(df.index.to_list())
for d in df.columns:
data += str(d) + " : " + str(df[d].to_list()) + ", "
data = data[:-2]
str_df_loader = str_df_loader.replace(
"[data]", data).replace("[indices]", indi)
return str_df_loader
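# Illustrative note (not in the original code): DFloaderFormat renders the table as a pandas
# constructor call string of the form pd.DataFrame({<column> : <list of values>, ...}, index=[...]).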
class HTMLFormat(TableFormats):
def formatting(self, df):
html = df.to_html(index=True)
return html
class HTMLNoSpaceFormat(TableFormats):
def formatting(self, df):
html = df.to_html(index=True)
return str(html).replace("\t", "").replace("\n", "").replace(" ", "")
class TabSeparatedFormat(TableFormats):
def formatting(self, df: pd.DataFrame) -> str:
return df.to_csv(sep='\t', index=True)
class CommaSeparatedFormat(TableFormats):
def formatting(self, df: pd.DataFrame) -> str:
return df.to_csv(index=True)
class SQLQueryFormat(TableFormats):
    def formatting(self, df):
        # Not implemented in this version of the benchmark; kept as a placeholder format.
        raise NotImplementedError("SQLQueryFormat.formatting is not implemented")
def gather_Examples_Prompt(tab_format, test_case):
TF, TC = tab_format.__class__.__name__, test_case.__class__.__name__
Examples = EXAMPLES.replace("[Data_format_example1]", EXAMPLE_Dictionary[f"TF_EX1_{TF}"])\
.replace("[QA1]", EXAMPLE_Dictionary[f"Ex1_QA1_{TC}"])\
.replace("[Data_format_example2]", EXAMPLE_Dictionary[f"TF_EX1_{TF}"])\
.replace("[QA2]", EXAMPLE_Dictionary[f"Ex1_QA2_{TC}"])
return Examples
class LLMTableLearner():
def get_prompt(self, examples, df_format, question: str, temperature: float) -> Any:
prompt = DATA_QUES_INSTRUCTION.replace("[Ques]", str(question)).replace(
"[Data_format]", str(df_format)).replace("[Example]", examples)
total_tokens = num_tokens_from_string(prompt)
maxTok = 100
num_n = 15
modelName = "text-davinci-003"
prompt_cache = {"model": modelName,
"prompt": prompt,
"temperature": temperature,
"max_tokens": maxTok,
"top_p": 1,
"frequency_penalty": 0,
"presence_penalty": 0,
"n": num_n,
"logprobs": 1}
return prompt_cache
def get_answer(self, examples, df_format, question: str, temperature: float, open_api_key: str) -> Any:
prompt = DATA_QUES_INSTRUCTION.replace("[Ques]", str(question)).replace(
"[Data_format]", str(df_format)).replace("[Example]", examples)
answer, cache = openapi_call_completions(
prompt, modelName="text-davinci-003", temp=temperature, maxTok=100, num_n=15, open_api_key=open_api_key)
return answer, cache
class TableExperimentSuite(object):
    def __init__(self, llm_learner: LLMTableLearner,
table_formats: List[TableFormats],
table_ops: List[TableOperation],
test_gens: List[TestCaseGenerator],
cache_save_path: str,
open_api_key: str):
self.llm = llm_learner
self.table_formats = table_formats
self.table_ops = table_ops
self.test_gens = test_gens
self.cache_save_path = cache_save_path
self.open_api_key = open_api_key
def run_experiment(self,
df,
per_table_op: int = 10,
per_test_gen: int = 10,
save_cache=True):
results = []
        cache_all_dict = []
if save_cache:
current_datetime = datetime.datetime.now()
formatted_datetime = current_datetime.strftime("%Y_%m_%d_%H_%M_%S")
cache_path = os.path.join(self.cache_save_path, f"cache_logger")
if not os.path.exists(cache_path):
os.makedirs(cache_path)
cache_path2 = os.path.join(
self.cache_save_path, f"cache_logger_all")
if not os.path.exists(cache_path2):
os.makedirs(cache_path2)
            save_cache_file = os.path.join(
                cache_path, f"micro_test_log_{formatted_datetime}.json")
            save_cache_file2 = os.path.join(
                cache_path2, f"micro_test_log_{formatted_datetime}.json")
            # Create/truncate the per-call log file so later appends start from an empty file.
            with open(save_cache_file, mode="w") as writer:
                pass
for table_op in self.table_ops:
for df_variant in itertools.islice(table_op.modify(df),
per_table_op):
df_variant_for_test = df_variant
if table_op.__class__.__name__ in ["ColumnCluster", "SerializeTable"]:
df_variant_for_test = df
for gen in self.test_gens:
if table_op.__class__.__name__ in ["OriginalData", "SequentialColumnNames", "TransposeTable", "SerializeTable"]:
no_of_test = per_test_gen*per_table_op
else:
no_of_test = per_test_gen
for test in itertools.islice(gen.generate(df_variant_for_test),
no_of_test):
for tab_format in self.table_formats:
temperature_list = [0]
for temp in temperature_list:
try:
examples = gather_Examples_Prompt(
tab_format, gen)
prompt_cache_per_call = self.llm.get_prompt(
examples, tab_format.formatting(df_variant), test.question, temp)
answer, cache_per_call = self.llm.get_answer(examples, tab_format.formatting(
df_variant), test.question, temp, self.open_api_key)
result = gen.check(test, answer[0])
pass_1 = gen.metric_pass_k(test, answer, 1)
pass_3 = gen.metric_pass_k(test, answer, 3)
pass_5 = gen.metric_pass_k(test, answer, 5)
pass_10 = gen.metric_pass_k(
test, answer, 10)
pass_15 = gen.metric_pass_k(
test, answer, 15)
error = None
                                except Exception as Err:
                                    answer, cache_per_call = None, None
                                    prompt_cache_per_call = None
                                    result = None
                                    pass_1 = pass_3 = pass_5 = pass_10 = pass_15 = None
                                    error = str(Err)
                                    print(f"error {Err} encountered")
cache_all = {"model-Temperature": str(temp),
"tab_format": str(tab_format.__class__.__name__),
"table_op": str(table_op.__class__.__name__),
"gen": str(gen.__class__.__name__),
"test-Question": test.question,
"test-expected-answer": test.expect,
"test-TestType": str(test.TestType),
"test": str(test),
"answer": answer,
"result_match_top1": result,
"error": str(error),
"pass_1": str(pass_1),
"pass_3": str(pass_3),
"pass_5": str(pass_5),
"pass_10": str(pass_10),
"pass_15": str(pass_15),
"prompt_cache_per_call": prompt_cache_per_call,
"LLMOutput": cache_per_call}
                                results.append((temp, tab_format.__class__.__name__, table_op.__class__.__name__, gen.__class__.__name__, test.question,
                                                test.expect, test.TestType, pass_1, pass_3, pass_5, pass_10, pass_15, test, answer,
                                                answer[0] if answer else None, result, error))
                                cache_all_dict.append(cache_all)
                                if save_cache:
                                    with open(save_cache_file, mode="a") as file:
                                        serializable_entry = json.dumps(
                                            cache_all, cls=CustomJSONEncoder)
                                        file.write(serializable_entry + '\n')
        if save_cache:
            # Write the full cache once after all loops have finished.
            with open(save_cache_file2, mode="w") as file:
                for result_entry in cache_all_dict:
                    serializable_entry = json.dumps(
                        result_entry, cls=CustomJSONEncoder)
                    file.write(serializable_entry + '\n')
return results
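# Illustrative usage sketch (not in the original code); the argument values below are assumptions:
# suite = TableExperimentSuite(LLMTableLearner(), [MarkdownFormat()], [OriginalData()],
#                              [NavigationTests()], cache_save_path="cache", open_api_key="...")
# results = suite.run_experiment(df, per_table_op=1, per_test_gen=1, save_cache=False)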

View file

@ -1,74 +0,0 @@
import tiktoken
from io import StringIO
import ast
import pandas as pd
def Convert_back_to_df(Format: str, data_string: str):
if Format == "JsonFormat":
return pd.DataFrame(ast.literal_eval(data_string)).T
if Format == "MarkdownFormat":
extracted_df = pd.read_csv(
StringIO(data_string.replace(' ', '')), # Get rid of whitespaces
sep='|',
index_col=1
).dropna(
axis=1,
how='all'
).iloc[1:]
extracted_df_ = extracted_df.map(
lambda x: x.strip() if isinstance(x, str) else x)
extracted_df_.columns = [col.strip() for col in extracted_df.columns]
return extracted_df_
if Format == "DFloaderFormat":
start_inx = data_string.find("pd.DataFrame(")+len("pd.DataFrame(")
stop_idx = data_string.find(", index=[")
        dict_data_part = ast.literal_eval(data_string[start_inx:stop_idx])
list_index = ast.literal_eval(data_string[stop_idx+len(", index="):-1])
return pd.DataFrame(dict_data_part, index=list_index)
if Format == "DataMatrixFormat":
Matrix = ast.literal_eval(data_string.replace("\n", ""))
matrix_df = pd.DataFrame(Matrix)
matrix_df.index = matrix_df[0].to_list()
matrix_df = matrix_df.drop(columns=[0])
matrix_df.columns = matrix_df.iloc[0, :].tolist()
matrix_df = matrix_df.iloc[1:, :]
return matrix_df
if Format == "CommaSeparatedFormat":
dff = pd.read_csv(StringIO(data_string), sep=",")
index_col = dff.columns[0]
dff.index = dff[index_col].to_list()
dff = dff.drop(columns=[index_col])
return dff
if Format == "TabSeparatedFormat":
dff = pd.read_csv(StringIO(data_string), sep="\t")
index_col = dff.columns[0]
dff.index = dff[index_col].to_list()
dff = dff.drop(columns=[index_col])
return dff
if Format in ["HTMLNoSpaceFormat", "HTMLFormat"]:
dff = pd.read_html(StringIO(data_string))[0]
index_col = dff.columns[0]
dff.index = dff[index_col].to_list()
dff = dff.drop(columns=[index_col])
return dff
def num_tokens_from_string(string: str, encoding_name: str = "p50k_base") -> int:
"""Returns the number of tokens in a text string."""
encoding = tiktoken.get_encoding(encoding_name)
num_tokens = len(encoding.encode(string))
return num_tokens
num_tokens_from_string("tiktoken is great!", "cl100k_base")
def stringify_serialized_df(serialized_df: pd.DataFrame):
return "\n".join([v[0] for v in serialized_df.values])

View file

@ -1,173 +0,0 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"%cd ../code_"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"gather": {
"logged": 1694699855951
}
},
"outputs": [],
"source": [
"from tableTestingMicroScript import *\n",
"import pandas as pd\n",
"import os\n",
"import ast\n",
"from utils import num_tokens_from_string"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from prompts import EXAMPLES, EXAMPLE_Dictionary\n",
"import pandas as pd\n",
"from utils import num_tokens_from_string\n",
"\n",
"\n",
"def gather_Examples_Prompt(tab_format, test_case):\n",
"\n",
" TF, TC = tab_format.__class__.__name__, test_case\n",
" Examples = EXAMPLES.replace(\"[Data_format_example1]\", EXAMPLE_Dictionary[f\"TF_EX1_{TF}\"])\\\n",
" .replace(\"[QA1]\", EXAMPLE_Dictionary[f\"Ex1_QA1_{TC}\"])\\\n",
" .replace(\"[Data_format_example2]\", EXAMPLE_Dictionary[f\"TF_EX1_{TF}\"])\\\n",
" .replace(\"[QA2]\", EXAMPLE_Dictionary[f\"Ex1_QA2_{TC}\"])\n",
" return Examples\n",
"\n",
"\n",
"def get_max_no_of_rows_feedable(df, FormatFunction, Micro=True):\n",
"\n",
" per_test = []\n",
" test_case = [\"NavigationTests\", \"ColumnLookupTests\",\n",
" \"RowLookupTests\", \"DataTypeLookupTests\"]\n",
"\n",
" for tt in test_case:\n",
" exp = gather_Examples_Prompt(FormatFunction, tt)\n",
"\n",
" for i in range(min(df.shape[0], 100), 2, -1):\n",
" if Micro:\n",
" total_token = 30 + num_tokens_from_string(exp)+11+num_tokens_from_string(\n",
" str(FormatFunction.formatting(df.head(i))))+100\n",
" else:\n",
" total_token = 100 + num_tokens_from_string(exp)+11+num_tokens_from_string(\n",
" str(FormatFunction.formatting(df.head(i))))*2.5\n",
"\n",
" if total_token < 4081:\n",
"\n",
" per_test.append(i)\n",
" break\n",
" return (min(per_test))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"benchmark_dataset"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"gather": {
"logged": 1694736239538
}
},
"outputs": [],
"source": [
"\n",
"benchmark_data_path = \"../resources/BenchmarkDataset\"\n",
"benchmark_dataset = [f for f in os.listdir(\n",
" benchmark_data_path) if f.endswith(\".csv\")]\n",
"benchmark_save_path = \"../resources/BenchmarkResults/BenchmarkResults_Micro_tests/\"\n",
"if not os.path.exists(benchmark_save_path):\n",
" os.makedirs(benchmark_save_path)\n",
"open_api_key = os.env(\"OPENAI_API_KEY\")\n",
"for bd in benchmark_dataset:\n",
" print(bd)\n",
" df = pd.read_csv(os.path.join(benchmark_data_path, bd))\n",
" save_path = os.path.join(benchmark_save_path, bd)\n",
" if not os.path.exists(save_path):\n",
" os.makedirs(save_path)\n",
"\n",
" table_formats = [HTMLFormat(), HTMLNoSpaceFormat(), MarkdownFormat(), JsonFormat(\n",
" ), DataMatrixFormat(), DFloaderFormat(), TabSeparatedFormat(), CommaSeparatedFormat()],\n",
" max_rows = min([get_max_no_of_rows_feedable(df, format)\n",
" for format in table_formats])\n",
"\n",
" df = df.head(max_rows)\n",
" print(df.shape)\n",
" suite = TableExperimentSuite(LLMTableLearner(),\n",
" [MarkdownFormat(), JsonFormat(), DataMatrixFormat(\n",
" ), DFloaderFormat(), TabSeparatedFormat(), CommaSeparatedFormat()],\n",
" [\n",
" OriginalData(), SampleRows(), ColumnCluster(), SerializeTable()\n",
" ],\n",
" [TableColumnReorderTests(), TableTransposeTests(\n",
" ), TableReconstructionTests(), TableReconstructionTests1()],\n",
" save_path, open_api_key)\n",
" output = suite.run_experiment(df, per_table_op=50, per_test_gen=1)\n",
"\n",
" col_names = [\"temperature\", \"tableFormat\", \"TableManipulation\", \"TestCase\", \"TestQuestion\", \"TestExpectedAnswer\", \"TestCaseType\", \"pass_1\", \"pass_3\",\n",
" \"pass_5\", \"pass_10\", \"pass_15\", \"TestCaseTuple\", \"Answers_top_15\", \"Answer_top_1\", \"per_cell_accracies_top1\", \"per_cell_accracies\", \"Result\", \"Error\"]\n",
" output_df = pd.DataFrame(output, columns=col_names)\n",
"\n",
" output_df.to_csv(os.path.join(\n",
" save_path, f\"Macro_{bd}_output.csv\"), index=None)\n",
" try:\n",
" output_df.to_pickle(os.path.join(\n",
" save_path, f\"{bd}_output.pkl\"), index=None)\n",
" except:\n",
" pass"
]
}
],
"metadata": {
"kernel_info": {
"name": "python38-azureml"
},
"kernelspec": {
"display_name": "env",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.5"
},
"microsoft": {
"ms_spell_check": {
"ms_spell_check_language": "en"
}
},
"nteract": {
"version": "nteract-front-end@1.0.0"
},
"orig_nbformat": 4
},
"nbformat": 4,
"nbformat_minor": 2
}

View file

@ -1,228 +0,0 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"%cd ../code_\n",
"from collections import Counter\n",
"from tableTestingNewMacro import *\n",
"from utils import Convert_back_to_df\n",
"from compare import compare_per_cell\n",
"import pandas as pd\n",
"import os\n",
"import ast\n",
"import json\n",
"from typing import Any, List\n",
"from itertools import combinations\n",
"import tqdm\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def check(expected_df: pd.DataFrame, answer: pd.DataFrame) -> bool:\n",
" expected_df = expected_df.astype(str)\n",
" try:\n",
" if expected_df.shape == answer.shape:\n",
" return expected_df.equals(answer)\n",
" else:\n",
" return False\n",
" except:\n",
" return False\n",
"\n",
"\n",
"def metric_pass_k(expected_df: pd.DataFrame, answer: List[pd.DataFrame], k: int) -> float:\n",
" expected_df = expected_df.astype(str)\n",
" boolean_answers = [check(expected_df, answer[i])\n",
" for i in range(len(answer))]\n",
" combinations_k = list(combinations(boolean_answers, k))\n",
" passed_at_k = 0\n",
" # Calculate the pass@k metric\n",
" for comb in combinations_k:\n",
" if any(comb):\n",
" passed_at_k += 1\n",
" pass_at_k_percentage = (passed_at_k / len(combinations_k))*100\n",
"\n",
" return pass_at_k_percentage"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def serialize_it(expected_df, format_name):\n",
" if format_name == \"JsonFormat\":\n",
" return str(JsonFormat().formatting(next(SerializeTable().modify(expected_df))))\n",
" if format_name == \"DFloaderFormat\":\n",
" return str(DFloaderFormat().formatting(next(SerializeTable().modify(expected_df))))\n",
" if format_name == \"DataMatrixFormat\":\n",
" return str(DataMatrixFormat().formatting(next(SerializeTable().modify(expected_df))))\n",
" if format_name == \"MarkdownFormat\":\n",
" return str(MarkdownFormat().formatting(next(SerializeTable().modify(expected_df))))\n",
" if format_name == \"CommaSeparatedFormat\":\n",
" return str(CommaSeparatedFormat().formatting(next(SerializeTable().modify(expected_df))))\n",
" if format_name == \"TabSeparatedFormat\":\n",
" return str(TabSeparatedFormat().formatting(next(SerializeTable().modify(expected_df))))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"benchmark_data_path = \"../resources/BenchmarkDataset\"\n",
"benchmark_dataset = os.listdir(benchmark_data_path)\n",
"# 3----> macro test -noises 8 formats 3 test\n",
"benchmark_save_path = \"../resources/BenchmarkResults_29_9_2023_Macro_Tests_all_noise_redo_redo/\"\n",
"benchmark_dataset = [d for d in benchmark_dataset if d.endswith(\".csv\")]\n",
"sp = spa = benchmark_save_path\n",
"\n",
"for bd in benchmark_dataset:\n",
" try:\n",
" pass_metrics = []\n",
" t_F_bools = []\n",
" scores_all_precision = []\n",
" scores_all_recall = []\n",
" print(bd)\n",
" df = pd.read_csv(os.path.join(benchmark_save_path,\n",
" bd, \"Macro_\"+bd+\"_output.csv\"))\n",
" print(df.shape)\n",
" dirs = os.listdir(os.path.join(\n",
" benchmark_save_path, bd, \"cache_logger_new\"))\n",
" print(dirs)\n",
" error_lines = 0\n",
" error_converts = 0\n",
" found = 0\n",
" table_ops = []\n",
" error_converts_format = []\n",
" ops_candidate_error = []\n",
" test_candidate_error = []\n",
" error_logs = []\n",
" print(\"use:\", dirs[-1])\n",
" with open(os.path.join(benchmark_save_path, bd, \"cache_logger_new\", dirs[-1]), \"r\") as file:\n",
" for line in tqdm.tqdm(file):\n",
" try:\n",
" data_ = json.loads((line))\n",
" expected_ans = data_[\"test-expected-answer\"]\n",
" expected_df = Convert_back_to_df(\n",
" \"JsonFormat\", str(expected_ans))\n",
" gen = data_[\"gen\"]\n",
" if gen == \"TableReconstructionTests\" and data_[\"table_op\"] == \"SampleRows\":\n",
" if serialize_it(expected_df, data_[\"tab_format\"]) in data_[\"prompt_cache_per_call\"][\"prompt\"]:\n",
" table_op = \"SerializeTable\"\n",
" found += 1\n",
" elif gen == \"TableReconstructionTests1\":\n",
" table_op = \"SerializeTable\"\n",
" else:\n",
" table_op = data_[\"table_op\"]\n",
" table_ops.append(table_op)\n",
" answer = data_[\"answer\"]\n",
" tab_format = data_[\"tab_format\"]\n",
" answer_changed_format = []\n",
" for a in answer:\n",
" try:\n",
" conv = Convert_back_to_df(tab_format, a)\n",
" except Exception as err:\n",
" conv = pd.DataFrame()\n",
" error_converts += 1\n",
" if a == answer[0]:\n",
" error_converts_format.append(\n",
" data_[\"tab_format\"])\n",
"\n",
" answer_changed_format.append(conv)\n",
"\n",
" a = answer_changed_format[0]\n",
" try:\n",
" scores_all_precision.append(\n",
" compare_per_cell(a, expected_df)*100)\n",
" except:\n",
" scores_all_precision.append(0)\n",
" try:\n",
" # , type_reference_df=a, count_header_and_index=True, return_fraction=False))\n",
" scores_all_recall.append(\n",
" compare_per_cell(expected_df, a)*100)\n",
" except:\n",
" scores_all_recall.append(0)\n",
"\n",
" # break\n",
" try:\n",
" pass_metrics.append([metric_pass_k(\n",
" expected_ans, answer_changed_format, k) for k in [1, 3, 5, 10, 15]])\n",
" except Exception as Err:\n",
" pass_metrics.append([None for k in [1, 3, 5, 10, 15]])\n",
" try:\n",
" t_F_bools.append(\n",
" check(expected_ans, answer_changed_format[0]))\n",
"\n",
" except Exception as Err:\n",
" t_F_bools.append(False)\n",
" except Exception as err:\n",
"\n",
" ops_candidate_error.append(data_[\"table_op\"])\n",
" test_candidate_error.append(data_[\"gen\"])\n",
" error_logs.append(data_[\"error\"])\n",
" error_lines += 1\n",
" scores_all_precision.append(0)\n",
" scores_all_recall.append(0)\n",
" t_F_bools.append(False)\n",
" pass_metrics.append([None for k in [1, 3, 5, 10, 15]])\n",
"\n",
" print(len(pass_metrics))\n",
" print(Counter(ops_candidate_error).items())\n",
" print(Counter(test_candidate_error).items())\n",
" print(Counter(error_logs).items())\n",
" print(\"NO of error lines:\", error_lines)\n",
" print(\"error_converts:\", error_converts/15, error_converts)\n",
" print(\"found serialized:\", found)\n",
" print(Counter(table_ops).items())\n",
" print(\"failed format counts:\", Counter(error_converts_format).items())\n",
" df[\"table_ops\"] = table_ops\n",
" df[['pass_1', 'pass_3', 'pass_5', 'pass_10', 'pass_15']] = pass_metrics\n",
" df[\"Result\"] = df[\"Result\"].apply(lambda x: 1 if x else 0)\n",
" df[\"T/F\"] = t_F_bools\n",
" df[\"precision_per_cell_correctness_top1\"] = scores_all_precision\n",
" df[\"recall_per_cell_correctness_top1\"] = scores_all_recall\n",
" df.to_csv(os.path.join(benchmark_save_path, bd,\n",
" \"Macro_\"+bd+\"_output_revamped.csv\"), index=None)\n",
"\n",
" print(f\"{'-'*90}\\n\")\n",
" except Exception as Err:\n",
" print(\"error encountered\")\n",
" print(Err)\n",
" pass\n",
" print(f\"{'-'*90}\\n\")"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "env",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.5"
},
"orig_nbformat": 4
},
"nbformat": 4,
"nbformat_minor": 2
}

View file

@ -1,231 +0,0 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import pandas as pd\n",
"import os\n",
"import numpy as np\n",
"import statsmodels.api as sm\n",
"import statsmodels.stats.contingency_tables as ct"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"benchmark_data_path = \"../resources/BenchmarkDatasetClean/\"\n",
"benchmark_dataset = os.listdir(benchmark_data_path)\n",
"benchmark_save_path = \"../resources/BenchmarkResults_29_9_2023_Macro_Tests_all_noise_redo_redo\"\n",
"benchmark_save_path2 = \"../resources/BenchmarkResults_23_9_2023_Macro_Tests_HTML_original\"\n",
"benchmark_save_path1 = \"../resources/BenchmarkResults_23_9_2023_Macro_Tests_new_format_added_test_redo/\"\n",
"\n",
"benchmark_dataset = [data for data in benchmark_dataset if data.endswith(\n",
" \".csv\") and \"breast\" not in data]\n",
"dfs = []\n",
"for bd in benchmark_dataset:\n",
"\n",
" pass_metrics = []\n",
" print(bd)\n",
" df1 = pd.read_csv(os.path.join(benchmark_save_path1, bd,\n",
" \"Macro_\"+bd+\"_output_revamped.csv\"))\n",
" df2 = pd.read_csv(os.path.join(benchmark_save_path2, bd,\n",
" \"Macro_\"+bd+\"_output_revamped.csv\"))\n",
" df3 = pd.read_csv(os.path.join(benchmark_save_path, bd,\n",
" \"Macro_\"+bd+\"_output_revamped.csv\"))\n",
" df2[\"precision_per_cell_correctness_top1\"] = df2[\"precision_per_cell_correctness_top1\"]*0.01\n",
" df2[\"recall_per_cell_correctness_top1\"] = df2[\"recall_per_cell_correctness_top1\"]*0.01\n",
" df3[\"precision_per_cell_correctness_top1\"] = df3[\"precision_per_cell_correctness_top1\"]*0.01\n",
" df3[\"recall_per_cell_correctness_top1\"] = df3[\"recall_per_cell_correctness_top1\"]*0.01\n",
" df2[\"TableManipulation\"] = \"OriginalData\"\n",
" df1[\"TableManipulation\"] = \"OriginalData\"\n",
" df = pd.concat([df1, df2, df3], ignore_index=True)\n",
" print(df1.shape, df2.shape, df.shape)\n",
" df[\"Dataset-name\"] = bd\n",
" df[\"F1Score_per_cell_correctness_top1\"] = df.apply(lambda x: np.round(2*(x.recall_per_cell_correctness_top1*x.precision_per_cell_correctness_top1)/(\n",
" x.recall_per_cell_correctness_top1+x.precision_per_cell_correctness_top1)) if (x.recall_per_cell_correctness_top1*x.precision_per_cell_correctness_top1) != 0 else 0, axis=1)\n",
" dfs.append(df)\n",
"all_data = pd.concat(dfs, ignore_index=True)\n",
"\n",
"attributes = {c: list(df[c].unique()) for c in df.columns if c in [\n",
" \"temperature\", \"tableFormat\", \"table_ops\", \"TestCase\"]}"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### RQ1"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"test_cases = [\"TableColumnReorderTests\",\n",
" \"TableReconstructionTests1\", \"TableTransposeTests\"]\n",
"formats = all_data[\"tableFormat\"].unique()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"all_data_test_vs_format_on_all_noise_ops = all_data[(\n",
" all_data[\"temperature\"] == 0.0)] \n",
"print(all_data_test_vs_format_on_all_noise_ops.shape)\n",
"test_vs_format_on_all_noise_ops = all_data_test_vs_format_on_all_noise_ops.pivot_table(\n",
" index=[\"tableFormat\"],\n",
" columns='TestCase',\n",
" values=[\"F1Score_per_cell_correctness_top1\", 'recall_per_cell_correctness_top1',\n",
" \"precision_per_cell_correctness_top1\"], \n",
" aggfunc=[\"mean\"] # Custom aggregation function to append values to a list\n",
")\n",
"test_vs_format_on_all_noise_ops = test_vs_format_on_all_noise_ops*100\n",
"test_vs_format_on_all_noise_ops = np.round(\n",
" test_vs_format_on_all_noise_ops, decimals=2)\n",
"cols = [(\"mean\", f\"{metric}_per_cell_correctness_top1\", T)\n",
" for T in test_cases for metric in [\"precision\", \"recall\", \"F1Score\"]]\n",
"test_vs_format_on_all_noise_ops = test_vs_format_on_all_noise_ops[cols]\n",
"indi = ['CommaSeparatedFormat', 'DFloaderFormat', 'DataMatrixFormat',\n",
" 'JsonFormat', 'MarkdownFormat',\n",
" 'TabSeparatedFormat', 'HTMLFormat', 'HTMLNoSpaceFormat']\n",
"test_vs_format_on_all_noise_ops = test_vs_format_on_all_noise_ops.reindex(indi)\n",
"test_vs_format_on_all_noise_ops"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"cols_f1 = [(\"mean\", f\"{metric}_per_cell_correctness_top1\", T)\n",
" for T in test_cases for metric in [\"F1Score\"]]\n",
"test_vs_format_on_all_noise_ops_f1 = test_vs_format_on_all_noise_ops[cols_f1]\n",
"test_vs_format_on_all_noise_ops_f1"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### P-values RQ1 Macro"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"all_data_test_vs_format_on_all_noise_ops = all_data[(\n",
" all_data[\"temperature\"] == 0.0)] \n",
"print(all_data_test_vs_format_on_all_noise_ops.shape)\n",
"test_vs_format_on_all_noise_ops = all_data_test_vs_format_on_all_noise_ops.pivot_table(\n",
" index=[\"TestCase\"],\n",
" columns='tableFormat',\n",
" values=[\"F1Score_per_cell_correctness_top1\"],\n",
" # Custom aggregation function to append values to a list\n",
" aggfunc=lambda x: list(x)\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import scipy\n",
"\n",
"p_val = {}\n",
"for id in range(test_vs_format_on_all_noise_ops.shape[0]):\n",
" print(id)\n",
" if test_vs_format_on_all_noise_ops.index[id] != \"TableReconstructionTests\":\n",
" print(\n",
" f\"For Test: {test_vs_format_on_all_noise_ops.index[id]} pass@1 temp =0.1\")\n",
" sorted_list = [i for i in test_vs_format_on_all_noise_ops_f1.sort_values(\n",
" by=[(\"mean\", \"F1Score_per_cell_correctness_top1\", test_vs_format_on_all_noise_ops.index[id])], ascending=False).index]\n",
" print(sorted_list)\n",
" sub_vals = {}\n",
" index_val = test_vs_format_on_all_noise_ops.index[id]\n",
" for x in range(1, len(sorted_list)):\n",
" vals1 = test_vs_format_on_all_noise_ops.loc[index_val, (\n",
" \"F1Score_per_cell_correctness_top1\", sorted_list[0])]\n",
" vals2 = test_vs_format_on_all_noise_ops.loc[index_val, (\n",
" \"F1Score_per_cell_correctness_top1\", sorted_list[x])]\n",
" min_val = min(len(vals1), len(vals2))\n",
" print(min_val)\n",
" p_value = scipy.stats.ttest_rel(vals1[:min_val], vals2[:min_val])\n",
" sub_vals[f\"{sorted_list[0]}-{sorted_list[x]}\"] = {\"p-value\": p_value.pvalue,\n",
" \"statistics\": p_value.statistic, \"df\": p_value.df, \"tests_count\": min_val}\n",
" p_val[test_vs_format_on_all_noise_ops.index[id]] = sub_vals"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import json\n",
"import datetime\n",
"\n",
"\n",
"class CustomJSONEncoder(json.JSONEncoder):\n",
" def default(self, obj):\n",
" if isinstance(obj, np.integer):\n",
" return int(obj)\n",
" elif isinstance(obj, np.floating):\n",
" return float(obj)\n",
" elif isinstance(obj, np.ndarray):\n",
" return obj.tolist()\n",
" if isinstance(obj, pd.Series):\n",
" return obj.tolist()\n",
" elif isinstance(obj, datetime):\n",
" # Handle datetime objects\n",
" return obj.isoformat()\n",
" elif isinstance(obj, set):\n",
" # Handle sets\n",
" return list(obj)\n",
" return super().default(obj)\n",
"\n",
"\n",
"with open(\"../resources/all_p_vals/p_vals_macro_tests_RQ1.json\", \"w\") as f:\n",
" json.dump(p_val, f, indent=3, cls=CustomJSONEncoder)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "env",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.5"
},
"orig_nbformat": 4
},
"nbformat": 4,
"nbformat_minor": 2
}

View file

@ -1,297 +0,0 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import pandas as pd\n",
"import os\n",
"import numpy as np\n",
"import statsmodels.api as sm\n",
"import statsmodels.stats.contingency_tables as ct"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"benchmark_data_path = \"../resources/BenchmarkDatasetClean/\"\n",
"benchmark_dataset = os.listdir(benchmark_data_path)\n",
"benchmark_save_path = \"../resources/BenchmarkResults_29_9_2023_Macro_Tests_all_noise_redo_redo\"\n",
"benchmark_save_path2 = \"../resources/BenchmarkResults_23_9_2023_Macro_Tests_HTML_original\"\n",
"benchmark_save_path1 = \"../resources/BenchmarkResults_23_9_2023_Macro_Tests_new_format_added_test_redo/\"\n",
"\n",
"benchmark_dataset = [data for data in benchmark_dataset if data.endswith(\n",
" \".csv\") and \"breast\" not in data]\n",
"dfs = []\n",
"for bd in benchmark_dataset:\n",
"\n",
" pass_metrics = []\n",
" print(bd)\n",
" df1 = pd.read_csv(os.path.join(benchmark_save_path1, bd,\n",
" \"Macro_\"+bd+\"_output_revamped.csv\"))\n",
" df2 = pd.read_csv(os.path.join(benchmark_save_path2, bd,\n",
" \"Macro_\"+bd+\"_output_revamped.csv\"))\n",
" df3 = pd.read_csv(os.path.join(benchmark_save_path, bd,\n",
" \"Macro_\"+bd+\"_output_revamped.csv\"))\n",
" df2[\"TableManipulation\"] = \"OriginalData\"\n",
" df1[\"TableManipulation\"] = \"OriginalData\"\n",
" df2[\"precision_per_cell_correctness_top1\"] = df2[\"precision_per_cell_correctness_top1\"]*0.01\n",
" df2[\"recall_per_cell_correctness_top1\"] = df2[\"recall_per_cell_correctness_top1\"]*0.01\n",
" df3[\"precision_per_cell_correctness_top1\"] = df3[\"precision_per_cell_correctness_top1\"]*0.01\n",
" df3[\"recall_per_cell_correctness_top1\"] = df3[\"recall_per_cell_correctness_top1\"]*0.01\n",
" df = pd.concat([df1, df2, df3], ignore_index=True)\n",
" df[\"Dataset-name\"] = bd\n",
" df[\"F1Score_per_cell_correctness_top1\"] = df.apply(lambda x: np.round(2*(x.recall_per_cell_correctness_top1*x.precision_per_cell_correctness_top1)/(\n",
" x.recall_per_cell_correctness_top1+x.precision_per_cell_correctness_top1)) if (x.recall_per_cell_correctness_top1*x.precision_per_cell_correctness_top1) != 0 else 0, axis=1)\n",
" dfs.append(df)\n",
"all_data = pd.concat(dfs, ignore_index=True)\n",
"\n",
"attributes = {c: list(df[c].unique()) for c in df.columns if c in [\n",
" \"temperature\", \"tableFormat\", \"table_ops\", \"TestCase\"]}"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### RQ3: "
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Average percell, @pass 1 formats- noise- vs test"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"test_cases = ['TableColumnReorderTests',\n",
" 'TableReconstructionTests1',\n",
" 'TableTransposeTests']"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"all_data_test_vs_format_on_all_noise_ops = all_data[(\n",
" all_data[\"temperature\"] == 0.0)] \n",
"print(all_data_test_vs_format_on_all_noise_ops.shape)\n",
"test_vs_format_on_all_noise_ops = all_data_test_vs_format_on_all_noise_ops.pivot_table(\n",
" index=[\"tableFormat\", \"TableManipulation\"],\n",
" columns='TestCase',\n",
" values=[\"F1Score_per_cell_correctness_top1\", 'recall_per_cell_correctness_top1',\n",
" \"precision_per_cell_correctness_top1\"], \n",
" aggfunc=[\"mean\"] # Custom aggregation function to append values to a list\n",
")\n",
"test_vs_format_on_all_noise_ops = test_vs_format_on_all_noise_ops*100\n",
"test_vs_format_on_all_noise_ops = np.round(\n",
" test_vs_format_on_all_noise_ops, decimals=2)\n",
"print(test_vs_format_on_all_noise_ops.columns)\n",
"noises = [\n",
" 'OriginalData',\n",
" 'ShuffleRows',\n",
" 'ShuffleColumns',\n",
" 'ShuffleColumnNames',\n",
" 'SequentialColumnNames',\n",
" 'ArbitraryColumnNames',\n",
" 'TransposeTable',\n",
" 'ColumnCluster',\n",
" 'SerializeTable']\n",
"table_formats = all_data[\"tableFormat\"].value_counts().index\n",
"indi = [(formatType, noise)\n",
" for formatType in table_formats for noise in noises]\n",
"cols = [(\"mean\", f\"{metric}_per_cell_correctness_top1\", T)\n",
" for T in test_cases for metric in [\"precision\", \"recall\", \"F1Score\"]]\n",
"test_vs_format_on_all_noise_ops = test_vs_format_on_all_noise_ops[cols]\n",
"test_vs_format_on_all_noise_ops = test_vs_format_on_all_noise_ops.reindex(indi)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"test_vs_format_on_all_noise_ops"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"test_vs_format_on_all_noise_ops.tail(30)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### diff with p_val "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"test_vs_format_on_all_noise_ops_list = all_data_test_vs_format_on_all_noise_ops.pivot_table(\n",
" index=[\"tableFormat\", \"TableManipulation\"],\n",
" columns='TestCase',\n",
" values=[\"F1Score_per_cell_correctness_top1\", 'recall_per_cell_correctness_top1',\n",
" \"precision_per_cell_correctness_top1\"], \n",
" # Custom aggregation function to append values to a list\n",
" aggfunc=lambda x: list(x)\n",
")\n",
"\n",
"cols_agg = [(f\"{metric}_per_cell_correctness_top1\", T)\n",
" for T in test_cases for metric in [\"precision\", \"recall\", \"F1Score\"]]\n",
"\n",
"test_vs_format_on_all_noise_ops_list = test_vs_format_on_all_noise_ops_list[cols_agg]\n",
"test_vs_format_on_all_noise_ops_list = test_vs_format_on_all_noise_ops_list.reindex(\n",
" indi)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"test_vs_format_on_all_noise_ops_list"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import scipy\n",
"test_vs_format_on_all_noise_ops_diff_with_pval = test_vs_format_on_all_noise_ops.copy(\n",
" deep=True)\n",
"test_vs_format_on_all_noise_ops_pval = test_vs_format_on_all_noise_ops.copy(\n",
" deep=True)\n",
"for i in test_vs_format_on_all_noise_ops_diff_with_pval.index:\n",
" for j in test_vs_format_on_all_noise_ops_diff_with_pval.columns:\n",
" ix = (i[0], \"OriginalData\")\n",
" jj = (j[1], j[2])\n",
" original_values = test_vs_format_on_all_noise_ops_list.loc[ix, jj]\n",
" noise_induced_values = test_vs_format_on_all_noise_ops_list.loc[i, jj]\n",
"\n",
" if i[1] != \"OriginalData\":\n",
" min_val = min(len(original_values), len(noise_induced_values))\n",
"\n",
" p_value = scipy.stats.ttest_rel(\n",
" original_values[:min_val], noise_induced_values[:min_val])\n",
"\n",
" benoffi_corrected_p_val_threshold = 0.01/8\n",
" subtract_from = test_vs_format_on_all_noise_ops.loc[ix, j]\n",
" difference = test_vs_format_on_all_noise_ops_diff_with_pval.loc[i,\n",
" j]-subtract_from\n",
" if \"-\" in str(difference):\n",
" str_diff = \"{:.2f}\".format(difference)\n",
" else:\n",
" str_diff = \"+\"+\"{:.2f}\".format(difference)\n",
" if p_value.pvalue < benoffi_corrected_p_val_threshold:\n",
"\n",
" str_diff += \"**\"\n",
" test_vs_format_on_all_noise_ops_diff_with_pval.loc[i, j] = str_diff\n",
" test_vs_format_on_all_noise_ops_pval.loc[i, j] = p_value.pvalue"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"test_vs_format_on_all_noise_ops_diff_with_pval"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### only diff Macro"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"test_vs_format_on_all_noise_ops_diff = test_vs_format_on_all_noise_ops.copy(\n",
" deep=True)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"for i in test_vs_format_on_all_noise_ops_diff.index:\n",
" for j in test_vs_format_on_all_noise_ops_diff.columns:\n",
" ix = (i[0], \"OriginalData\")\n",
" if i[1] != \"OriginalData\":\n",
" subtract_from = test_vs_format_on_all_noise_ops.loc[ix, j]\n",
" difference = subtract_from - \\\n",
" test_vs_format_on_all_noise_ops_diff.loc[i, j]\n",
" if \"-\" in str(difference):\n",
" str_diff = \"{:.2f}\".format(difference)\n",
" else:\n",
" str_diff = \"+\"+\"{:.2f}\".format(difference)\n",
"\n",
" test_vs_format_on_all_noise_ops_diff.loc[i, j] = str_diff"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"test_vs_format_on_all_noise_ops_diff"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "env",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.5"
},
"orig_nbformat": 4
},
"nbformat": 4,
"nbformat_minor": 2
}

View file

@ -1,169 +0,0 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"%cd ../code_"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"gather": {
"logged": 1694699855951
}
},
"outputs": [],
"source": [
"from tableTestingMicroScript import *\n",
"import pandas as pd\n",
"import os\n",
"import ast\n",
"from utils import num_tokens_from_string"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from prompts import EXAMPLES, EXAMPLE_Dictionary\n",
"import pandas as pd\n",
"from utils import num_tokens_from_string\n",
"\n",
"\n",
"def gather_Examples_Prompt(tab_format, test_case):\n",
"\n",
" TF, TC = tab_format.__class__.__name__, test_case\n",
" Examples = EXAMPLES.replace(\"[Data_format_example1]\", EXAMPLE_Dictionary[f\"TF_EX1_{TF}\"])\\\n",
" .replace(\"[QA1]\", EXAMPLE_Dictionary[f\"Ex1_QA1_{TC}\"])\\\n",
" .replace(\"[Data_format_example2]\", EXAMPLE_Dictionary[f\"TF_EX1_{TF}\"])\\\n",
" .replace(\"[QA2]\", EXAMPLE_Dictionary[f\"Ex1_QA2_{TC}\"])\n",
" return Examples\n",
"\n",
"\n",
"def get_max_no_of_rows_feedable(df, FormatFunction, Micro=True):\n",
"\n",
" per_test = []\n",
" test_case = [\"NavigationTests\", \"ColumnLookupTests\",\n",
" \"RowLookupTests\", \"DataTypeLookupTests\"]\n",
"\n",
" for tt in test_case:\n",
" exp = gather_Examples_Prompt(FormatFunction, tt)\n",
"\n",
" for i in range(min(df.shape[0], 100), 2, -1):\n",
" if Micro:\n",
" total_token = 30 + num_tokens_from_string(exp)+11+num_tokens_from_string(\n",
" str(FormatFunction.formatting(df.head(i))))+100\n",
" else:\n",
" total_token = 100 + num_tokens_from_string(exp)+11+num_tokens_from_string(\n",
" str(FormatFunction.formatting(df.head(i))))*2.5\n",
"\n",
" if total_token < 4081:\n",
"\n",
" per_test.append(i)\n",
" break\n",
" return (min(per_test))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"gather": {
"logged": 1694736239538
}
},
"outputs": [],
"source": [
"\n",
"benchmark_data_path = \"../BenchmarkDatasetClean\"\n",
"benchmark_dataset = [f for f in os.listdir(\n",
" benchmark_data_path) if f.endswith(\".csv\")]\n",
"benchmark_save_path = \"../BenchmarkResults_Micro_tests/\"\n",
"if not os.path.exists(benchmark_save_path):\n",
" os.makedirs(benchmark_save_path)\n",
"open_api_key = os.env(\"OPENAI_API_KEY\")\n",
"for bd in benchmark_dataset:\n",
" print(bd)\n",
" df = pd.read_csv(os.path.join(benchmark_data_path, bd))\n",
" save_path = os.path.join(benchmark_save_path, bd)\n",
" if not os.path.exists(save_path):\n",
" os.makedirs(save_path)\n",
"\n",
" table_formats = [HTMLFormat(), HTMLNoSpaceFormat(), MarkdownFormat(), JsonFormat(\n",
" ), DataMatrixFormat(), DFloaderFormat(), TabSeparatedFormat(), CommaSeparatedFormat()],\n",
" max_rows = min([get_max_no_of_rows_feedable(df, format)\n",
" for format in table_formats])\n",
"\n",
" df = df.head(max_rows)\n",
" print(df.shape)\n",
" suite = TableExperimentSuite(LLMTableLearner(),\n",
" [HTMLFormat(), HTMLNoSpaceFormat(), MarkdownFormat(), JsonFormat(), DataMatrixFormat(\n",
" ), DFloaderFormat(), TabSeparatedFormat(), CommaSeparatedFormat()],\n",
" [ArbitraryColumnNames(),\n",
" ShuffleRows(),\n",
" TransposeTable(),\n",
" ColumnCluster(),\n",
" SerializeTable(),\n",
" ShuffleColumns(),\n",
" ShuffleColumnNames(),\n",
" OriginalData(),\n",
" SequentialColumnNames()\n",
" ],\n",
" [NavigationTests(), ColumnLookupTests(),\n",
" RowLookupTests(), DataTypeLookupTests()],\n",
" save_path, open_api_key)\n",
" output = suite.run_experiment(df, per_table_op=10, per_test_gen=10)\n",
" col_names = [\"temperature\", \"tableFormat\", \"TableManipulation\", \"TestCase\", \"TestQuestion\", \"TestExpectedAnswer\", \"TestCaseType\",\n",
" \"pass_1\", \"pass_3\", \"pass_5\", \"pass_10\", \"pass_15\", \"TestCaseTuple\", \"Answers_top_15\", \"Answer_top_1\", \"Result\", \"Error\"]\n",
" output_df = pd.DataFrame(output, columns=col_names)\n",
" output_df.to_csv(os.path.join(\n",
" save_path, f\"Micro_{bd}_output.csv\"), index=None)\n",
" try:\n",
" output_df.to_pickle(os.path.join(\n",
" save_path, f\"Micro_{bd}_output.pkl\"), index=None)\n",
" except:\n",
" pass"
]
}
],
"metadata": {
"kernel_info": {
"name": "python38-azureml"
},
"kernelspec": {
"display_name": "env",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.5"
},
"microsoft": {
"ms_spell_check": {
"ms_spell_check_language": "en"
}
},
"nteract": {
"version": "nteract-front-end@1.0.0"
},
"orig_nbformat": 4
},
"nbformat": 4,
"nbformat_minor": 2
}

View file

@ -1,192 +0,0 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"%cd ../code_"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"gather": {
"logged": 1693566160494
}
},
"outputs": [],
"source": [
"from tableTesting import *\n",
"import pandas as pd\n",
"import os\n",
"import ast\n",
"from itertools import combinations\n",
"from typing import Any\n",
"import json\n",
"from collections import Counter"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def metric_pass_k(expected_answer, answer: Any, k: int) -> float:\n",
" if not isinstance(answer, list):\n",
" raise ValueError(\"Answer should be a list for pass@k metric.\")\n",
"\n",
" # Convert the test.expect to a list if it's not already\n",
" expected_values = expected_answer if isinstance(\n",
" expected_answer, list) else [expected_answer]\n",
" expected_values = list(map(str, expected_values))\n",
" boolean_answers = [True if answer[i]\n",
" in expected_values else False for i in range(len(answer))]\n",
" combinations_k = list(combinations(boolean_answers, k))\n",
" passed_at_k = 0\n",
" # Calculate the pass@k metric\n",
" for comb in combinations_k:\n",
" if any(comb):\n",
" passed_at_k += 1\n",
" pass_at_k_percentage = (passed_at_k / len(combinations_k))*100\n",
"\n",
" return pass_at_k_percentage"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def bool_results(expected_answer, answer: Any) -> float:\n",
" if not isinstance(answer, list):\n",
" raise ValueError(\"Answer should be a list for pass@k metric.\")\n",
"\n",
" # Convert the test.expect to a list if it's not already\n",
" expected_values = expected_answer if isinstance(\n",
" expected_answer, list) else [expected_answer]\n",
" expected_values = list(map(str, expected_values))\n",
" if answer[0] in expected_values:\n",
" return True\n",
" return False"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def check(self, test, answer: Any) -> bool:\n",
" if test.TestType in [\"ColumnLookupTests\", \"RowLookupTests\"]:\n",
" if isinstance(test.expect, list):\n",
" expected_values = test.expect if isinstance(\n",
" test.expect, list) else [test.expect]\n",
" expected_values = list(map(str, expected_values))\n",
" matches = set(expected_values).intersection(set([answer]))\n",
" else:\n",
" print(\"Error: The expected Answer 'List' should be a list not string\")\n",
" return len(matches) > 0\n",
" return str(test.expect) == answer"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"benchmark_data_path = \"../resources/BenchmarkDatasetClean/\"\n",
"benchmark_dataset = os.listdir(benchmark_data_path)\n",
"benchmark_save_path = \"../resources/BenchmarkResults31_8_2023/\"\n",
"save_path_plot = \"../resources/BenchmarkResults31_8_2023_plots/\"\n",
"if not os.path.exists(save_path_plot):\n",
" os.makedirs(save_path_plot)\n",
"sp = \"../BenchmarkFinalResults_31_8_2023/\"\n",
"if not os.path.exists(sp):\n",
" os.makedirs(sp)\n",
"spa = \"../BenchmarkFinalArrangedResults_31_8_2023/\"\n",
"if not os.path.exists(spa):\n",
" os.makedirs(spa)\n",
"benchmark_dataset = [\n",
" data for data in benchmark_dataset if data.endswith(\".csv\")]\n",
"for bd in benchmark_dataset:\n",
" try:\n",
" pass_metrics = []\n",
" t_F_bools = []\n",
" print(bd)\n",
" df = pd.read_csv(os.path.join(\n",
" benchmark_save_path, bd, bd+\"_output.csv\"))\n",
" print(df.shape)\n",
" dirs = os.listdir(os.path.join(\n",
" benchmark_save_path, bd, \"cache_logger\"))\n",
" dirs = [d for d in dirs if d.endswith(\".jsonl\")]\n",
" print(dirs)\n",
" with open(os.path.join(benchmark_save_path, bd, \"cache_logger\", dirs[-1]), \"r\") as file:\n",
" for line in file:\n",
" line = ast.literal_eval(line.strip())\n",
" if line.startswith(\"\\\"\") and line.endswith(\"\\\"\"):\n",
" line = line[1:-1]\n",
" data_ = json.loads((line))\n",
" expected_ans = data_[\"test-expected-answer\"]\n",
" ans = data_[\"answer\"]\n",
" try:\n",
" pass_metrics.append(\n",
" [metric_pass_k(expected_ans, ans, k) for k in [1, 3, 5, 10, 15]])\n",
" t_F_bools.append(bool_results(expected_ans, ans))\n",
" except Exception as err:\n",
" print(f\"error {err} catched during metric calculation\")\n",
" pass_metrics.append([None for k in [1, 3, 5, 10, 15]])\n",
" t_F_bools.append(False)\n",
" pass\n",
" print(len(pass_metrics))\n",
" df[['pass_1', 'pass_3', 'pass_5', 'pass_10', 'pass_15']] = pass_metrics\n",
" df[\"T/F\"] = t_F_bools\n",
" print(df.columns)\n",
" df.to_csv(os.path.join(benchmark_save_path, bd, bd +\n",
" \"_output_revamped_metrics_with_bools.csv\"), index=None)\n",
"\n",
" except Exception as Err:\n",
" print(\"ERROR------------->\", Err)\n",
" pass"
]
}
],
"metadata": {
"kernel_info": {
"name": "python38-azureml"
},
"kernelspec": {
"display_name": "env",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.5"
},
"microsoft": {
"ms_spell_check": {
"ms_spell_check_language": "en"
}
},
"nteract": {
"version": "nteract-front-end@1.0.0"
},
"orig_nbformat": 4
},
"nbformat": 4,
"nbformat_minor": 2
}

View file

@ -1,413 +0,0 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import pandas as pd\n",
"import os\n",
"import numpy as np\n",
"import statsmodels.api as sm\n",
"import statsmodels.stats.contingency_tables as ct\n",
"import datetime"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"benchmark_data_path = \"../resources/BenchmarkDataset\"\n",
"benchmark_dataset = os.listdir(benchmark_data_path)\n",
"# ---> final micro results with HTML formats\n",
"benchmark_save_path2 = \"../resources/BenchmarkResults/BenchmarkResults_21_9_2023_new_save_format_HTMLFormat_redo/\"\n",
"# ---> final micro results with 6 format\n",
"benchmark_save_path1 = \"../resources/BenchmarkResults/BenchmarkResults_21_9_2023_new_save_format_redo/\"\n",
"benchmark_dataset = [data for data in benchmark_dataset if data.endswith(\n",
" \".csv\") and \"breast\" not in data]\n",
"dfs = []\n",
"for bd in benchmark_dataset:\n",
"\n",
" pass_metrics = []\n",
" print(bd)\n",
" df1 = pd.read_csv(os.path.join(benchmark_save_path1, bd,\n",
" \"Micro_\"+bd+\"_output_revamped.csv\"))\n",
" try:\n",
" df2 = pd.read_csv(os.path.join(benchmark_save_path2,\n",
" bd, \"Micro_\"+bd+\"_output_revamped.csv\"))\n",
" df = pd.concat([df1, df2], ignore_index=True)\n",
" except:\n",
" df = df1\n",
" print(df1.shape, df2.shape, df.shape)\n",
" df[\"Dataset-name\"] = bd\n",
" dfs.append(df)\n",
"all_data = pd.concat(dfs, ignore_index=True)\n",
"pivot_table = all_data.pivot_table(\n",
" index=[\"TestCase\", 'TableManipulation', \"temperature\"],\n",
" columns='tableFormat',\n",
" values=['pass_1', 'pass_3', 'pass_5', 'pass_10', 'pass_15', 'Result'],\n",
" # Custom aggregation function to append values to a list\n",
" aggfunc=lambda x: list(x)\n",
")\n",
"attributes = {c: list(df[c].unique()) for c in df.columns if c in [\n",
" \"temperature\", \"tableFormat\", \"TableManipulation\", \"TestCase\"]}"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# RQ1"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Average pass @ 1, temp =0 Tests Vs formats across all datasets"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"all_data_test_vs_format_on_original_data = all_data[(\n",
" all_data[\"TableManipulation\"] == \"OriginalData\") & (all_data[\"temperature\"] == 0.0)]\n",
"test_vs_format_on_original_data = all_data_test_vs_format_on_original_data.pivot_table(\n",
" index=[\"TestCase\"],\n",
" columns='tableFormat',\n",
" values=['pass_1'],\n",
" aggfunc=[\"mean\"] # Custom aggregation function to append values to a list\n",
")\n",
"test_vs_format_on_original_data = np.round(\n",
" test_vs_format_on_original_data, decimals=2)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"test_vs_format_on_original_data.T.to_clipboard()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## p-value RQ1"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import scipy\n",
"data_id = all_data[all_data[\"TableManipulation\"].isin([\"OriginalData\"]) & (\n",
" all_data[\"temperature\"] == 0.0)] \n",
"pivot_table_across_formats = data_id.pivot_table(\n",
" index=[\"TestCase\"],\n",
" columns='tableFormat',\n",
" values=['pass_1'],\n",
" # Custom aggregation function to append values to a list\n",
" aggfunc=lambda x: list(x)\n",
")\n",
"p_val = {}\n",
"for id in range(pivot_table_across_formats.shape[0]):\n",
" print(id)\n",
" print(f\"For Test: {pivot_table_across_formats.index[id]} pass@1 temp =0.1\")\n",
" sorted_list = [i[-1] for i in test_vs_format_on_original_data.T.sort_values(\n",
" by=[pivot_table_across_formats.index[id]], ascending=False).index]\n",
" sub_vals = {}\n",
" index_val = pivot_table_across_formats.index[id]\n",
" for x in range(1, len(sorted_list)):\n",
" vals1 = pivot_table_across_formats.loc[index_val,\n",
" (\"pass_1\", sorted_list[0])]\n",
" vals2 = pivot_table_across_formats.loc[index_val,\n",
" (\"pass_1\", sorted_list[x])]\n",
" min_val = min(len(vals1), len(vals2))\n",
" print(min_val)\n",
" p_value = scipy.stats.ttest_rel(vals1[:min_val], vals2[:min_val])\n",
" sub_vals[f\"{sorted_list[0]}-{sorted_list[x]}\"] = {\"p-value\": p_value.pvalue,\n",
" \"statistics\": p_value.statistic, \"df\": p_value.df, \"tests_count\": min_val}\n",
" p_val[pivot_table_across_formats.index[id]] = sub_vals"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import json\n",
"\n",
"\n",
"class CustomJSONEncoder(json.JSONEncoder):\n",
" def default(self, obj):\n",
" if isinstance(obj, np.integer):\n",
" return int(obj)\n",
" elif isinstance(obj, np.floating):\n",
" return float(obj)\n",
" elif isinstance(obj, np.ndarray):\n",
" return obj.tolist()\n",
" if isinstance(obj, pd.Series):\n",
" return obj.tolist()\n",
" elif isinstance(obj, datetime):\n",
" # Handle datetime objects\n",
" return obj.isoformat()\n",
" elif isinstance(obj, set):\n",
" # Handle sets\n",
" return list(obj)\n",
" # Add more custom conversions for other data types if needed\n",
" return super().default(obj)\n",
"\n",
"\n",
"with open(\"../resources/all_p_vals/p_vals_micro_tests_RQ1.json\", \"w\") as f:\n",
" json.dump(p_val, f, indent=3, cls=CustomJSONEncoder)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Average pass@1, temp 0.0 over different data averaged across tests for different formats"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"all_data_dataset_vs_format_on_original_data = all_data[(\n",
" all_data[\"TableManipulation\"] == \"OriginalData\") & (all_data[\"temperature\"] == 0.0)]\n",
"dataset_vs_format_on_original_data = all_data_dataset_vs_format_on_original_data.pivot_table(\n",
" index=[\"Dataset-name\"],\n",
" columns='tableFormat',\n",
" values=['pass_1'],\n",
" aggfunc=[\"mean\"] # Custom aggregation function to append values to a list\n",
")\n",
"test_vs_format_on_original_data = np.round(\n",
" dataset_vs_format_on_original_data, decimals=2)\n",
"test_vs_format_on_original_data"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": []
},
{
"cell_type": "markdown",
"metadata": {},
"source": []
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# RQ2"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## All noise vs test across formats"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"all_data[\"tableFormat\"].value_counts().index"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"all_data_test_vs_format_on_all_noise_ops = all_data[(\n",
" all_data[\"temperature\"] == 0.0)]\n",
"test_vs_format_on_all_noise_ops = all_data_test_vs_format_on_all_noise_ops.pivot_table(\n",
" index=[\"tableFormat\", \"TableManipulation\"],\n",
" columns='TestCase',\n",
" values=['pass_1'],\n",
" aggfunc=[\"mean\"] # Custom aggregation function to append values to a list\n",
")\n",
"test_vs_format_on_all_noise_ops_list = all_data_test_vs_format_on_all_noise_ops.pivot_table(\n",
" index=[\"tableFormat\", \"TableManipulation\"],\n",
" columns='TestCase',\n",
" values=['pass_1'],\n",
" # Custom aggregation function to append values to a list\n",
" aggfunc=lambda x: list(x)\n",
")\n",
"test_vs_format_on_all_noise_ops = np.round(\n",
" test_vs_format_on_all_noise_ops, decimals=2)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"noises = [\n",
" 'OriginalData',\n",
" 'ShuffleRows',\n",
" 'ShuffleColumns',\n",
" 'ShuffleColumnNames',\n",
" 'SequentialColumnNames',\n",
" 'ArbitraryColumnNames',\n",
" 'TransposeTable',\n",
" 'ColumnCluster',\n",
" 'SerializeTable']\n",
"table_formats = all_data[\"tableFormat\"].value_counts().index\n",
"indi = [(formatType, noise)\n",
" for formatType in table_formats for noise in noises]\n",
"cols = [(metric, \"pass_1\", test)\n",
" for test in attributes[\"TestCase\"] for metric in [\"mean\"]]\n",
"cols_agg = [(\"pass_1\", test) for test in attributes[\"TestCase\"]]\n",
"test_vs_format_on_all_noise_ops = test_vs_format_on_all_noise_ops[cols]\n",
"test_vs_format_on_all_noise_ops = test_vs_format_on_all_noise_ops.reindex(indi)\n",
"test_vs_format_on_all_noise_ops_list = test_vs_format_on_all_noise_ops_list[cols_agg]\n",
"test_vs_format_on_all_noise_ops_list = test_vs_format_on_all_noise_ops_list.reindex(\n",
" indi)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"test_vs_format_on_all_noise_ops.head(10)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## RQ2: Delta values from original with p-values"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import scipy\n",
"test_vs_format_on_all_noise_ops_diff_with_pval = test_vs_format_on_all_noise_ops.copy(\n",
" deep=True)\n",
"test_vs_format_on_all_noise_ops_pval = test_vs_format_on_all_noise_ops.copy(\n",
" deep=True)\n",
"for i in test_vs_format_on_all_noise_ops_diff_with_pval.index:\n",
" for j in test_vs_format_on_all_noise_ops_diff_with_pval.columns:\n",
" ix = (i[0], \"OriginalData\")\n",
" jj = (j[1], j[2])\n",
" original_values = test_vs_format_on_all_noise_ops_list.loc[ix, jj]\n",
" noise_induced_values = test_vs_format_on_all_noise_ops_list.loc[i, jj]\n",
"\n",
" if i[1] != \"OriginalData\":\n",
" min_val = min(len(original_values), len(noise_induced_values))\n",
" p_value = scipy.stats.ttest_rel(\n",
" original_values[:min_val], noise_induced_values[:min_val])\n",
" benoffi_corrected_p_val_threshold = 0.01/8\n",
" subtract_from = test_vs_format_on_all_noise_ops.loc[ix, j]\n",
" difference = test_vs_format_on_all_noise_ops_diff_with_pval.loc[i,\n",
" j]-subtract_from\n",
" if \"-\" in str(difference):\n",
" str_diff = \"{:.2f}\".format(difference)\n",
" else:\n",
" str_diff = \"+\"+\"{:.2f}\".format(difference)\n",
" if p_value.pvalue < benoffi_corrected_p_val_threshold:\n",
" str_diff += \"**\"\n",
" test_vs_format_on_all_noise_ops_diff_with_pval.loc[i, j] = str_diff\n",
" test_vs_format_on_all_noise_ops_pval.loc[i, j] = p_value.pvalue"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"test_vs_format_on_all_noise_ops_diff_with_pval.head(30)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### only_diff"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"test_vs_format_on_all_noise_ops_diff = test_vs_format_on_all_noise_ops.copy(\n",
" deep=True)\n",
"for i in test_vs_format_on_all_noise_ops_diff.index:\n",
" for j in test_vs_format_on_all_noise_ops_diff.columns:\n",
" ix = (i[0], \"OriginalData\")\n",
" if i[1] != \"OriginalData\":\n",
" subtract_from = test_vs_format_on_all_noise_ops.loc[ix, j]\n",
" difference = subtract_from - \\\n",
" test_vs_format_on_all_noise_ops_diff.loc[i, j]\n",
" if \"-\" in str(difference):\n",
" str_diff = \"{:.2f}\".format(difference)\n",
" else:\n",
" str_diff = \"+\"+\"{:.2f}\".format(difference)\n",
"\n",
" test_vs_format_on_all_noise_ops_diff.loc[i, j] = str_diff"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"test_vs_format_on_all_noise_ops_diff"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "env",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.5"
},
"orig_nbformat": 4
},
"nbformat": 4,
"nbformat_minor": 2
}

View file

@ -1,12 +0,0 @@
pandas
numpy
scipy
torch
openai
transformers
jsonlines
tabulate
matplotlib
scikit-learn
plotly
statsmodels

The diff for this file is not shown because of its size. Load diff

View file

@ -1,769 +0,0 @@
X1,X2,X3,X4,X5,X6,X7,X8,Y1,Y2
0.98,514.5,294.0,110.25,7.0,2,0.0,0,15.55,21.33
0.98,514.5,294.0,110.25,7.0,3,0.0,0,15.55,21.33
0.98,514.5,294.0,110.25,7.0,4,0.0,0,15.55,21.33
0.98,514.5,294.0,110.25,7.0,5,0.0,0,15.55,21.33
0.9,563.5,318.5,122.5,7.0,2,0.0,0,20.84,28.28
0.9,563.5,318.5,122.5,7.0,3,0.0,0,21.46,25.38
0.9,563.5,318.5,122.5,7.0,4,0.0,0,20.71,25.16
0.9,563.5,318.5,122.5,7.0,5,0.0,0,19.68,29.6
0.86,588.0,294.0,147.0,7.0,2,0.0,0,19.5,27.3
0.86,588.0,294.0,147.0,7.0,3,0.0,0,19.95,21.97
0.86,588.0,294.0,147.0,7.0,4,0.0,0,19.34,23.49
0.86,588.0,294.0,147.0,7.0,5,0.0,0,18.31,27.87
0.82,612.5,318.5,147.0,7.0,2,0.0,0,17.05,23.77
0.82,612.5,318.5,147.0,7.0,3,0.0,0,17.41,21.46
0.82,612.5,318.5,147.0,7.0,4,0.0,0,16.95,21.16
0.82,612.5,318.5,147.0,7.0,5,0.0,0,15.98,24.93
0.79,637.0,343.0,147.0,7.0,2,0.0,0,28.52,37.73
0.79,637.0,343.0,147.0,7.0,3,0.0,0,29.9,31.27
0.79,637.0,343.0,147.0,7.0,4,0.0,0,29.63,30.93
0.79,637.0,343.0,147.0,7.0,5,0.0,0,28.75,39.44
0.76,661.5,416.5,122.5,7.0,2,0.0,0,24.77,29.79
0.76,661.5,416.5,122.5,7.0,3,0.0,0,23.93,29.68
0.76,661.5,416.5,122.5,7.0,4,0.0,0,24.77,29.79
0.76,661.5,416.5,122.5,7.0,5,0.0,0,23.93,29.4
0.74,686.0,245.0,220.5,3.5,2,0.0,0,6.07,10.9
0.74,686.0,245.0,220.5,3.5,3,0.0,0,6.05,11.19
0.74,686.0,245.0,220.5,3.5,4,0.0,0,6.01,10.94
0.74,686.0,245.0,220.5,3.5,5,0.0,0,6.04,11.17
0.71,710.5,269.5,220.5,3.5,2,0.0,0,6.37,11.27
0.71,710.5,269.5,220.5,3.5,3,0.0,0,6.4,11.72
0.71,710.5,269.5,220.5,3.5,4,0.0,0,6.37,11.29
0.71,710.5,269.5,220.5,3.5,5,0.0,0,6.4,11.67
0.69,735.0,294.0,220.5,3.5,2,0.0,0,6.85,11.74
0.69,735.0,294.0,220.5,3.5,3,0.0,0,6.79,12.05
0.69,735.0,294.0,220.5,3.5,4,0.0,0,6.77,11.73
0.69,735.0,294.0,220.5,3.5,5,0.0,0,6.81,11.93
0.66,759.5,318.5,220.5,3.5,2,0.0,0,7.18,12.4
0.66,759.5,318.5,220.5,3.5,3,0.0,0,7.1,12.23
0.66,759.5,318.5,220.5,3.5,4,0.0,0,7.1,12.4
0.66,759.5,318.5,220.5,3.5,5,0.0,0,7.1,12.14
0.64,784.0,343.0,220.5,3.5,2,0.0,0,10.85,16.78
0.64,784.0,343.0,220.5,3.5,3,0.0,0,10.54,16.8
0.64,784.0,343.0,220.5,3.5,4,0.0,0,10.77,16.75
0.64,784.0,343.0,220.5,3.5,5,0.0,0,10.56,16.67
0.62,808.5,367.5,220.5,3.5,2,0.0,0,8.6,12.07
0.62,808.5,367.5,220.5,3.5,3,0.0,0,8.49,12.22
0.62,808.5,367.5,220.5,3.5,4,0.0,0,8.45,12.08
0.62,808.5,367.5,220.5,3.5,5,0.0,0,8.5,12.04
0.98,514.5,294.0,110.25,7.0,2,0.1,1,24.58,26.47
0.98,514.5,294.0,110.25,7.0,3,0.1,1,24.63,26.37
0.98,514.5,294.0,110.25,7.0,4,0.1,1,24.63,26.44
0.98,514.5,294.0,110.25,7.0,5,0.1,1,24.59,26.29
0.9,563.5,318.5,122.5,7.0,2,0.1,1,29.03,32.92
0.9,563.5,318.5,122.5,7.0,3,0.1,1,29.87,29.87
0.9,563.5,318.5,122.5,7.0,4,0.1,1,29.14,29.58
0.9,563.5,318.5,122.5,7.0,5,0.1,1,28.09,34.33
0.86,588.0,294.0,147.0,7.0,2,0.1,1,26.28,30.89
0.86,588.0,294.0,147.0,7.0,3,0.1,1,26.91,25.6
0.86,588.0,294.0,147.0,7.0,4,0.1,1,26.37,27.03
0.86,588.0,294.0,147.0,7.0,5,0.1,1,25.27,31.73
0.82,612.5,318.5,147.0,7.0,2,0.1,1,23.53,27.31
0.82,612.5,318.5,147.0,7.0,3,0.1,1,24.03,24.91
0.82,612.5,318.5,147.0,7.0,4,0.1,1,23.54,24.61
0.82,612.5,318.5,147.0,7.0,5,0.1,1,22.58,28.51
0.79,637.0,343.0,147.0,7.0,2,0.1,1,35.56,41.68
0.79,637.0,343.0,147.0,7.0,3,0.1,1,37.12,35.28
0.79,637.0,343.0,147.0,7.0,4,0.1,1,36.9,34.43
0.79,637.0,343.0,147.0,7.0,5,0.1,1,35.94,43.33
0.76,661.5,416.5,122.5,7.0,2,0.1,1,32.96,33.87
0.76,661.5,416.5,122.5,7.0,3,0.1,1,32.12,34.07
0.76,661.5,416.5,122.5,7.0,4,0.1,1,32.94,34.14
0.76,661.5,416.5,122.5,7.0,5,0.1,1,32.21,33.67
0.74,686.0,245.0,220.5,3.5,2,0.1,1,10.36,13.43
0.74,686.0,245.0,220.5,3.5,3,0.1,1,10.43,13.71
0.74,686.0,245.0,220.5,3.5,4,0.1,1,10.36,13.48
0.74,686.0,245.0,220.5,3.5,5,0.1,1,10.39,13.7
0.71,710.5,269.5,220.5,3.5,2,0.1,1,10.71,13.8
0.71,710.5,269.5,220.5,3.5,3,0.1,1,10.8,14.28
0.71,710.5,269.5,220.5,3.5,4,0.1,1,10.7,13.87
0.71,710.5,269.5,220.5,3.5,5,0.1,1,10.75,14.27
0.69,735.0,294.0,220.5,3.5,2,0.1,1,11.11,14.28
0.69,735.0,294.0,220.5,3.5,3,0.1,1,11.13,14.61
0.69,735.0,294.0,220.5,3.5,4,0.1,1,11.09,14.3
0.69,735.0,294.0,220.5,3.5,5,0.1,1,11.16,14.45
0.66,759.5,318.5,220.5,3.5,2,0.1,1,11.68,13.9
0.66,759.5,318.5,220.5,3.5,3,0.1,1,11.69,13.72
0.66,759.5,318.5,220.5,3.5,4,0.1,1,11.7,13.88
0.66,759.5,318.5,220.5,3.5,5,0.1,1,11.69,13.65
0.64,784.0,343.0,220.5,3.5,2,0.1,1,15.41,19.37
0.64,784.0,343.0,220.5,3.5,3,0.1,1,15.2,19.43
0.64,784.0,343.0,220.5,3.5,4,0.1,1,15.42,19.34
0.64,784.0,343.0,220.5,3.5,5,0.1,1,15.21,19.32
0.62,808.5,367.5,220.5,3.5,2,0.1,1,12.96,14.34
0.62,808.5,367.5,220.5,3.5,3,0.1,1,12.97,14.5
0.62,808.5,367.5,220.5,3.5,4,0.1,1,12.93,14.33
0.62,808.5,367.5,220.5,3.5,5,0.1,1,13.02,14.27
0.98,514.5,294.0,110.25,7.0,2,0.1,2,24.29,25.95
0.98,514.5,294.0,110.25,7.0,3,0.1,2,24.31,25.63
0.98,514.5,294.0,110.25,7.0,4,0.1,2,24.13,26.13
0.98,514.5,294.0,110.25,7.0,5,0.1,2,24.25,25.89
0.9,563.5,318.5,122.5,7.0,2,0.1,2,28.88,32.54
0.9,563.5,318.5,122.5,7.0,3,0.1,2,29.68,29.44
0.9,563.5,318.5,122.5,7.0,4,0.1,2,28.83,29.36
0.9,563.5,318.5,122.5,7.0,5,0.1,2,27.9,34.2
0.86,588.0,294.0,147.0,7.0,2,0.1,2,26.48,30.91
0.86,588.0,294.0,147.0,7.0,3,0.1,2,27.02,25.63
0.86,588.0,294.0,147.0,7.0,4,0.1,2,26.33,27.36
0.86,588.0,294.0,147.0,7.0,5,0.1,2,25.36,31.9
0.82,612.5,318.5,147.0,7.0,2,0.1,2,23.75,27.38
0.82,612.5,318.5,147.0,7.0,3,0.1,2,24.23,25.02
0.82,612.5,318.5,147.0,7.0,4,0.1,2,23.67,24.8
0.82,612.5,318.5,147.0,7.0,5,0.1,2,22.79,28.79
0.79,637.0,343.0,147.0,7.0,2,0.1,2,35.65,41.07
0.79,637.0,343.0,147.0,7.0,3,0.1,2,37.26,34.62
0.79,637.0,343.0,147.0,7.0,4,0.1,2,36.97,33.87
0.79,637.0,343.0,147.0,7.0,5,0.1,2,36.03,42.86
0.76,661.5,416.5,122.5,7.0,2,0.1,2,33.16,33.91
0.76,661.5,416.5,122.5,7.0,3,0.1,2,32.4,34.07
0.76,661.5,416.5,122.5,7.0,4,0.1,2,33.12,34.17
0.76,661.5,416.5,122.5,7.0,5,0.1,2,32.41,33.78
0.74,686.0,245.0,220.5,3.5,2,0.1,2,10.42,13.39
0.74,686.0,245.0,220.5,3.5,3,0.1,2,10.46,13.72
0.74,686.0,245.0,220.5,3.5,4,0.1,2,10.32,13.57
0.74,686.0,245.0,220.5,3.5,5,0.1,2,10.45,13.79
0.71,710.5,269.5,220.5,3.5,2,0.1,2,10.64,13.67
0.71,710.5,269.5,220.5,3.5,3,0.1,2,10.72,14.11
0.71,710.5,269.5,220.5,3.5,4,0.1,2,10.55,13.8
0.71,710.5,269.5,220.5,3.5,5,0.1,2,10.68,14.21
0.69,735.0,294.0,220.5,3.5,2,0.1,2,11.45,13.2
0.69,735.0,294.0,220.5,3.5,3,0.1,2,11.46,13.54
0.69,735.0,294.0,220.5,3.5,4,0.1,2,11.32,13.32
0.69,735.0,294.0,220.5,3.5,5,0.1,2,11.49,13.51
0.66,759.5,318.5,220.5,3.5,2,0.1,2,11.45,14.86
0.66,759.5,318.5,220.5,3.5,3,0.1,2,11.42,14.75
0.66,759.5,318.5,220.5,3.5,4,0.1,2,11.33,15.0
0.66,759.5,318.5,220.5,3.5,5,0.1,2,11.43,14.74
0.64,784.0,343.0,220.5,3.5,2,0.1,2,15.41,19.23
0.64,784.0,343.0,220.5,3.5,3,0.1,2,15.18,19.34
0.64,784.0,343.0,220.5,3.5,4,0.1,2,15.34,19.32
0.64,784.0,343.0,220.5,3.5,5,0.1,2,15.19,19.3
0.62,808.5,367.5,220.5,3.5,2,0.1,2,12.88,14.37
0.62,808.5,367.5,220.5,3.5,3,0.1,2,13.0,14.57
0.62,808.5,367.5,220.5,3.5,4,0.1,2,12.97,14.27
0.62,808.5,367.5,220.5,3.5,5,0.1,2,13.04,14.24
0.98,514.5,294.0,110.25,7.0,2,0.1,3,24.28,25.68
0.98,514.5,294.0,110.25,7.0,3,0.1,3,24.4,26.02
0.98,514.5,294.0,110.25,7.0,4,0.1,3,24.11,25.84
0.98,514.5,294.0,110.25,7.0,5,0.1,3,24.35,26.14
0.9,563.5,318.5,122.5,7.0,2,0.1,3,28.07,34.14
0.9,563.5,318.5,122.5,7.0,3,0.1,3,29.01,32.85
0.9,563.5,318.5,122.5,7.0,4,0.1,3,29.62,30.08
0.9,563.5,318.5,122.5,7.0,5,0.1,3,29.05,29.67
0.86,588.0,294.0,147.0,7.0,2,0.1,3,25.41,31.73
0.86,588.0,294.0,147.0,7.0,3,0.1,3,26.47,31.01
0.86,588.0,294.0,147.0,7.0,4,0.1,3,26.89,25.9
0.86,588.0,294.0,147.0,7.0,5,0.1,3,26.46,27.4
0.82,612.5,318.5,147.0,7.0,2,0.1,3,22.93,28.68
0.82,612.5,318.5,147.0,7.0,3,0.1,3,23.84,27.54
0.82,612.5,318.5,147.0,7.0,4,0.1,3,24.17,25.35
0.82,612.5,318.5,147.0,7.0,5,0.1,3,23.87,24.93
0.79,637.0,343.0,147.0,7.0,2,0.1,3,35.78,43.12
0.79,637.0,343.0,147.0,7.0,3,0.1,3,35.48,41.22
0.79,637.0,343.0,147.0,7.0,4,0.1,3,36.97,35.1
0.79,637.0,343.0,147.0,7.0,5,0.1,3,36.7,34.29
0.76,661.5,416.5,122.5,7.0,2,0.1,3,32.52,33.85
0.76,661.5,416.5,122.5,7.0,3,0.1,3,33.28,34.11
0.76,661.5,416.5,122.5,7.0,4,0.1,3,32.33,34.48
0.76,661.5,416.5,122.5,7.0,5,0.1,3,33.24,34.5
0.74,686.0,245.0,220.5,3.5,2,0.1,3,10.39,13.6
0.74,686.0,245.0,220.5,3.5,3,0.1,3,10.34,13.36
0.74,686.0,245.0,220.5,3.5,4,0.1,3,10.35,13.65
0.74,686.0,245.0,220.5,3.5,5,0.1,3,10.38,13.49
0.71,710.5,269.5,220.5,3.5,2,0.1,3,10.77,14.14
0.71,710.5,269.5,220.5,3.5,3,0.1,3,10.68,13.77
0.71,710.5,269.5,220.5,3.5,4,0.1,3,10.68,14.3
0.71,710.5,269.5,220.5,3.5,5,0.1,3,10.7,13.87
0.69,735.0,294.0,220.5,3.5,2,0.1,3,11.22,14.44
0.69,735.0,294.0,220.5,3.5,3,0.1,3,11.16,14.27
0.69,735.0,294.0,220.5,3.5,4,0.1,3,11.1,14.67
0.69,735.0,294.0,220.5,3.5,5,0.1,3,11.14,14.4
0.66,759.5,318.5,220.5,3.5,2,0.1,3,11.59,13.46
0.66,759.5,318.5,220.5,3.5,3,0.1,3,11.6,13.7
0.66,759.5,318.5,220.5,3.5,4,0.1,3,11.53,13.59
0.66,759.5,318.5,220.5,3.5,5,0.1,3,11.61,13.83
0.64,784.0,343.0,220.5,3.5,2,0.1,3,15.16,19.14
0.64,784.0,343.0,220.5,3.5,3,0.1,3,15.36,19.18
0.64,784.0,343.0,220.5,3.5,4,0.1,3,15.12,19.37
0.64,784.0,343.0,220.5,3.5,5,0.1,3,15.36,19.29
0.62,808.5,367.5,220.5,3.5,2,0.1,3,12.68,14.09
0.62,808.5,367.5,220.5,3.5,3,0.1,3,12.63,14.23
0.62,808.5,367.5,220.5,3.5,4,0.1,3,12.71,14.14
0.62,808.5,367.5,220.5,3.5,5,0.1,3,12.73,13.89
0.98,514.5,294.0,110.25,7.0,2,0.1,4,24.38,25.91
0.98,514.5,294.0,110.25,7.0,3,0.1,4,24.23,25.72
0.98,514.5,294.0,110.25,7.0,4,0.1,4,24.04,26.18
0.98,514.5,294.0,110.25,7.0,5,0.1,4,24.32,25.87
0.9,563.5,318.5,122.5,7.0,2,0.1,4,29.06,29.34
0.9,563.5,318.5,122.5,7.0,3,0.1,4,28.05,33.91
0.9,563.5,318.5,122.5,7.0,4,0.1,4,28.86,32.83
0.9,563.5,318.5,122.5,7.0,5,0.1,4,29.79,29.92
0.86,588.0,294.0,147.0,7.0,2,0.1,4,26.44,27.17
0.86,588.0,294.0,147.0,7.0,3,0.1,4,25.37,31.76
0.86,588.0,294.0,147.0,7.0,4,0.1,4,26.33,31.06
0.86,588.0,294.0,147.0,7.0,5,0.1,4,27.03,25.81
0.82,612.5,318.5,147.0,7.0,2,0.1,4,23.8,24.61
0.82,612.5,318.5,147.0,7.0,3,0.1,4,22.8,28.61
0.82,612.5,318.5,147.0,7.0,4,0.1,4,23.59,27.57
0.82,612.5,318.5,147.0,7.0,5,0.1,4,24.24,25.16
0.79,637.0,343.0,147.0,7.0,2,0.1,4,36.86,34.25
0.79,637.0,343.0,147.0,7.0,3,0.1,4,35.89,43.3
0.79,637.0,343.0,147.0,7.0,4,0.1,4,35.45,41.86
0.79,637.0,343.0,147.0,7.0,5,0.1,4,37.1,35.29
0.76,661.5,416.5,122.5,7.0,2,0.1,4,33.08,34.11
0.76,661.5,416.5,122.5,7.0,3,0.1,4,32.38,33.62
0.76,661.5,416.5,122.5,7.0,4,0.1,4,33.09,33.89
0.76,661.5,416.5,122.5,7.0,5,0.1,4,32.31,34.05
0.74,686.0,245.0,220.5,3.5,2,0.1,4,10.08,13.2
0.74,686.0,245.0,220.5,3.5,3,0.1,4,10.15,13.36
0.74,686.0,245.0,220.5,3.5,4,0.1,4,10.07,13.21
0.74,686.0,245.0,220.5,3.5,5,0.1,4,10.14,13.53
0.71,710.5,269.5,220.5,3.5,2,0.1,4,10.66,13.67
0.71,710.5,269.5,220.5,3.5,3,0.1,4,10.68,14.12
0.71,710.5,269.5,220.5,3.5,4,0.1,4,10.53,13.79
0.71,710.5,269.5,220.5,3.5,5,0.1,4,10.72,14.2
0.69,735.0,294.0,220.5,3.5,2,0.1,4,11.18,14.29
0.69,735.0,294.0,220.5,3.5,3,0.1,4,11.22,14.49
0.69,735.0,294.0,220.5,3.5,4,0.1,4,11.07,14.42
0.69,735.0,294.0,220.5,3.5,5,0.1,4,11.2,14.73
0.66,759.5,318.5,220.5,3.5,2,0.1,4,11.44,14.86
0.66,759.5,318.5,220.5,3.5,3,0.1,4,11.42,14.67
0.66,759.5,318.5,220.5,3.5,4,0.1,4,11.33,15.0
0.66,759.5,318.5,220.5,3.5,5,0.1,4,11.43,14.83
0.64,784.0,343.0,220.5,3.5,2,0.1,4,15.4,19.24
0.64,784.0,343.0,220.5,3.5,3,0.1,4,15.19,19.25
0.64,784.0,343.0,220.5,3.5,4,0.1,4,15.32,19.42
0.64,784.0,343.0,220.5,3.5,5,0.1,4,15.16,19.48
0.62,808.5,367.5,220.5,3.5,2,0.1,4,12.85,14.37
0.62,808.5,367.5,220.5,3.5,3,0.1,4,13.04,14.34
0.62,808.5,367.5,220.5,3.5,4,0.1,4,13.0,14.28
0.62,808.5,367.5,220.5,3.5,5,0.1,4,13.0,14.47
0.98,514.5,294.0,110.25,7.0,2,0.1,5,24.35,25.64
0.98,514.5,294.0,110.25,7.0,3,0.1,5,24.33,25.98
0.98,514.5,294.0,110.25,7.0,4,0.1,5,24.03,25.88
0.98,514.5,294.0,110.25,7.0,5,0.1,5,24.26,26.18
0.9,563.5,318.5,122.5,7.0,2,0.1,5,29.83,29.82
0.9,563.5,318.5,122.5,7.0,3,0.1,5,29.08,29.52
0.9,563.5,318.5,122.5,7.0,4,0.1,5,28.03,34.45
0.9,563.5,318.5,122.5,7.0,5,0.1,5,29.02,33.01
0.86,588.0,294.0,147.0,7.0,2,0.1,5,27.03,25.82
0.86,588.0,294.0,147.0,7.0,3,0.1,5,26.45,27.33
0.86,588.0,294.0,147.0,7.0,4,0.1,5,25.36,32.04
0.86,588.0,294.0,147.0,7.0,5,0.1,5,26.45,31.28
0.82,612.5,318.5,147.0,7.0,2,0.1,5,24.37,25.11
0.82,612.5,318.5,147.0,7.0,3,0.1,5,23.89,24.77
0.82,612.5,318.5,147.0,7.0,4,0.1,5,22.89,28.88
0.82,612.5,318.5,147.0,7.0,5,0.1,5,23.86,27.69
0.79,637.0,343.0,147.0,7.0,2,0.1,5,37.03,34.99
0.79,637.0,343.0,147.0,7.0,3,0.1,5,36.71,34.18
0.79,637.0,343.0,147.0,7.0,4,0.1,5,36.77,43.14
0.79,637.0,343.0,147.0,7.0,5,0.1,5,35.48,41.26
0.76,661.5,416.5,122.5,7.0,2,0.1,5,32.31,34.25
0.76,661.5,416.5,122.5,7.0,3,0.1,5,33.21,34.35
0.76,661.5,416.5,122.5,7.0,4,0.1,5,32.46,33.64
0.76,661.5,416.5,122.5,7.0,5,0.1,5,33.27,33.88
0.74,686.0,245.0,220.5,3.5,2,0.1,5,10.47,13.65
0.74,686.0,245.0,220.5,3.5,3,0.1,5,10.37,13.44
0.74,686.0,245.0,220.5,3.5,4,0.1,5,10.34,13.72
0.74,686.0,245.0,220.5,3.5,5,0.1,5,10.39,13.5
0.71,710.5,269.5,220.5,3.5,2,0.1,5,10.78,14.18
0.71,710.5,269.5,220.5,3.5,3,0.1,5,10.7,13.75
0.71,710.5,269.5,220.5,3.5,4,0.1,5,10.67,14.26
0.71,710.5,269.5,220.5,3.5,5,0.1,5,13.69,13.89
0.69,735.0,294.0,220.5,3.5,2,0.1,5,11.21,14.55
0.69,735.0,294.0,220.5,3.5,3,0.1,5,11.14,14.28
0.69,735.0,294.0,220.5,3.5,4,0.1,5,11.11,14.46
0.69,735.0,294.0,220.5,3.5,5,0.1,5,11.16,14.39
0.66,759.5,318.5,220.5,3.5,2,0.1,5,11.38,14.54
0.66,759.5,318.5,220.5,3.5,3,0.1,5,11.34,14.81
0.66,759.5,318.5,220.5,3.5,4,0.1,5,11.22,14.65
0.66,759.5,318.5,220.5,3.5,5,0.1,5,11.34,14.87
0.64,784.0,343.0,220.5,3.5,2,0.1,5,15.16,19.24
0.64,784.0,343.0,220.5,3.5,3,0.1,5,15.37,19.18
0.64,784.0,343.0,220.5,3.5,4,0.1,5,15.12,19.26
0.64,784.0,343.0,220.5,3.5,5,0.1,5,15.36,19.29
0.62,808.5,367.5,220.5,3.5,2,0.1,5,12.59,14.24
0.62,808.5,367.5,220.5,3.5,3,0.1,5,12.74,13.97
0.62,808.5,367.5,220.5,3.5,4,0.1,5,12.8,13.99
0.62,808.5,367.5,220.5,3.5,5,0.1,5,12.62,14.15
0.98,514.5,294.0,110.25,7.0,2,0.25,1,28.15,29.79
0.98,514.5,294.0,110.25,7.0,3,0.25,1,28.15,29.79
0.98,514.5,294.0,110.25,7.0,4,0.25,1,28.37,29.28
0.98,514.5,294.0,110.25,7.0,5,0.25,1,28.41,29.49
0.9,563.5,318.5,122.5,7.0,2,0.25,1,32.68,36.12
0.9,563.5,318.5,122.5,7.0,3,0.25,1,33.48,33.17
0.9,563.5,318.5,122.5,7.0,4,0.25,1,32.84,32.71
0.9,563.5,318.5,122.5,7.0,5,0.25,1,32.0,37.58
0.86,588.0,294.0,147.0,7.0,2,0.25,1,29.54,33.98
0.86,588.0,294.0,147.0,7.0,3,0.25,1,30.05,28.61
0.86,588.0,294.0,147.0,7.0,4,0.25,1,29.6,30.12
0.86,588.0,294.0,147.0,7.0,5,0.25,1,28.66,34.73
0.82,612.5,318.5,147.0,7.0,2,0.25,1,26.84,30.17
0.82,612.5,318.5,147.0,7.0,3,0.25,1,27.27,27.84
0.82,612.5,318.5,147.0,7.0,4,0.25,1,26.97,27.25
0.82,612.5,318.5,147.0,7.0,5,0.25,1,26.19,31.39
0.79,637.0,343.0,147.0,7.0,2,0.25,1,38.67,43.8
0.79,637.0,343.0,147.0,7.0,3,0.25,1,40.03,37.81
0.79,637.0,343.0,147.0,7.0,4,0.25,1,39.86,36.85
0.79,637.0,343.0,147.0,7.0,5,0.25,1,39.04,45.52
0.76,661.5,416.5,122.5,7.0,2,0.25,1,36.96,36.85
0.76,661.5,416.5,122.5,7.0,3,0.25,1,36.13,37.58
0.76,661.5,416.5,122.5,7.0,4,0.25,1,36.91,37.45
0.76,661.5,416.5,122.5,7.0,5,0.25,1,36.43,36.62
0.74,686.0,245.0,220.5,3.5,2,0.25,1,12.43,15.19
0.74,686.0,245.0,220.5,3.5,3,0.25,1,12.5,15.5
0.74,686.0,245.0,220.5,3.5,4,0.25,1,12.41,15.28
0.74,686.0,245.0,220.5,3.5,5,0.25,1,12.45,15.5
0.71,710.5,269.5,220.5,3.5,2,0.25,1,12.57,15.42
0.71,710.5,269.5,220.5,3.5,3,0.25,1,12.65,15.85
0.71,710.5,269.5,220.5,3.5,4,0.25,1,12.57,15.44
0.71,710.5,269.5,220.5,3.5,5,0.25,1,12.63,15.81
0.69,735.0,294.0,220.5,3.5,2,0.25,1,12.78,15.21
0.69,735.0,294.0,220.5,3.5,3,0.25,1,12.93,15.63
0.69,735.0,294.0,220.5,3.5,4,0.25,1,12.73,15.48
0.69,735.0,294.0,220.5,3.5,5,0.25,1,12.72,15.78
0.66,759.5,318.5,220.5,3.5,2,0.25,1,13.17,16.39
0.66,759.5,318.5,220.5,3.5,3,0.25,1,13.18,16.27
0.66,759.5,318.5,220.5,3.5,4,0.25,1,13.17,16.39
0.66,759.5,318.5,220.5,3.5,5,0.25,1,13.18,16.19
0.64,784.0,343.0,220.5,3.5,2,0.25,1,17.5,21.13
0.64,784.0,343.0,220.5,3.5,3,0.25,1,17.35,21.19
0.64,784.0,343.0,220.5,3.5,4,0.25,1,17.52,21.09
0.64,784.0,343.0,220.5,3.5,5,0.25,1,17.37,21.08
0.62,808.5,367.5,220.5,3.5,2,0.25,1,15.09,15.77
0.62,808.5,367.5,220.5,3.5,3,0.25,1,15.12,15.95
0.62,808.5,367.5,220.5,3.5,4,0.25,1,15.08,15.77
0.62,808.5,367.5,220.5,3.5,5,0.25,1,15.16,15.76
0.98,514.5,294.0,110.25,7.0,2,0.25,2,28.67,29.62
0.98,514.5,294.0,110.25,7.0,3,0.25,2,28.57,29.69
0.98,514.5,294.0,110.25,7.0,4,0.25,2,28.18,30.18
0.98,514.5,294.0,110.25,7.0,5,0.25,2,28.6,30.02
0.9,563.5,318.5,122.5,7.0,2,0.25,2,32.46,35.56
0.9,563.5,318.5,122.5,7.0,3,0.25,2,33.27,32.64
0.9,563.5,318.5,122.5,7.0,4,0.25,2,32.33,32.77
0.9,563.5,318.5,122.5,7.0,5,0.25,2,31.66,37.72
0.86,588.0,294.0,147.0,7.0,2,0.25,2,29.34,33.37
0.86,588.0,294.0,147.0,7.0,3,0.25,2,29.87,27.89
0.86,588.0,294.0,147.0,7.0,4,0.25,2,29.27,29.9
0.86,588.0,294.0,147.0,7.0,5,0.25,2,28.4,34.52
0.82,612.5,318.5,147.0,7.0,2,0.25,2,25.74,28.27
0.82,612.5,318.5,147.0,7.0,3,0.25,2,25.98,26.96
0.82,612.5,318.5,147.0,7.0,4,0.25,2,25.38,26.72
0.82,612.5,318.5,147.0,7.0,5,0.25,2,24.94,29.88
0.79,637.0,343.0,147.0,7.0,2,0.25,2,38.57,43.86
0.79,637.0,343.0,147.0,7.0,3,0.25,2,40.19,37.41
0.79,637.0,343.0,147.0,7.0,4,0.25,2,39.97,36.77
0.79,637.0,343.0,147.0,7.0,5,0.25,2,38.98,45.97
0.76,661.5,416.5,122.5,7.0,2,0.25,2,36.95,36.87
0.76,661.5,416.5,122.5,7.0,3,0.25,2,36.28,37.35
0.76,661.5,416.5,122.5,7.0,4,0.25,2,36.86,37.28
0.76,661.5,416.5,122.5,7.0,5,0.25,2,36.45,36.81
0.74,686.0,245.0,220.5,3.5,2,0.25,2,12.35,14.73
0.74,686.0,245.0,220.5,3.5,3,0.25,2,12.45,15.1
0.74,686.0,245.0,220.5,3.5,4,0.25,2,12.16,15.18
0.74,686.0,245.0,220.5,3.5,5,0.25,2,12.3,15.44
0.71,710.5,269.5,220.5,3.5,2,0.25,2,12.33,14.91
0.71,710.5,269.5,220.5,3.5,3,0.25,2,12.29,15.4
0.71,710.5,269.5,220.5,3.5,4,0.25,2,12.2,14.94
0.71,710.5,269.5,220.5,3.5,5,0.25,2,12.49,15.32
0.69,735.0,294.0,220.5,3.5,2,0.25,2,12.85,15.52
0.69,735.0,294.0,220.5,3.5,3,0.25,2,12.87,15.85
0.69,735.0,294.0,220.5,3.5,4,0.25,2,12.73,15.66
0.69,735.0,294.0,220.5,3.5,5,0.25,2,12.95,15.99
0.66,759.5,318.5,220.5,3.5,2,0.25,2,13.05,15.89
0.66,759.5,318.5,220.5,3.5,3,0.25,2,12.93,15.85
0.66,759.5,318.5,220.5,3.5,4,0.25,2,12.77,16.22
0.66,759.5,318.5,220.5,3.5,5,0.25,2,13.0,15.87
0.64,784.0,343.0,220.5,3.5,2,0.25,2,17.14,20.47
0.64,784.0,343.0,220.5,3.5,3,0.25,2,16.84,20.56
0.64,784.0,343.0,220.5,3.5,4,0.25,2,17.02,20.48
0.64,784.0,343.0,220.5,3.5,5,0.25,2,17.11,20.43
0.62,808.5,367.5,220.5,3.5,2,0.25,2,14.34,15.32
0.62,808.5,367.5,220.5,3.5,3,0.25,2,14.66,15.64
0.62,808.5,367.5,220.5,3.5,4,0.25,2,14.6,15.14
0.62,808.5,367.5,220.5,3.5,5,0.25,2,14.6,15.3
0.98,514.5,294.0,110.25,7.0,2,0.25,3,28.67,29.43
0.98,514.5,294.0,110.25,7.0,3,0.25,3,28.56,29.78
0.98,514.5,294.0,110.25,7.0,4,0.25,3,28.17,30.1
0.98,514.5,294.0,110.25,7.0,5,0.25,3,28.63,30.19
0.9,563.5,318.5,122.5,7.0,2,0.25,3,31.63,36.35
0.9,563.5,318.5,122.5,7.0,3,0.25,3,32.4,35.1
0.9,563.5,318.5,122.5,7.0,4,0.25,3,32.68,32.83
0.9,563.5,318.5,122.5,7.0,5,0.25,3,32.29,32.46
0.86,588.0,294.0,147.0,7.0,2,0.25,3,28.4,33.52
0.86,588.0,294.0,147.0,7.0,3,0.25,3,29.4,32.93
0.86,588.0,294.0,147.0,7.0,4,0.25,3,29.43,28.38
0.86,588.0,294.0,147.0,7.0,5,0.25,3,29.07,29.82
0.82,612.5,318.5,147.0,7.0,2,0.25,3,24.7,28.77
0.82,612.5,318.5,147.0,7.0,3,0.25,3,25.48,27.76
0.82,612.5,318.5,147.0,7.0,4,0.25,3,25.37,26.95
0.82,612.5,318.5,147.0,7.0,5,0.25,3,25.17,26.41
0.79,637.0,343.0,147.0,7.0,2,0.25,3,39.04,45.13
0.79,637.0,343.0,147.0,7.0,3,0.25,3,38.35,43.66
0.79,637.0,343.0,147.0,7.0,4,0.25,3,39.81,37.76
0.79,637.0,343.0,147.0,7.0,5,0.25,3,39.83,36.87
0.76,661.5,416.5,122.5,7.0,2,0.25,3,35.99,36.07
0.76,661.5,416.5,122.5,7.0,3,0.25,3,36.59,36.44
0.76,661.5,416.5,122.5,7.0,4,0.25,3,35.64,37.28
0.76,661.5,416.5,122.5,7.0,5,0.25,3,36.52,37.29
0.74,686.0,245.0,220.5,3.5,2,0.25,3,11.8,14.49
0.74,686.0,245.0,220.5,3.5,3,0.25,3,12.03,13.79
0.74,686.0,245.0,220.5,3.5,4,0.25,3,11.98,14.72
0.74,686.0,245.0,220.5,3.5,5,0.25,3,11.69,14.76
0.71,710.5,269.5,220.5,3.5,2,0.25,3,12.41,14.92
0.71,710.5,269.5,220.5,3.5,3,0.25,3,12.28,14.74
0.71,710.5,269.5,220.5,3.5,4,0.25,3,12.1,15.57
0.71,710.5,269.5,220.5,3.5,5,0.25,3,12.19,14.94
0.69,735.0,294.0,220.5,3.5,2,0.25,3,12.34,14.92
0.69,735.0,294.0,220.5,3.5,3,0.25,3,12.46,14.38
0.69,735.0,294.0,220.5,3.5,4,0.25,3,12.31,15.44
0.69,735.0,294.0,220.5,3.5,5,0.25,3,12.12,15.17
0.66,759.5,318.5,220.5,3.5,2,0.25,3,12.97,15.53
0.66,759.5,318.5,220.5,3.5,3,0.25,3,13.01,15.8
0.66,759.5,318.5,220.5,3.5,4,0.25,3,12.74,16.14
0.66,759.5,318.5,220.5,3.5,5,0.25,3,12.84,16.26
0.64,784.0,343.0,220.5,3.5,2,0.25,3,16.83,19.87
0.64,784.0,343.0,220.5,3.5,3,0.25,3,16.93,20.03
0.64,784.0,343.0,220.5,3.5,4,0.25,3,16.66,20.46
0.64,784.0,343.0,220.5,3.5,5,0.25,3,16.86,20.28
0.62,808.5,367.5,220.5,3.5,2,0.25,3,13.91,14.89
0.62,808.5,367.5,220.5,3.5,3,0.25,3,14.34,14.96
0.62,808.5,367.5,220.5,3.5,4,0.25,3,13.95,14.89
0.62,808.5,367.5,220.5,3.5,5,0.25,3,13.99,14.35
0.98,514.5,294.0,110.25,7.0,2,0.25,4,28.7,29.61
0.98,514.5,294.0,110.25,7.0,3,0.25,4,28.55,29.59
0.98,514.5,294.0,110.25,7.0,4,0.25,4,28.15,30.19
0.98,514.5,294.0,110.25,7.0,5,0.25,4,28.62,30.12
0.9,563.5,318.5,122.5,7.0,2,0.25,4,32.67,32.12
0.9,563.5,318.5,122.5,7.0,3,0.25,4,31.69,37.12
0.9,563.5,318.5,122.5,7.0,4,0.25,4,32.07,36.16
0.9,563.5,318.5,122.5,7.0,5,0.25,4,33.28,33.16
0.86,588.0,294.0,147.0,7.0,2,0.25,4,29.47,29.45
0.86,588.0,294.0,147.0,7.0,3,0.25,4,28.42,34.19
0.86,588.0,294.0,147.0,7.0,4,0.25,4,29.08,33.93
0.86,588.0,294.0,147.0,7.0,5,0.25,4,29.88,28.31
0.82,612.5,318.5,147.0,7.0,2,0.25,4,25.66,26.3
0.82,612.5,318.5,147.0,7.0,3,0.25,4,24.96,29.43
0.82,612.5,318.5,147.0,7.0,4,0.25,4,25.43,28.76
0.82,612.5,318.5,147.0,7.0,5,0.25,4,26.0,27.34
0.79,637.0,343.0,147.0,7.0,2,0.25,4,40.0,36.26
0.79,637.0,343.0,147.0,7.0,3,0.25,4,38.84,45.48
0.79,637.0,343.0,147.0,7.0,4,0.25,4,38.33,44.16
0.79,637.0,343.0,147.0,7.0,5,0.25,4,40.12,37.26
0.76,661.5,416.5,122.5,7.0,2,0.25,4,36.95,37.2
0.76,661.5,416.5,122.5,7.0,3,0.25,4,36.45,36.76
0.76,661.5,416.5,122.5,7.0,4,0.25,4,36.81,37.05
0.76,661.5,416.5,122.5,7.0,5,0.25,4,36.26,37.51
0.74,686.0,245.0,220.5,3.5,2,0.25,4,12.32,14.92
0.74,686.0,245.0,220.5,3.5,3,0.25,4,12.3,15.24
0.74,686.0,245.0,220.5,3.5,4,0.25,4,12.18,15.03
0.74,686.0,245.0,220.5,3.5,5,0.25,4,12.43,15.35
0.71,710.5,269.5,220.5,3.5,2,0.25,4,12.36,14.67
0.71,710.5,269.5,220.5,3.5,3,0.25,4,12.49,15.09
0.71,710.5,269.5,220.5,3.5,4,0.25,4,12.17,15.2
0.71,710.5,269.5,220.5,3.5,5,0.25,4,12.28,15.64
0.69,735.0,294.0,220.5,3.5,2,0.25,4,12.91,15.37
0.69,735.0,294.0,220.5,3.5,3,0.25,4,12.95,15.73
0.69,735.0,294.0,220.5,3.5,4,0.25,4,12.67,15.83
0.69,735.0,294.0,220.5,3.5,5,0.25,4,12.86,16.13
0.66,759.5,318.5,220.5,3.5,2,0.25,4,12.95,15.95
0.66,759.5,318.5,220.5,3.5,3,0.25,4,13.0,15.59
0.66,759.5,318.5,220.5,3.5,4,0.25,4,12.86,16.17
0.66,759.5,318.5,220.5,3.5,5,0.25,4,12.92,16.14
0.64,784.0,343.0,220.5,3.5,2,0.25,4,16.99,19.65
0.64,784.0,343.0,220.5,3.5,3,0.25,4,16.69,19.76
0.64,784.0,343.0,220.5,3.5,4,0.25,4,16.56,20.37
0.64,784.0,343.0,220.5,3.5,5,0.25,4,16.62,19.9
0.62,808.5,367.5,220.5,3.5,2,0.25,4,14.33,15.41
0.62,808.5,367.5,220.5,3.5,3,0.25,4,14.61,15.56
0.62,808.5,367.5,220.5,3.5,4,0.25,4,14.61,15.07
0.62,808.5,367.5,220.5,3.5,5,0.25,4,14.65,15.38
0.98,514.5,294.0,110.25,7.0,2,0.25,5,28.69,29.53
0.98,514.5,294.0,110.25,7.0,3,0.25,5,28.58,29.77
0.98,514.5,294.0,110.25,7.0,4,0.25,5,28.15,30.0
0.98,514.5,294.0,110.25,7.0,5,0.25,5,28.61,30.2
0.9,563.5,318.5,122.5,7.0,2,0.25,5,33.13,32.25
0.9,563.5,318.5,122.5,7.0,3,0.25,5,32.31,32.0
0.9,563.5,318.5,122.5,7.0,4,0.25,5,31.53,37.19
0.9,563.5,318.5,122.5,7.0,5,0.25,5,32.46,35.62
0.86,588.0,294.0,147.0,7.0,2,0.25,5,29.71,28.02
0.86,588.0,294.0,147.0,7.0,3,0.25,5,29.09,29.43
0.86,588.0,294.0,147.0,7.0,4,0.25,5,28.31,34.15
0.86,588.0,294.0,147.0,7.0,5,0.25,5,29.39,33.47
0.82,612.5,318.5,147.0,7.0,2,0.25,5,25.7,26.53
0.82,612.5,318.5,147.0,7.0,3,0.25,5,25.17,26.08
0.82,612.5,318.5,147.0,7.0,4,0.25,5,24.6,29.31
0.82,612.5,318.5,147.0,7.0,5,0.25,5,25.49,28.14
0.79,637.0,343.0,147.0,7.0,2,0.25,5,39.89,37.54
0.79,637.0,343.0,147.0,7.0,3,0.25,5,39.83,36.66
0.79,637.0,343.0,147.0,7.0,4,0.25,5,39.01,45.28
0.79,637.0,343.0,147.0,7.0,5,0.25,5,38.65,43.73
0.76,661.5,416.5,122.5,7.0,2,0.25,5,35.69,36.93
0.76,661.5,416.5,122.5,7.0,3,0.25,5,36.64,37.01
0.76,661.5,416.5,122.5,7.0,4,0.25,5,36.06,35.73
0.76,661.5,416.5,122.5,7.0,5,0.25,5,36.7,36.15
0.74,686.0,245.0,220.5,3.5,2,0.25,5,12.12,14.48
0.74,686.0,245.0,220.5,3.5,3,0.25,5,11.67,14.58
0.74,686.0,245.0,220.5,3.5,4,0.25,5,11.64,14.81
0.74,686.0,245.0,220.5,3.5,5,0.25,5,12.02,14.03
0.71,710.5,269.5,220.5,3.5,2,0.25,5,12.27,15.27
0.71,710.5,269.5,220.5,3.5,3,0.25,5,12.19,14.71
0.71,710.5,269.5,220.5,3.5,4,0.25,5,12.25,15.23
0.71,710.5,269.5,220.5,3.5,5,0.25,5,12.27,14.97
0.69,735.0,294.0,220.5,3.5,2,0.25,5,12.47,15.14
0.69,735.0,294.0,220.5,3.5,3,0.25,5,12.12,14.97
0.69,735.0,294.0,220.5,3.5,4,0.25,5,12.18,15.22
0.69,735.0,294.0,220.5,3.5,5,0.25,5,12.47,14.6
0.66,759.5,318.5,220.5,3.5,2,0.25,5,12.93,15.83
0.66,759.5,318.5,220.5,3.5,3,0.25,5,12.82,16.03
0.66,759.5,318.5,220.5,3.5,4,0.25,5,12.78,15.8
0.66,759.5,318.5,220.5,3.5,5,0.25,5,13.02,16.06
0.64,784.0,343.0,220.5,3.5,2,0.25,5,16.73,20.13
0.64,784.0,343.0,220.5,3.5,3,0.25,5,16.86,20.01
0.64,784.0,343.0,220.5,3.5,4,0.25,5,16.76,20.19
0.64,784.0,343.0,220.5,3.5,5,0.25,5,16.92,20.29
0.62,808.5,367.5,220.5,3.5,2,0.25,5,13.68,15.19
0.62,808.5,367.5,220.5,3.5,3,0.25,5,13.99,14.61
0.62,808.5,367.5,220.5,3.5,4,0.25,5,14.16,14.61
0.62,808.5,367.5,220.5,3.5,5,0.25,5,13.86,14.75
0.98,514.5,294.0,110.25,7.0,2,0.4,1,32.26,33.37
0.98,514.5,294.0,110.25,7.0,3,0.4,1,32.26,33.34
0.98,514.5,294.0,110.25,7.0,4,0.4,1,32.49,32.83
0.98,514.5,294.0,110.25,7.0,5,0.4,1,32.53,33.04
0.9,563.5,318.5,122.5,7.0,2,0.4,1,36.47,39.28
0.9,563.5,318.5,122.5,7.0,3,0.4,1,37.24,36.38
0.9,563.5,318.5,122.5,7.0,4,0.4,1,36.66,35.92
0.9,563.5,318.5,122.5,7.0,5,0.4,1,35.96,40.99
0.86,588.0,294.0,147.0,7.0,2,0.4,1,31.89,35.99
0.86,588.0,294.0,147.0,7.0,3,0.4,1,32.39,30.66
0.86,588.0,294.0,147.0,7.0,4,0.4,1,32.09,31.7
0.86,588.0,294.0,147.0,7.0,5,0.4,1,31.29,36.73
0.82,612.5,318.5,147.0,7.0,2,0.4,1,29.22,31.71
0.82,612.5,318.5,147.0,7.0,3,0.4,1,29.91,29.13
0.82,612.5,318.5,147.0,7.0,4,0.4,1,29.53,28.99
0.82,612.5,318.5,147.0,7.0,5,0.4,1,28.65,33.54
0.79,637.0,343.0,147.0,7.0,2,0.4,1,41.4,45.29
0.79,637.0,343.0,147.0,7.0,3,0.4,1,42.62,39.07
0.79,637.0,343.0,147.0,7.0,4,0.4,1,42.5,38.35
0.79,637.0,343.0,147.0,7.0,5,0.4,1,41.67,46.94
0.76,661.5,416.5,122.5,7.0,2,0.4,1,40.78,39.55
0.76,661.5,416.5,122.5,7.0,3,0.4,1,39.97,40.85
0.76,661.5,416.5,122.5,7.0,4,0.4,1,40.71,40.63
0.76,661.5,416.5,122.5,7.0,5,0.4,1,40.43,39.48
0.74,686.0,245.0,220.5,3.5,2,0.4,1,14.52,16.94
0.74,686.0,245.0,220.5,3.5,3,0.4,1,14.61,17.25
0.74,686.0,245.0,220.5,3.5,4,0.4,1,14.5,17.03
0.74,686.0,245.0,220.5,3.5,5,0.4,1,14.55,17.25
0.71,710.5,269.5,220.5,3.5,2,0.4,1,14.51,17.1
0.71,710.5,269.5,220.5,3.5,3,0.4,1,14.6,17.51
0.71,710.5,269.5,220.5,3.5,4,0.4,1,14.5,17.12
0.71,710.5,269.5,220.5,3.5,5,0.4,1,14.58,17.47
0.69,735.0,294.0,220.5,3.5,2,0.4,1,14.51,16.5
0.69,735.0,294.0,220.5,3.5,3,0.4,1,14.7,17.0
0.69,735.0,294.0,220.5,3.5,4,0.4,1,14.42,16.87
0.69,735.0,294.0,220.5,3.5,5,0.4,1,14.42,17.2
0.66,759.5,318.5,220.5,3.5,2,0.4,1,15.23,18.14
0.66,759.5,318.5,220.5,3.5,3,0.4,1,15.23,18.03
0.66,759.5,318.5,220.5,3.5,4,0.4,1,15.23,18.14
0.66,759.5,318.5,220.5,3.5,5,0.4,1,15.23,17.95
0.64,784.0,343.0,220.5,3.5,2,0.4,1,19.52,22.72
0.64,784.0,343.0,220.5,3.5,3,0.4,1,19.36,22.73
0.64,784.0,343.0,220.5,3.5,4,0.4,1,19.48,22.72
0.64,784.0,343.0,220.5,3.5,5,0.4,1,19.42,22.53
0.62,808.5,367.5,220.5,3.5,2,0.4,1,15.09,17.2
0.62,808.5,367.5,220.5,3.5,3,0.4,1,17.17,17.21
0.62,808.5,367.5,220.5,3.5,4,0.4,1,17.14,17.15
0.62,808.5,367.5,220.5,3.5,5,0.4,1,17.14,17.2
0.98,514.5,294.0,110.25,7.0,2,0.4,2,32.82,32.96
0.98,514.5,294.0,110.25,7.0,3,0.4,2,32.71,33.13
0.98,514.5,294.0,110.25,7.0,4,0.4,2,32.24,33.94
0.98,514.5,294.0,110.25,7.0,5,0.4,2,32.72,33.78
0.9,563.5,318.5,122.5,7.0,2,0.4,2,35.84,38.35
0.9,563.5,318.5,122.5,7.0,3,0.4,2,36.57,35.39
0.9,563.5,318.5,122.5,7.0,4,0.4,2,36.06,34.94
0.9,563.5,318.5,122.5,7.0,5,0.4,2,35.69,40.66
0.86,588.0,294.0,147.0,7.0,2,0.4,2,32.48,35.48
0.86,588.0,294.0,147.0,7.0,3,0.4,2,32.74,30.53
0.86,588.0,294.0,147.0,7.0,4,0.4,2,32.13,32.28
0.86,588.0,294.0,147.0,7.0,5,0.4,2,31.64,36.86
0.82,612.5,318.5,147.0,7.0,2,0.4,2,28.95,30.34
0.82,612.5,318.5,147.0,7.0,3,0.4,2,29.49,27.93
0.82,612.5,318.5,147.0,7.0,4,0.4,2,28.64,28.95
0.82,612.5,318.5,147.0,7.0,5,0.4,2,28.01,32.92
0.79,637.0,343.0,147.0,7.0,2,0.4,2,41.64,45.59
0.79,637.0,343.0,147.0,7.0,3,0.4,2,43.1,39.41
0.79,637.0,343.0,147.0,7.0,4,0.4,2,42.74,38.84
0.79,637.0,343.0,147.0,7.0,5,0.4,2,41.92,48.03
0.76,661.5,416.5,122.5,7.0,2,0.4,2,40.78,39.48
0.76,661.5,416.5,122.5,7.0,3,0.4,2,40.15,40.4
0.76,661.5,416.5,122.5,7.0,4,0.4,2,40.57,40.47
0.76,661.5,416.5,122.5,7.0,5,0.4,2,40.42,39.7
0.74,686.0,245.0,220.5,3.5,2,0.4,2,14.54,16.43
0.74,686.0,245.0,220.5,3.5,3,0.4,2,14.45,16.93
0.74,686.0,245.0,220.5,3.5,4,0.4,2,14.18,16.99
0.74,686.0,245.0,220.5,3.5,5,0.4,2,14.5,17.03
0.71,710.5,269.5,220.5,3.5,2,0.4,2,14.7,16.77
0.71,710.5,269.5,220.5,3.5,3,0.4,2,14.66,17.37
0.71,710.5,269.5,220.5,3.5,4,0.4,2,14.4,17.27
0.71,710.5,269.5,220.5,3.5,5,0.4,2,14.71,17.51
0.69,735.0,294.0,220.5,3.5,2,0.4,2,14.75,16.44
0.69,735.0,294.0,220.5,3.5,3,0.4,2,14.71,17.01
0.69,735.0,294.0,220.5,3.5,4,0.4,2,14.33,17.23
0.69,735.0,294.0,220.5,3.5,5,0.4,2,14.62,17.22
0.66,759.5,318.5,220.5,3.5,2,0.4,2,15.34,17.85
0.66,759.5,318.5,220.5,3.5,3,0.4,2,15.29,17.89
0.66,759.5,318.5,220.5,3.5,4,0.4,2,15.09,18.36
0.66,759.5,318.5,220.5,3.5,5,0.4,2,15.3,18.15
0.64,784.0,343.0,220.5,3.5,2,0.4,2,19.2,21.72
0.64,784.0,343.0,220.5,3.5,3,0.4,2,18.88,22.07
0.64,784.0,343.0,220.5,3.5,4,0.4,2,18.9,22.09
0.64,784.0,343.0,220.5,3.5,5,0.4,2,19.12,21.93
0.62,808.5,367.5,220.5,3.5,2,0.4,2,16.76,17.36
0.62,808.5,367.5,220.5,3.5,3,0.4,2,17.23,17.38
0.62,808.5,367.5,220.5,3.5,4,0.4,2,17.26,16.86
0.62,808.5,367.5,220.5,3.5,5,0.4,2,17.15,16.99
0.98,514.5,294.0,110.25,7.0,2,0.4,3,32.82,32.78
0.98,514.5,294.0,110.25,7.0,3,0.4,3,32.69,33.24
0.98,514.5,294.0,110.25,7.0,4,0.4,3,32.23,33.86
0.98,514.5,294.0,110.25,7.0,5,0.4,3,32.75,34.0
0.9,563.5,318.5,122.5,7.0,2,0.4,3,34.24,37.26
0.9,563.5,318.5,122.5,7.0,3,0.4,3,34.95,35.04
0.9,563.5,318.5,122.5,7.0,4,0.4,3,35.05,33.82
0.9,563.5,318.5,122.5,7.0,5,0.4,3,34.29,33.31
0.86,588.0,294.0,147.0,7.0,2,0.4,3,31.28,35.22
0.86,588.0,294.0,147.0,7.0,3,0.4,3,32.12,34.7
0.86,588.0,294.0,147.0,7.0,4,0.4,3,32.05,30.11
0.86,588.0,294.0,147.0,7.0,5,0.4,3,31.84,31.6
0.82,612.5,318.5,147.0,7.0,2,0.4,3,28.67,32.43
0.82,612.5,318.5,147.0,7.0,3,0.4,3,29.67,30.65
0.82,612.5,318.5,147.0,7.0,4,0.4,3,29.47,29.77
0.82,612.5,318.5,147.0,7.0,5,0.4,3,28.91,29.64
0.79,637.0,343.0,147.0,7.0,2,0.4,3,41.26,46.44
0.79,637.0,343.0,147.0,7.0,3,0.4,3,41.3,44.18
0.79,637.0,343.0,147.0,7.0,4,0.4,3,42.49,38.81
0.79,637.0,343.0,147.0,7.0,5,0.4,3,42.08,38.23
0.76,661.5,416.5,122.5,7.0,2,0.4,3,39.32,38.17
0.76,661.5,416.5,122.5,7.0,3,0.4,3,39.84,38.48
0.76,661.5,416.5,122.5,7.0,4,0.4,3,38.89,39.66
0.76,661.5,416.5,122.5,7.0,5,0.4,3,39.68,40.1
0.74,686.0,245.0,220.5,3.5,2,0.4,3,13.97,16.08
0.74,686.0,245.0,220.5,3.5,3,0.4,3,14.22,15.39
0.74,686.0,245.0,220.5,3.5,4,0.4,3,14.1,16.57
0.74,686.0,245.0,220.5,3.5,5,0.4,3,13.78,16.6
0.71,710.5,269.5,220.5,3.5,2,0.4,3,14.07,16.11
0.71,710.5,269.5,220.5,3.5,3,0.4,3,14.03,15.47
0.71,710.5,269.5,220.5,3.5,4,0.4,3,13.94,16.7
0.71,710.5,269.5,220.5,3.5,5,0.4,3,13.86,16.1
0.69,735.0,294.0,220.5,3.5,2,0.4,3,14.32,16.35
0.69,735.0,294.0,220.5,3.5,3,0.4,3,14.56,15.84
0.69,735.0,294.0,220.5,3.5,4,0.4,3,14.33,16.99
0.69,735.0,294.0,220.5,3.5,5,0.4,3,14.08,17.02
0.66,759.5,318.5,220.5,3.5,2,0.4,3,15.16,17.04
0.66,759.5,318.5,220.5,3.5,3,0.4,3,15.18,17.63
0.66,759.5,318.5,220.5,3.5,4,0.4,3,14.72,18.1
0.66,759.5,318.5,220.5,3.5,5,0.4,3,14.9,18.22
0.64,784.0,343.0,220.5,3.5,2,0.4,3,18.48,20.78
0.64,784.0,343.0,220.5,3.5,3,0.4,3,18.71,20.72
0.64,784.0,343.0,220.5,3.5,4,0.4,3,18.48,21.54
0.64,784.0,343.0,220.5,3.5,5,0.4,3,18.46,21.53
0.62,808.5,367.5,220.5,3.5,2,0.4,3,16.47,16.9
0.62,808.5,367.5,220.5,3.5,3,0.4,3,16.35,17.14
0.62,808.5,367.5,220.5,3.5,4,0.4,3,16.55,16.56
0.62,808.5,367.5,220.5,3.5,5,0.4,3,16.74,16.0
0.98,514.5,294.0,110.25,7.0,2,0.4,4,32.85,32.95
0.98,514.5,294.0,110.25,7.0,3,0.4,4,32.67,33.06
0.98,514.5,294.0,110.25,7.0,4,0.4,4,32.21,33.95
0.98,514.5,294.0,110.25,7.0,5,0.4,4,32.74,33.88
0.9,563.5,318.5,122.5,7.0,2,0.4,4,36.45,33.98
0.9,563.5,318.5,122.5,7.0,3,0.4,4,35.73,39.92
0.9,563.5,318.5,122.5,7.0,4,0.4,4,35.4,39.22
0.9,563.5,318.5,122.5,7.0,5,0.4,4,36.57,36.1
0.86,588.0,294.0,147.0,7.0,2,0.4,4,32.38,31.53
0.86,588.0,294.0,147.0,7.0,3,0.4,4,31.66,36.2
0.86,588.0,294.0,147.0,7.0,4,0.4,4,32.15,36.21
0.86,588.0,294.0,147.0,7.0,5,0.4,4,32.75,31.0
0.82,612.5,318.5,147.0,7.0,2,0.4,4,28.93,28.2
0.82,612.5,318.5,147.0,7.0,3,0.4,4,28.05,32.35
0.82,612.5,318.5,147.0,7.0,4,0.4,4,28.64,31.14
0.82,612.5,318.5,147.0,7.0,5,0.4,4,29.52,28.43
0.79,637.0,343.0,147.0,7.0,2,0.4,4,42.77,38.33
0.79,637.0,343.0,147.0,7.0,3,0.4,4,41.73,47.59
0.79,637.0,343.0,147.0,7.0,4,0.4,4,41.32,46.23
0.79,637.0,343.0,147.0,7.0,5,0.4,4,42.96,39.56
0.76,661.5,416.5,122.5,7.0,2,0.4,4,40.68,40.36
0.76,661.5,416.5,122.5,7.0,3,0.4,4,40.4,39.67
0.76,661.5,416.5,122.5,7.0,4,0.4,4,40.6,39.85
0.76,661.5,416.5,122.5,7.0,5,0.4,4,40.11,40.77
0.74,686.0,245.0,220.5,3.5,2,0.4,4,14.37,16.61
0.74,686.0,245.0,220.5,3.5,3,0.4,4,14.48,16.74
0.74,686.0,245.0,220.5,3.5,4,0.4,4,14.32,16.9
0.74,686.0,245.0,220.5,3.5,5,0.4,4,14.44,17.32
0.71,710.5,269.5,220.5,3.5,2,0.4,4,14.6,16.85
0.71,710.5,269.5,220.5,3.5,3,0.4,4,14.7,17.2
0.71,710.5,269.5,220.5,3.5,4,0.4,4,14.47,17.23
0.71,710.5,269.5,220.5,3.5,5,0.4,4,14.66,17.74
0.69,735.0,294.0,220.5,3.5,2,0.4,4,14.54,16.81
0.69,735.0,294.0,220.5,3.5,3,0.4,4,14.62,16.88
0.69,735.0,294.0,220.5,3.5,4,0.4,4,14.53,16.9
0.69,735.0,294.0,220.5,3.5,5,0.4,4,14.71,17.39
0.66,759.5,318.5,220.5,3.5,2,0.4,4,15.34,17.86
0.66,759.5,318.5,220.5,3.5,3,0.4,4,15.29,17.82
0.66,759.5,318.5,220.5,3.5,4,0.4,4,15.09,18.36
0.66,759.5,318.5,220.5,3.5,5,0.4,4,15.3,18.24
0.64,784.0,343.0,220.5,3.5,2,0.4,4,19.06,21.68
0.64,784.0,343.0,220.5,3.5,3,0.4,4,19.13,21.54
0.64,784.0,343.0,220.5,3.5,4,0.4,4,19.0,22.25
0.64,784.0,343.0,220.5,3.5,5,0.4,4,18.84,22.49
0.62,808.5,367.5,220.5,3.5,2,0.4,4,16.44,17.1
0.62,808.5,367.5,220.5,3.5,3,0.4,4,16.9,16.79
0.62,808.5,367.5,220.5,3.5,4,0.4,4,16.94,16.58
0.62,808.5,367.5,220.5,3.5,5,0.4,4,16.77,16.79
0.98,514.5,294.0,110.25,7.0,2,0.4,5,32.84,32.88
0.98,514.5,294.0,110.25,7.0,3,0.4,5,32.72,33.23
0.98,514.5,294.0,110.25,7.0,4,0.4,5,32.21,33.76
0.98,514.5,294.0,110.25,7.0,5,0.4,5,32.73,34.01
0.9,563.5,318.5,122.5,7.0,2,0.4,5,35.67,33.94
0.9,563.5,318.5,122.5,7.0,3,0.4,5,35.01,33.14
0.9,563.5,318.5,122.5,7.0,4,0.4,5,34.72,38.79
0.9,563.5,318.5,122.5,7.0,5,0.4,5,35.24,37.27
0.86,588.0,294.0,147.0,7.0,2,0.4,5,32.31,29.69
0.86,588.0,294.0,147.0,7.0,3,0.4,5,31.81,31.2
0.86,588.0,294.0,147.0,7.0,4,0.4,5,31.12,36.26
0.86,588.0,294.0,147.0,7.0,5,0.4,5,32.06,35.71
0.82,612.5,318.5,147.0,7.0,2,0.4,5,30.0,29.93
0.82,612.5,318.5,147.0,7.0,3,0.4,5,29.5,29.56
0.82,612.5,318.5,147.0,7.0,4,0.4,5,29.06,33.84
0.82,612.5,318.5,147.0,7.0,5,0.4,5,29.92,32.54
0.79,637.0,343.0,147.0,7.0,2,0.4,5,42.11,38.56
0.79,637.0,343.0,147.0,7.0,3,0.4,5,41.96,37.7
0.79,637.0,343.0,147.0,7.0,4,0.4,5,41.09,47.01
0.79,637.0,343.0,147.0,7.0,5,0.4,5,40.79,44.87
0.76,661.5,416.5,122.5,7.0,2,0.4,5,38.82,39.37
0.76,661.5,416.5,122.5,7.0,3,0.4,5,39.72,39.8
0.76,661.5,416.5,122.5,7.0,4,0.4,5,39.31,37.79
0.76,661.5,416.5,122.5,7.0,5,0.4,5,39.86,38.18
0.74,686.0,245.0,220.5,3.5,2,0.4,5,14.41,16.69
0.74,686.0,245.0,220.5,3.5,3,0.4,5,14.19,16.62
0.74,686.0,245.0,220.5,3.5,4,0.4,5,14.17,16.94
0.74,686.0,245.0,220.5,3.5,5,0.4,5,14.39,16.7
0.71,710.5,269.5,220.5,3.5,2,0.4,5,12.43,15.59
0.71,710.5,269.5,220.5,3.5,3,0.4,5,12.63,14.58
0.71,710.5,269.5,220.5,3.5,4,0.4,5,12.76,15.33
0.71,710.5,269.5,220.5,3.5,5,0.4,5,12.42,15.31
0.69,735.0,294.0,220.5,3.5,2,0.4,5,14.12,16.63
0.69,735.0,294.0,220.5,3.5,3,0.4,5,14.28,15.87
0.69,735.0,294.0,220.5,3.5,4,0.4,5,14.37,16.54
0.69,735.0,294.0,220.5,3.5,5,0.4,5,14.21,16.74
0.66,759.5,318.5,220.5,3.5,2,0.4,5,14.96,17.64
0.66,759.5,318.5,220.5,3.5,3,0.4,5,14.92,17.79
0.66,759.5,318.5,220.5,3.5,4,0.4,5,14.92,17.55
0.66,759.5,318.5,220.5,3.5,5,0.4,5,15.16,18.06
0.64,784.0,343.0,220.5,3.5,2,0.4,5,17.69,20.82
0.64,784.0,343.0,220.5,3.5,3,0.4,5,18.19,20.21
0.64,784.0,343.0,220.5,3.5,4,0.4,5,18.16,20.71
0.64,784.0,343.0,220.5,3.5,5,0.4,5,17.88,21.4
0.62,808.5,367.5,220.5,3.5,2,0.4,5,16.54,16.88
0.62,808.5,367.5,220.5,3.5,3,0.4,5,16.44,17.11
0.62,808.5,367.5,220.5,3.5,4,0.4,5,16.48,16.61
0.62,808.5,367.5,220.5,3.5,5,0.4,5,16.64,16.03
285 0.64 784.0 343.0 220.5 3.5 5 0.1 5 15.36 19.29
286 0.62 808.5 367.5 220.5 3.5 2 0.1 5 12.59 14.24
287 0.62 808.5 367.5 220.5 3.5 3 0.1 5 12.74 13.97
288 0.62 808.5 367.5 220.5 3.5 4 0.1 5 12.8 13.99
289 0.62 808.5 367.5 220.5 3.5 5 0.1 5 12.62 14.15
290 0.98 514.5 294.0 110.25 7.0 2 0.25 1 28.15 29.79
291 0.98 514.5 294.0 110.25 7.0 3 0.25 1 28.15 29.79
292 0.98 514.5 294.0 110.25 7.0 4 0.25 1 28.37 29.28
293 0.98 514.5 294.0 110.25 7.0 5 0.25 1 28.41 29.49
294 0.9 563.5 318.5 122.5 7.0 2 0.25 1 32.68 36.12
295 0.9 563.5 318.5 122.5 7.0 3 0.25 1 33.48 33.17
296 0.9 563.5 318.5 122.5 7.0 4 0.25 1 32.84 32.71
297 0.9 563.5 318.5 122.5 7.0 5 0.25 1 32.0 37.58
298 0.86 588.0 294.0 147.0 7.0 2 0.25 1 29.54 33.98
299 0.86 588.0 294.0 147.0 7.0 3 0.25 1 30.05 28.61
300 0.86 588.0 294.0 147.0 7.0 4 0.25 1 29.6 30.12
301 0.86 588.0 294.0 147.0 7.0 5 0.25 1 28.66 34.73
302 0.82 612.5 318.5 147.0 7.0 2 0.25 1 26.84 30.17
303 0.82 612.5 318.5 147.0 7.0 3 0.25 1 27.27 27.84
304 0.82 612.5 318.5 147.0 7.0 4 0.25 1 26.97 27.25
305 0.82 612.5 318.5 147.0 7.0 5 0.25 1 26.19 31.39
306 0.79 637.0 343.0 147.0 7.0 2 0.25 1 38.67 43.8
307 0.79 637.0 343.0 147.0 7.0 3 0.25 1 40.03 37.81
308 0.79 637.0 343.0 147.0 7.0 4 0.25 1 39.86 36.85
309 0.79 637.0 343.0 147.0 7.0 5 0.25 1 39.04 45.52
310 0.76 661.5 416.5 122.5 7.0 2 0.25 1 36.96 36.85
311 0.76 661.5 416.5 122.5 7.0 3 0.25 1 36.13 37.58
312 0.76 661.5 416.5 122.5 7.0 4 0.25 1 36.91 37.45
313 0.76 661.5 416.5 122.5 7.0 5 0.25 1 36.43 36.62
314 0.74 686.0 245.0 220.5 3.5 2 0.25 1 12.43 15.19
315 0.74 686.0 245.0 220.5 3.5 3 0.25 1 12.5 15.5
316 0.74 686.0 245.0 220.5 3.5 4 0.25 1 12.41 15.28
317 0.74 686.0 245.0 220.5 3.5 5 0.25 1 12.45 15.5
318 0.71 710.5 269.5 220.5 3.5 2 0.25 1 12.57 15.42
319 0.71 710.5 269.5 220.5 3.5 3 0.25 1 12.65 15.85
320 0.71 710.5 269.5 220.5 3.5 4 0.25 1 12.57 15.44
321 0.71 710.5 269.5 220.5 3.5 5 0.25 1 12.63 15.81
322 0.69 735.0 294.0 220.5 3.5 2 0.25 1 12.78 15.21
323 0.69 735.0 294.0 220.5 3.5 3 0.25 1 12.93 15.63
324 0.69 735.0 294.0 220.5 3.5 4 0.25 1 12.73 15.48
325 0.69 735.0 294.0 220.5 3.5 5 0.25 1 12.72 15.78
326 0.66 759.5 318.5 220.5 3.5 2 0.25 1 13.17 16.39
327 0.66 759.5 318.5 220.5 3.5 3 0.25 1 13.18 16.27
328 0.66 759.5 318.5 220.5 3.5 4 0.25 1 13.17 16.39
329 0.66 759.5 318.5 220.5 3.5 5 0.25 1 13.18 16.19
330 0.64 784.0 343.0 220.5 3.5 2 0.25 1 17.5 21.13
331 0.64 784.0 343.0 220.5 3.5 3 0.25 1 17.35 21.19
332 0.64 784.0 343.0 220.5 3.5 4 0.25 1 17.52 21.09
333 0.64 784.0 343.0 220.5 3.5 5 0.25 1 17.37 21.08
334 0.62 808.5 367.5 220.5 3.5 2 0.25 1 15.09 15.77
335 0.62 808.5 367.5 220.5 3.5 3 0.25 1 15.12 15.95
336 0.62 808.5 367.5 220.5 3.5 4 0.25 1 15.08 15.77
337 0.62 808.5 367.5 220.5 3.5 5 0.25 1 15.16 15.76
338 0.98 514.5 294.0 110.25 7.0 2 0.25 2 28.67 29.62
339 0.98 514.5 294.0 110.25 7.0 3 0.25 2 28.57 29.69
340 0.98 514.5 294.0 110.25 7.0 4 0.25 2 28.18 30.18
341 0.98 514.5 294.0 110.25 7.0 5 0.25 2 28.6 30.02
342 0.9 563.5 318.5 122.5 7.0 2 0.25 2 32.46 35.56
343 0.9 563.5 318.5 122.5 7.0 3 0.25 2 33.27 32.64
344 0.9 563.5 318.5 122.5 7.0 4 0.25 2 32.33 32.77
345 0.9 563.5 318.5 122.5 7.0 5 0.25 2 31.66 37.72
346 0.86 588.0 294.0 147.0 7.0 2 0.25 2 29.34 33.37
347 0.86 588.0 294.0 147.0 7.0 3 0.25 2 29.87 27.89
348 0.86 588.0 294.0 147.0 7.0 4 0.25 2 29.27 29.9
349 0.86 588.0 294.0 147.0 7.0 5 0.25 2 28.4 34.52
350 0.82 612.5 318.5 147.0 7.0 2 0.25 2 25.74 28.27
351 0.82 612.5 318.5 147.0 7.0 3 0.25 2 25.98 26.96
352 0.82 612.5 318.5 147.0 7.0 4 0.25 2 25.38 26.72
353 0.82 612.5 318.5 147.0 7.0 5 0.25 2 24.94 29.88
354 0.79 637.0 343.0 147.0 7.0 2 0.25 2 38.57 43.86
355 0.79 637.0 343.0 147.0 7.0 3 0.25 2 40.19 37.41
356 0.79 637.0 343.0 147.0 7.0 4 0.25 2 39.97 36.77
357 0.79 637.0 343.0 147.0 7.0 5 0.25 2 38.98 45.97
358 0.76 661.5 416.5 122.5 7.0 2 0.25 2 36.95 36.87
359 0.76 661.5 416.5 122.5 7.0 3 0.25 2 36.28 37.35
360 0.76 661.5 416.5 122.5 7.0 4 0.25 2 36.86 37.28
361 0.76 661.5 416.5 122.5 7.0 5 0.25 2 36.45 36.81
362 0.74 686.0 245.0 220.5 3.5 2 0.25 2 12.35 14.73
363 0.74 686.0 245.0 220.5 3.5 3 0.25 2 12.45 15.1
364 0.74 686.0 245.0 220.5 3.5 4 0.25 2 12.16 15.18
365 0.74 686.0 245.0 220.5 3.5 5 0.25 2 12.3 15.44
366 0.71 710.5 269.5 220.5 3.5 2 0.25 2 12.33 14.91
367 0.71 710.5 269.5 220.5 3.5 3 0.25 2 12.29 15.4
368 0.71 710.5 269.5 220.5 3.5 4 0.25 2 12.2 14.94
369 0.71 710.5 269.5 220.5 3.5 5 0.25 2 12.49 15.32
370 0.69 735.0 294.0 220.5 3.5 2 0.25 2 12.85 15.52
371 0.69 735.0 294.0 220.5 3.5 3 0.25 2 12.87 15.85
372 0.69 735.0 294.0 220.5 3.5 4 0.25 2 12.73 15.66
373 0.69 735.0 294.0 220.5 3.5 5 0.25 2 12.95 15.99
374 0.66 759.5 318.5 220.5 3.5 2 0.25 2 13.05 15.89
375 0.66 759.5 318.5 220.5 3.5 3 0.25 2 12.93 15.85
376 0.66 759.5 318.5 220.5 3.5 4 0.25 2 12.77 16.22
377 0.66 759.5 318.5 220.5 3.5 5 0.25 2 13.0 15.87
378 0.64 784.0 343.0 220.5 3.5 2 0.25 2 17.14 20.47
379 0.64 784.0 343.0 220.5 3.5 3 0.25 2 16.84 20.56
380 0.64 784.0 343.0 220.5 3.5 4 0.25 2 17.02 20.48
381 0.64 784.0 343.0 220.5 3.5 5 0.25 2 17.11 20.43
382 0.62 808.5 367.5 220.5 3.5 2 0.25 2 14.34 15.32
383 0.62 808.5 367.5 220.5 3.5 3 0.25 2 14.66 15.64
384 0.62 808.5 367.5 220.5 3.5 4 0.25 2 14.6 15.14
385 0.62 808.5 367.5 220.5 3.5 5 0.25 2 14.6 15.3
386 0.98 514.5 294.0 110.25 7.0 2 0.25 3 28.67 29.43
387 0.98 514.5 294.0 110.25 7.0 3 0.25 3 28.56 29.78
388 0.98 514.5 294.0 110.25 7.0 4 0.25 3 28.17 30.1
389 0.98 514.5 294.0 110.25 7.0 5 0.25 3 28.63 30.19
390 0.9 563.5 318.5 122.5 7.0 2 0.25 3 31.63 36.35
391 0.9 563.5 318.5 122.5 7.0 3 0.25 3 32.4 35.1
392 0.9 563.5 318.5 122.5 7.0 4 0.25 3 32.68 32.83
393 0.9 563.5 318.5 122.5 7.0 5 0.25 3 32.29 32.46
394 0.86 588.0 294.0 147.0 7.0 2 0.25 3 28.4 33.52
395 0.86 588.0 294.0 147.0 7.0 3 0.25 3 29.4 32.93
396 0.86 588.0 294.0 147.0 7.0 4 0.25 3 29.43 28.38
397 0.86 588.0 294.0 147.0 7.0 5 0.25 3 29.07 29.82
398 0.82 612.5 318.5 147.0 7.0 2 0.25 3 24.7 28.77
399 0.82 612.5 318.5 147.0 7.0 3 0.25 3 25.48 27.76
400 0.82 612.5 318.5 147.0 7.0 4 0.25 3 25.37 26.95
401 0.82 612.5 318.5 147.0 7.0 5 0.25 3 25.17 26.41
402 0.79 637.0 343.0 147.0 7.0 2 0.25 3 39.04 45.13
403 0.79 637.0 343.0 147.0 7.0 3 0.25 3 38.35 43.66
404 0.79 637.0 343.0 147.0 7.0 4 0.25 3 39.81 37.76
405 0.79 637.0 343.0 147.0 7.0 5 0.25 3 39.83 36.87
406 0.76 661.5 416.5 122.5 7.0 2 0.25 3 35.99 36.07
407 0.76 661.5 416.5 122.5 7.0 3 0.25 3 36.59 36.44
408 0.76 661.5 416.5 122.5 7.0 4 0.25 3 35.64 37.28
409 0.76 661.5 416.5 122.5 7.0 5 0.25 3 36.52 37.29
410 0.74 686.0 245.0 220.5 3.5 2 0.25 3 11.8 14.49
411 0.74 686.0 245.0 220.5 3.5 3 0.25 3 12.03 13.79
412 0.74 686.0 245.0 220.5 3.5 4 0.25 3 11.98 14.72
413 0.74 686.0 245.0 220.5 3.5 5 0.25 3 11.69 14.76
414 0.71 710.5 269.5 220.5 3.5 2 0.25 3 12.41 14.92
415 0.71 710.5 269.5 220.5 3.5 3 0.25 3 12.28 14.74
416 0.71 710.5 269.5 220.5 3.5 4 0.25 3 12.1 15.57
417 0.71 710.5 269.5 220.5 3.5 5 0.25 3 12.19 14.94
418 0.69 735.0 294.0 220.5 3.5 2 0.25 3 12.34 14.92
419 0.69 735.0 294.0 220.5 3.5 3 0.25 3 12.46 14.38
420 0.69 735.0 294.0 220.5 3.5 4 0.25 3 12.31 15.44
421 0.69 735.0 294.0 220.5 3.5 5 0.25 3 12.12 15.17
422 0.66 759.5 318.5 220.5 3.5 2 0.25 3 12.97 15.53
423 0.66 759.5 318.5 220.5 3.5 3 0.25 3 13.01 15.8
424 0.66 759.5 318.5 220.5 3.5 4 0.25 3 12.74 16.14
425 0.66 759.5 318.5 220.5 3.5 5 0.25 3 12.84 16.26
426 0.64 784.0 343.0 220.5 3.5 2 0.25 3 16.83 19.87
427 0.64 784.0 343.0 220.5 3.5 3 0.25 3 16.93 20.03
428 0.64 784.0 343.0 220.5 3.5 4 0.25 3 16.66 20.46
429 0.64 784.0 343.0 220.5 3.5 5 0.25 3 16.86 20.28
430 0.62 808.5 367.5 220.5 3.5 2 0.25 3 13.91 14.89
431 0.62 808.5 367.5 220.5 3.5 3 0.25 3 14.34 14.96
432 0.62 808.5 367.5 220.5 3.5 4 0.25 3 13.95 14.89
433 0.62 808.5 367.5 220.5 3.5 5 0.25 3 13.99 14.35
434 0.98 514.5 294.0 110.25 7.0 2 0.25 4 28.7 29.61
435 0.98 514.5 294.0 110.25 7.0 3 0.25 4 28.55 29.59
436 0.98 514.5 294.0 110.25 7.0 4 0.25 4 28.15 30.19
437 0.98 514.5 294.0 110.25 7.0 5 0.25 4 28.62 30.12
438 0.9 563.5 318.5 122.5 7.0 2 0.25 4 32.67 32.12
439 0.9 563.5 318.5 122.5 7.0 3 0.25 4 31.69 37.12
440 0.9 563.5 318.5 122.5 7.0 4 0.25 4 32.07 36.16
441 0.9 563.5 318.5 122.5 7.0 5 0.25 4 33.28 33.16
442 0.86 588.0 294.0 147.0 7.0 2 0.25 4 29.47 29.45
443 0.86 588.0 294.0 147.0 7.0 3 0.25 4 28.42 34.19
444 0.86 588.0 294.0 147.0 7.0 4 0.25 4 29.08 33.93
445 0.86 588.0 294.0 147.0 7.0 5 0.25 4 29.88 28.31
446 0.82 612.5 318.5 147.0 7.0 2 0.25 4 25.66 26.3
447 0.82 612.5 318.5 147.0 7.0 3 0.25 4 24.96 29.43
448 0.82 612.5 318.5 147.0 7.0 4 0.25 4 25.43 28.76
449 0.82 612.5 318.5 147.0 7.0 5 0.25 4 26.0 27.34
450 0.79 637.0 343.0 147.0 7.0 2 0.25 4 40.0 36.26
451 0.79 637.0 343.0 147.0 7.0 3 0.25 4 38.84 45.48
452 0.79 637.0 343.0 147.0 7.0 4 0.25 4 38.33 44.16
453 0.79 637.0 343.0 147.0 7.0 5 0.25 4 40.12 37.26
454 0.76 661.5 416.5 122.5 7.0 2 0.25 4 36.95 37.2
455 0.76 661.5 416.5 122.5 7.0 3 0.25 4 36.45 36.76
456 0.76 661.5 416.5 122.5 7.0 4 0.25 4 36.81 37.05
457 0.76 661.5 416.5 122.5 7.0 5 0.25 4 36.26 37.51
458 0.74 686.0 245.0 220.5 3.5 2 0.25 4 12.32 14.92
459 0.74 686.0 245.0 220.5 3.5 3 0.25 4 12.3 15.24
460 0.74 686.0 245.0 220.5 3.5 4 0.25 4 12.18 15.03
461 0.74 686.0 245.0 220.5 3.5 5 0.25 4 12.43 15.35
462 0.71 710.5 269.5 220.5 3.5 2 0.25 4 12.36 14.67
463 0.71 710.5 269.5 220.5 3.5 3 0.25 4 12.49 15.09
464 0.71 710.5 269.5 220.5 3.5 4 0.25 4 12.17 15.2
465 0.71 710.5 269.5 220.5 3.5 5 0.25 4 12.28 15.64
466 0.69 735.0 294.0 220.5 3.5 2 0.25 4 12.91 15.37
467 0.69 735.0 294.0 220.5 3.5 3 0.25 4 12.95 15.73
468 0.69 735.0 294.0 220.5 3.5 4 0.25 4 12.67 15.83
469 0.69 735.0 294.0 220.5 3.5 5 0.25 4 12.86 16.13
470 0.66 759.5 318.5 220.5 3.5 2 0.25 4 12.95 15.95
471 0.66 759.5 318.5 220.5 3.5 3 0.25 4 13.0 15.59
472 0.66 759.5 318.5 220.5 3.5 4 0.25 4 12.86 16.17
473 0.66 759.5 318.5 220.5 3.5 5 0.25 4 12.92 16.14
474 0.64 784.0 343.0 220.5 3.5 2 0.25 4 16.99 19.65
475 0.64 784.0 343.0 220.5 3.5 3 0.25 4 16.69 19.76
476 0.64 784.0 343.0 220.5 3.5 4 0.25 4 16.56 20.37
477 0.64 784.0 343.0 220.5 3.5 5 0.25 4 16.62 19.9
478 0.62 808.5 367.5 220.5 3.5 2 0.25 4 14.33 15.41
479 0.62 808.5 367.5 220.5 3.5 3 0.25 4 14.61 15.56
480 0.62 808.5 367.5 220.5 3.5 4 0.25 4 14.61 15.07
481 0.62 808.5 367.5 220.5 3.5 5 0.25 4 14.65 15.38
482 0.98 514.5 294.0 110.25 7.0 2 0.25 5 28.69 29.53
483 0.98 514.5 294.0 110.25 7.0 3 0.25 5 28.58 29.77
484 0.98 514.5 294.0 110.25 7.0 4 0.25 5 28.15 30.0
485 0.98 514.5 294.0 110.25 7.0 5 0.25 5 28.61 30.2
486 0.9 563.5 318.5 122.5 7.0 2 0.25 5 33.13 32.25
487 0.9 563.5 318.5 122.5 7.0 3 0.25 5 32.31 32.0
488 0.9 563.5 318.5 122.5 7.0 4 0.25 5 31.53 37.19
489 0.9 563.5 318.5 122.5 7.0 5 0.25 5 32.46 35.62
490 0.86 588.0 294.0 147.0 7.0 2 0.25 5 29.71 28.02
491 0.86 588.0 294.0 147.0 7.0 3 0.25 5 29.09 29.43
492 0.86 588.0 294.0 147.0 7.0 4 0.25 5 28.31 34.15
493 0.86 588.0 294.0 147.0 7.0 5 0.25 5 29.39 33.47
494 0.82 612.5 318.5 147.0 7.0 2 0.25 5 25.7 26.53
495 0.82 612.5 318.5 147.0 7.0 3 0.25 5 25.17 26.08
496 0.82 612.5 318.5 147.0 7.0 4 0.25 5 24.6 29.31
497 0.82 612.5 318.5 147.0 7.0 5 0.25 5 25.49 28.14
498 0.79 637.0 343.0 147.0 7.0 2 0.25 5 39.89 37.54
499 0.79 637.0 343.0 147.0 7.0 3 0.25 5 39.83 36.66
500 0.79 637.0 343.0 147.0 7.0 4 0.25 5 39.01 45.28
501 0.79 637.0 343.0 147.0 7.0 5 0.25 5 38.65 43.73
502 0.76 661.5 416.5 122.5 7.0 2 0.25 5 35.69 36.93
503 0.76 661.5 416.5 122.5 7.0 3 0.25 5 36.64 37.01
504 0.76 661.5 416.5 122.5 7.0 4 0.25 5 36.06 35.73
505 0.76 661.5 416.5 122.5 7.0 5 0.25 5 36.7 36.15
506 0.74 686.0 245.0 220.5 3.5 2 0.25 5 12.12 14.48
507 0.74 686.0 245.0 220.5 3.5 3 0.25 5 11.67 14.58
508 0.74 686.0 245.0 220.5 3.5 4 0.25 5 11.64 14.81
509 0.74 686.0 245.0 220.5 3.5 5 0.25 5 12.02 14.03
510 0.71 710.5 269.5 220.5 3.5 2 0.25 5 12.27 15.27
511 0.71 710.5 269.5 220.5 3.5 3 0.25 5 12.19 14.71
512 0.71 710.5 269.5 220.5 3.5 4 0.25 5 12.25 15.23
513 0.71 710.5 269.5 220.5 3.5 5 0.25 5 12.27 14.97
514 0.69 735.0 294.0 220.5 3.5 2 0.25 5 12.47 15.14
515 0.69 735.0 294.0 220.5 3.5 3 0.25 5 12.12 14.97
516 0.69 735.0 294.0 220.5 3.5 4 0.25 5 12.18 15.22
517 0.69 735.0 294.0 220.5 3.5 5 0.25 5 12.47 14.6
518 0.66 759.5 318.5 220.5 3.5 2 0.25 5 12.93 15.83
519 0.66 759.5 318.5 220.5 3.5 3 0.25 5 12.82 16.03
520 0.66 759.5 318.5 220.5 3.5 4 0.25 5 12.78 15.8
521 0.66 759.5 318.5 220.5 3.5 5 0.25 5 13.02 16.06
522 0.64 784.0 343.0 220.5 3.5 2 0.25 5 16.73 20.13
523 0.64 784.0 343.0 220.5 3.5 3 0.25 5 16.86 20.01
524 0.64 784.0 343.0 220.5 3.5 4 0.25 5 16.76 20.19
525 0.64 784.0 343.0 220.5 3.5 5 0.25 5 16.92 20.29
526 0.62 808.5 367.5 220.5 3.5 2 0.25 5 13.68 15.19
527 0.62 808.5 367.5 220.5 3.5 3 0.25 5 13.99 14.61
528 0.62 808.5 367.5 220.5 3.5 4 0.25 5 14.16 14.61
529 0.62 808.5 367.5 220.5 3.5 5 0.25 5 13.86 14.75
530 0.98 514.5 294.0 110.25 7.0 2 0.4 1 32.26 33.37
531 0.98 514.5 294.0 110.25 7.0 3 0.4 1 32.26 33.34
532 0.98 514.5 294.0 110.25 7.0 4 0.4 1 32.49 32.83
533 0.98 514.5 294.0 110.25 7.0 5 0.4 1 32.53 33.04
534 0.9 563.5 318.5 122.5 7.0 2 0.4 1 36.47 39.28
535 0.9 563.5 318.5 122.5 7.0 3 0.4 1 37.24 36.38
536 0.9 563.5 318.5 122.5 7.0 4 0.4 1 36.66 35.92
537 0.9 563.5 318.5 122.5 7.0 5 0.4 1 35.96 40.99
538 0.86 588.0 294.0 147.0 7.0 2 0.4 1 31.89 35.99
539 0.86 588.0 294.0 147.0 7.0 3 0.4 1 32.39 30.66
540 0.86 588.0 294.0 147.0 7.0 4 0.4 1 32.09 31.7
541 0.86 588.0 294.0 147.0 7.0 5 0.4 1 31.29 36.73
542 0.82 612.5 318.5 147.0 7.0 2 0.4 1 29.22 31.71
543 0.82 612.5 318.5 147.0 7.0 3 0.4 1 29.91 29.13
544 0.82 612.5 318.5 147.0 7.0 4 0.4 1 29.53 28.99
545 0.82 612.5 318.5 147.0 7.0 5 0.4 1 28.65 33.54
546 0.79 637.0 343.0 147.0 7.0 2 0.4 1 41.4 45.29
547 0.79 637.0 343.0 147.0 7.0 3 0.4 1 42.62 39.07
548 0.79 637.0 343.0 147.0 7.0 4 0.4 1 42.5 38.35
549 0.79 637.0 343.0 147.0 7.0 5 0.4 1 41.67 46.94
550 0.76 661.5 416.5 122.5 7.0 2 0.4 1 40.78 39.55
551 0.76 661.5 416.5 122.5 7.0 3 0.4 1 39.97 40.85
552 0.76 661.5 416.5 122.5 7.0 4 0.4 1 40.71 40.63
553 0.76 661.5 416.5 122.5 7.0 5 0.4 1 40.43 39.48
554 0.74 686.0 245.0 220.5 3.5 2 0.4 1 14.52 16.94
555 0.74 686.0 245.0 220.5 3.5 3 0.4 1 14.61 17.25
556 0.74 686.0 245.0 220.5 3.5 4 0.4 1 14.5 17.03
557 0.74 686.0 245.0 220.5 3.5 5 0.4 1 14.55 17.25
558 0.71 710.5 269.5 220.5 3.5 2 0.4 1 14.51 17.1
559 0.71 710.5 269.5 220.5 3.5 3 0.4 1 14.6 17.51
560 0.71 710.5 269.5 220.5 3.5 4 0.4 1 14.5 17.12
561 0.71 710.5 269.5 220.5 3.5 5 0.4 1 14.58 17.47
562 0.69 735.0 294.0 220.5 3.5 2 0.4 1 14.51 16.5
563 0.69 735.0 294.0 220.5 3.5 3 0.4 1 14.7 17.0
564 0.69 735.0 294.0 220.5 3.5 4 0.4 1 14.42 16.87
565 0.69 735.0 294.0 220.5 3.5 5 0.4 1 14.42 17.2
566 0.66 759.5 318.5 220.5 3.5 2 0.4 1 15.23 18.14
567 0.66 759.5 318.5 220.5 3.5 3 0.4 1 15.23 18.03
568 0.66 759.5 318.5 220.5 3.5 4 0.4 1 15.23 18.14
569 0.66 759.5 318.5 220.5 3.5 5 0.4 1 15.23 17.95
570 0.64 784.0 343.0 220.5 3.5 2 0.4 1 19.52 22.72
571 0.64 784.0 343.0 220.5 3.5 3 0.4 1 19.36 22.73
572 0.64 784.0 343.0 220.5 3.5 4 0.4 1 19.48 22.72
573 0.64 784.0 343.0 220.5 3.5 5 0.4 1 19.42 22.53
574 0.62 808.5 367.5 220.5 3.5 2 0.4 1 15.09 17.2
575 0.62 808.5 367.5 220.5 3.5 3 0.4 1 17.17 17.21
576 0.62 808.5 367.5 220.5 3.5 4 0.4 1 17.14 17.15
577 0.62 808.5 367.5 220.5 3.5 5 0.4 1 17.14 17.2
578 0.98 514.5 294.0 110.25 7.0 2 0.4 2 32.82 32.96
579 0.98 514.5 294.0 110.25 7.0 3 0.4 2 32.71 33.13
580 0.98 514.5 294.0 110.25 7.0 4 0.4 2 32.24 33.94
581 0.98 514.5 294.0 110.25 7.0 5 0.4 2 32.72 33.78
582 0.9 563.5 318.5 122.5 7.0 2 0.4 2 35.84 38.35
583 0.9 563.5 318.5 122.5 7.0 3 0.4 2 36.57 35.39
584 0.9 563.5 318.5 122.5 7.0 4 0.4 2 36.06 34.94
585 0.9 563.5 318.5 122.5 7.0 5 0.4 2 35.69 40.66
586 0.86 588.0 294.0 147.0 7.0 2 0.4 2 32.48 35.48
587 0.86 588.0 294.0 147.0 7.0 3 0.4 2 32.74 30.53
588 0.86 588.0 294.0 147.0 7.0 4 0.4 2 32.13 32.28
589 0.86 588.0 294.0 147.0 7.0 5 0.4 2 31.64 36.86
590 0.82 612.5 318.5 147.0 7.0 2 0.4 2 28.95 30.34
591 0.82 612.5 318.5 147.0 7.0 3 0.4 2 29.49 27.93
592 0.82 612.5 318.5 147.0 7.0 4 0.4 2 28.64 28.95
593 0.82 612.5 318.5 147.0 7.0 5 0.4 2 28.01 32.92
594 0.79 637.0 343.0 147.0 7.0 2 0.4 2 41.64 45.59
595 0.79 637.0 343.0 147.0 7.0 3 0.4 2 43.1 39.41
596 0.79 637.0 343.0 147.0 7.0 4 0.4 2 42.74 38.84
597 0.79 637.0 343.0 147.0 7.0 5 0.4 2 41.92 48.03
598 0.76 661.5 416.5 122.5 7.0 2 0.4 2 40.78 39.48
599 0.76 661.5 416.5 122.5 7.0 3 0.4 2 40.15 40.4
600 0.76 661.5 416.5 122.5 7.0 4 0.4 2 40.57 40.47
601 0.76 661.5 416.5 122.5 7.0 5 0.4 2 40.42 39.7
602 0.74 686.0 245.0 220.5 3.5 2 0.4 2 14.54 16.43
603 0.74 686.0 245.0 220.5 3.5 3 0.4 2 14.45 16.93
604 0.74 686.0 245.0 220.5 3.5 4 0.4 2 14.18 16.99
605 0.74 686.0 245.0 220.5 3.5 5 0.4 2 14.5 17.03
606 0.71 710.5 269.5 220.5 3.5 2 0.4 2 14.7 16.77
607 0.71 710.5 269.5 220.5 3.5 3 0.4 2 14.66 17.37
608 0.71 710.5 269.5 220.5 3.5 4 0.4 2 14.4 17.27
609 0.71 710.5 269.5 220.5 3.5 5 0.4 2 14.71 17.51
610 0.69 735.0 294.0 220.5 3.5 2 0.4 2 14.75 16.44
611 0.69 735.0 294.0 220.5 3.5 3 0.4 2 14.71 17.01
612 0.69 735.0 294.0 220.5 3.5 4 0.4 2 14.33 17.23
613 0.69 735.0 294.0 220.5 3.5 5 0.4 2 14.62 17.22
614 0.66 759.5 318.5 220.5 3.5 2 0.4 2 15.34 17.85
615 0.66 759.5 318.5 220.5 3.5 3 0.4 2 15.29 17.89
616 0.66 759.5 318.5 220.5 3.5 4 0.4 2 15.09 18.36
617 0.66 759.5 318.5 220.5 3.5 5 0.4 2 15.3 18.15
618 0.64 784.0 343.0 220.5 3.5 2 0.4 2 19.2 21.72
619 0.64 784.0 343.0 220.5 3.5 3 0.4 2 18.88 22.07
620 0.64 784.0 343.0 220.5 3.5 4 0.4 2 18.9 22.09
621 0.64 784.0 343.0 220.5 3.5 5 0.4 2 19.12 21.93
622 0.62 808.5 367.5 220.5 3.5 2 0.4 2 16.76 17.36
623 0.62 808.5 367.5 220.5 3.5 3 0.4 2 17.23 17.38
624 0.62 808.5 367.5 220.5 3.5 4 0.4 2 17.26 16.86
625 0.62 808.5 367.5 220.5 3.5 5 0.4 2 17.15 16.99
626 0.98 514.5 294.0 110.25 7.0 2 0.4 3 32.82 32.78
627 0.98 514.5 294.0 110.25 7.0 3 0.4 3 32.69 33.24
628 0.98 514.5 294.0 110.25 7.0 4 0.4 3 32.23 33.86
629 0.98 514.5 294.0 110.25 7.0 5 0.4 3 32.75 34.0
630 0.9 563.5 318.5 122.5 7.0 2 0.4 3 34.24 37.26
631 0.9 563.5 318.5 122.5 7.0 3 0.4 3 34.95 35.04
632 0.9 563.5 318.5 122.5 7.0 4 0.4 3 35.05 33.82
633 0.9 563.5 318.5 122.5 7.0 5 0.4 3 34.29 33.31
634 0.86 588.0 294.0 147.0 7.0 2 0.4 3 31.28 35.22
635 0.86 588.0 294.0 147.0 7.0 3 0.4 3 32.12 34.7
636 0.86 588.0 294.0 147.0 7.0 4 0.4 3 32.05 30.11
637 0.86 588.0 294.0 147.0 7.0 5 0.4 3 31.84 31.6
638 0.82 612.5 318.5 147.0 7.0 2 0.4 3 28.67 32.43
639 0.82 612.5 318.5 147.0 7.0 3 0.4 3 29.67 30.65
640 0.82 612.5 318.5 147.0 7.0 4 0.4 3 29.47 29.77
641 0.82 612.5 318.5 147.0 7.0 5 0.4 3 28.91 29.64
642 0.79 637.0 343.0 147.0 7.0 2 0.4 3 41.26 46.44
643 0.79 637.0 343.0 147.0 7.0 3 0.4 3 41.3 44.18
644 0.79 637.0 343.0 147.0 7.0 4 0.4 3 42.49 38.81
645 0.79 637.0 343.0 147.0 7.0 5 0.4 3 42.08 38.23
646 0.76 661.5 416.5 122.5 7.0 2 0.4 3 39.32 38.17
647 0.76 661.5 416.5 122.5 7.0 3 0.4 3 39.84 38.48
648 0.76 661.5 416.5 122.5 7.0 4 0.4 3 38.89 39.66
649 0.76 661.5 416.5 122.5 7.0 5 0.4 3 39.68 40.1
650 0.74 686.0 245.0 220.5 3.5 2 0.4 3 13.97 16.08
651 0.74 686.0 245.0 220.5 3.5 3 0.4 3 14.22 15.39
652 0.74 686.0 245.0 220.5 3.5 4 0.4 3 14.1 16.57
653 0.74 686.0 245.0 220.5 3.5 5 0.4 3 13.78 16.6
654 0.71 710.5 269.5 220.5 3.5 2 0.4 3 14.07 16.11
655 0.71 710.5 269.5 220.5 3.5 3 0.4 3 14.03 15.47
656 0.71 710.5 269.5 220.5 3.5 4 0.4 3 13.94 16.7
657 0.71 710.5 269.5 220.5 3.5 5 0.4 3 13.86 16.1
658 0.69 735.0 294.0 220.5 3.5 2 0.4 3 14.32 16.35
659 0.69 735.0 294.0 220.5 3.5 3 0.4 3 14.56 15.84
660 0.69 735.0 294.0 220.5 3.5 4 0.4 3 14.33 16.99
661 0.69 735.0 294.0 220.5 3.5 5 0.4 3 14.08 17.02
662 0.66 759.5 318.5 220.5 3.5 2 0.4 3 15.16 17.04
663 0.66 759.5 318.5 220.5 3.5 3 0.4 3 15.18 17.63
664 0.66 759.5 318.5 220.5 3.5 4 0.4 3 14.72 18.1
665 0.66 759.5 318.5 220.5 3.5 5 0.4 3 14.9 18.22
666 0.64 784.0 343.0 220.5 3.5 2 0.4 3 18.48 20.78
667 0.64 784.0 343.0 220.5 3.5 3 0.4 3 18.71 20.72
668 0.64 784.0 343.0 220.5 3.5 4 0.4 3 18.48 21.54
669 0.64 784.0 343.0 220.5 3.5 5 0.4 3 18.46 21.53
670 0.62 808.5 367.5 220.5 3.5 2 0.4 3 16.47 16.9
671 0.62 808.5 367.5 220.5 3.5 3 0.4 3 16.35 17.14
672 0.62 808.5 367.5 220.5 3.5 4 0.4 3 16.55 16.56
673 0.62 808.5 367.5 220.5 3.5 5 0.4 3 16.74 16.0
674 0.98 514.5 294.0 110.25 7.0 2 0.4 4 32.85 32.95
675 0.98 514.5 294.0 110.25 7.0 3 0.4 4 32.67 33.06
676 0.98 514.5 294.0 110.25 7.0 4 0.4 4 32.21 33.95
677 0.98 514.5 294.0 110.25 7.0 5 0.4 4 32.74 33.88
678 0.9 563.5 318.5 122.5 7.0 2 0.4 4 36.45 33.98
679 0.9 563.5 318.5 122.5 7.0 3 0.4 4 35.73 39.92
680 0.9 563.5 318.5 122.5 7.0 4 0.4 4 35.4 39.22
681 0.9 563.5 318.5 122.5 7.0 5 0.4 4 36.57 36.1
682 0.86 588.0 294.0 147.0 7.0 2 0.4 4 32.38 31.53
683 0.86 588.0 294.0 147.0 7.0 3 0.4 4 31.66 36.2
684 0.86 588.0 294.0 147.0 7.0 4 0.4 4 32.15 36.21
685 0.86 588.0 294.0 147.0 7.0 5 0.4 4 32.75 31.0
686 0.82 612.5 318.5 147.0 7.0 2 0.4 4 28.93 28.2
687 0.82 612.5 318.5 147.0 7.0 3 0.4 4 28.05 32.35
688 0.82 612.5 318.5 147.0 7.0 4 0.4 4 28.64 31.14
689 0.82 612.5 318.5 147.0 7.0 5 0.4 4 29.52 28.43
690 0.79 637.0 343.0 147.0 7.0 2 0.4 4 42.77 38.33
691 0.79 637.0 343.0 147.0 7.0 3 0.4 4 41.73 47.59
692 0.79 637.0 343.0 147.0 7.0 4 0.4 4 41.32 46.23
693 0.79 637.0 343.0 147.0 7.0 5 0.4 4 42.96 39.56
694 0.76 661.5 416.5 122.5 7.0 2 0.4 4 40.68 40.36
695 0.76 661.5 416.5 122.5 7.0 3 0.4 4 40.4 39.67
696 0.76 661.5 416.5 122.5 7.0 4 0.4 4 40.6 39.85
697 0.76 661.5 416.5 122.5 7.0 5 0.4 4 40.11 40.77
698 0.74 686.0 245.0 220.5 3.5 2 0.4 4 14.37 16.61
699 0.74 686.0 245.0 220.5 3.5 3 0.4 4 14.48 16.74
700 0.74 686.0 245.0 220.5 3.5 4 0.4 4 14.32 16.9
701 0.74 686.0 245.0 220.5 3.5 5 0.4 4 14.44 17.32
702 0.71 710.5 269.5 220.5 3.5 2 0.4 4 14.6 16.85
703 0.71 710.5 269.5 220.5 3.5 3 0.4 4 14.7 17.2
704 0.71 710.5 269.5 220.5 3.5 4 0.4 4 14.47 17.23
705 0.71 710.5 269.5 220.5 3.5 5 0.4 4 14.66 17.74
706 0.69 735.0 294.0 220.5 3.5 2 0.4 4 14.54 16.81
707 0.69 735.0 294.0 220.5 3.5 3 0.4 4 14.62 16.88
708 0.69 735.0 294.0 220.5 3.5 4 0.4 4 14.53 16.9
709 0.69 735.0 294.0 220.5 3.5 5 0.4 4 14.71 17.39
710 0.66 759.5 318.5 220.5 3.5 2 0.4 4 15.34 17.86
711 0.66 759.5 318.5 220.5 3.5 3 0.4 4 15.29 17.82
712 0.66 759.5 318.5 220.5 3.5 4 0.4 4 15.09 18.36
713 0.66 759.5 318.5 220.5 3.5 5 0.4 4 15.3 18.24
714 0.64 784.0 343.0 220.5 3.5 2 0.4 4 19.06 21.68
715 0.64 784.0 343.0 220.5 3.5 3 0.4 4 19.13 21.54
716 0.64 784.0 343.0 220.5 3.5 4 0.4 4 19.0 22.25
717 0.64 784.0 343.0 220.5 3.5 5 0.4 4 18.84 22.49
718 0.62 808.5 367.5 220.5 3.5 2 0.4 4 16.44 17.1
719 0.62 808.5 367.5 220.5 3.5 3 0.4 4 16.9 16.79
720 0.62 808.5 367.5 220.5 3.5 4 0.4 4 16.94 16.58
721 0.62 808.5 367.5 220.5 3.5 5 0.4 4 16.77 16.79
722 0.98 514.5 294.0 110.25 7.0 2 0.4 5 32.84 32.88
723 0.98 514.5 294.0 110.25 7.0 3 0.4 5 32.72 33.23
724 0.98 514.5 294.0 110.25 7.0 4 0.4 5 32.21 33.76
725 0.98 514.5 294.0 110.25 7.0 5 0.4 5 32.73 34.01
726 0.9 563.5 318.5 122.5 7.0 2 0.4 5 35.67 33.94
727 0.9 563.5 318.5 122.5 7.0 3 0.4 5 35.01 33.14
728 0.9 563.5 318.5 122.5 7.0 4 0.4 5 34.72 38.79
729 0.9 563.5 318.5 122.5 7.0 5 0.4 5 35.24 37.27
730 0.86 588.0 294.0 147.0 7.0 2 0.4 5 32.31 29.69
731 0.86 588.0 294.0 147.0 7.0 3 0.4 5 31.81 31.2
732 0.86 588.0 294.0 147.0 7.0 4 0.4 5 31.12 36.26
733 0.86 588.0 294.0 147.0 7.0 5 0.4 5 32.06 35.71
734 0.82 612.5 318.5 147.0 7.0 2 0.4 5 30.0 29.93
735 0.82 612.5 318.5 147.0 7.0 3 0.4 5 29.5 29.56
736 0.82 612.5 318.5 147.0 7.0 4 0.4 5 29.06 33.84
737 0.82 612.5 318.5 147.0 7.0 5 0.4 5 29.92 32.54
738 0.79 637.0 343.0 147.0 7.0 2 0.4 5 42.11 38.56
739 0.79 637.0 343.0 147.0 7.0 3 0.4 5 41.96 37.7
740 0.79 637.0 343.0 147.0 7.0 4 0.4 5 41.09 47.01
741 0.79 637.0 343.0 147.0 7.0 5 0.4 5 40.79 44.87
742 0.76 661.5 416.5 122.5 7.0 2 0.4 5 38.82 39.37
743 0.76 661.5 416.5 122.5 7.0 3 0.4 5 39.72 39.8
744 0.76 661.5 416.5 122.5 7.0 4 0.4 5 39.31 37.79
745 0.76 661.5 416.5 122.5 7.0 5 0.4 5 39.86 38.18
746 0.74 686.0 245.0 220.5 3.5 2 0.4 5 14.41 16.69
747 0.74 686.0 245.0 220.5 3.5 3 0.4 5 14.19 16.62
748 0.74 686.0 245.0 220.5 3.5 4 0.4 5 14.17 16.94
749 0.74 686.0 245.0 220.5 3.5 5 0.4 5 14.39 16.7
750 0.71 710.5 269.5 220.5 3.5 2 0.4 5 12.43 15.59
751 0.71 710.5 269.5 220.5 3.5 3 0.4 5 12.63 14.58
752 0.71 710.5 269.5 220.5 3.5 4 0.4 5 12.76 15.33
753 0.71 710.5 269.5 220.5 3.5 5 0.4 5 12.42 15.31
754 0.69 735.0 294.0 220.5 3.5 2 0.4 5 14.12 16.63
755 0.69 735.0 294.0 220.5 3.5 3 0.4 5 14.28 15.87
756 0.69 735.0 294.0 220.5 3.5 4 0.4 5 14.37 16.54
757 0.69 735.0 294.0 220.5 3.5 5 0.4 5 14.21 16.74
758 0.66 759.5 318.5 220.5 3.5 2 0.4 5 14.96 17.64
759 0.66 759.5 318.5 220.5 3.5 3 0.4 5 14.92 17.79
760 0.66 759.5 318.5 220.5 3.5 4 0.4 5 14.92 17.55
761 0.66 759.5 318.5 220.5 3.5 5 0.4 5 15.16 18.06
762 0.64 784.0 343.0 220.5 3.5 2 0.4 5 17.69 20.82
763 0.64 784.0 343.0 220.5 3.5 3 0.4 5 18.19 20.21
764 0.64 784.0 343.0 220.5 3.5 4 0.4 5 18.16 20.71
765 0.64 784.0 343.0 220.5 3.5 5 0.4 5 17.88 21.4
766 0.62 808.5 367.5 220.5 3.5 2 0.4 5 16.54 16.88
767 0.62 808.5 367.5 220.5 3.5 3 0.4 5 16.44 17.11
768 0.62 808.5 367.5 220.5 3.5 4 0.4 5 16.48 16.61
769 0.62 808.5 367.5 220.5 3.5 5 0.4 5 16.64 16.03


@ -1,395 +0,0 @@
CRIM,ZN,INDUS,CHAS,NOX,RM,AGE,DIS,RAD,TAX,PTRATIO,B,LSTAT,MEDV
0.00632,18.0,2.31,0.0,0.5379999999999999,6.575,65.2,4.09,1,296,15.3,396.9,4.98,24.0
0.02731,0.0,7.07,0.0,0.469,6.421,78.9,4.9671,2,242,17.8,396.9,9.14,21.6
0.02729,0.0,7.07,0.0,0.469,7.185,61.1,4.9671,2,242,17.8,392.83,4.03,34.7
0.032369999999999996,0.0,2.18,0.0,0.45799999999999996,6.997999999999999,45.8,6.0622,3,222,18.7,394.63,2.94,33.4
0.02985,0.0,2.18,0.0,0.45799999999999996,6.43,58.7,6.0622,3,222,18.7,394.12,5.21,28.7
0.14455,12.5,7.87,0.0,0.524,6.172000000000001,96.1,5.9505,5,311,15.2,396.9,19.15,27.1
0.21124,12.5,7.87,0.0,0.524,5.631,100.0,6.0821,5,311,15.2,386.63,29.93,16.5
0.22489,12.5,7.87,0.0,0.524,6.377000000000001,94.3,6.3467,5,311,15.2,392.52,20.45,15.0
0.11747,12.5,7.87,0.0,0.524,6.0089999999999995,82.9,6.2267,5,311,15.2,396.9,13.27,18.9
0.09378,12.5,7.87,0.0,0.524,5.888999999999999,39.0,5.4509,5,311,15.2,390.5,15.71,21.7
0.62976,0.0,8.14,0.0,0.5379999999999999,5.949,61.8,4.7075,4,307,21.0,396.9,8.26,20.4
0.62739,0.0,8.14,0.0,0.5379999999999999,5.834,56.5,4.4986,4,307,21.0,395.62,8.47,19.9
1.05393,0.0,8.14,0.0,0.5379999999999999,5.935,29.3,4.4986,4,307,21.0,386.85,6.58,23.1
0.7842,0.0,8.14,0.0,0.5379999999999999,5.99,81.7,4.2579,4,307,21.0,386.75,14.67,17.5
0.80271,0.0,8.14,0.0,0.5379999999999999,5.456,36.6,3.7965,4,307,21.0,288.99,11.69,20.2
0.7258,0.0,8.14,0.0,0.5379999999999999,5.727,69.5,3.7965,4,307,21.0,390.95,11.28,18.2
1.25179,0.0,8.14,0.0,0.5379999999999999,5.57,98.1,3.7979,4,307,21.0,376.57,21.02,13.6
0.8520399999999999,0.0,8.14,0.0,0.5379999999999999,5.965,89.2,4.0123,4,307,21.0,392.53,13.83,19.6
1.2324700000000002,0.0,8.14,0.0,0.5379999999999999,6.142,91.7,3.9769,4,307,21.0,396.9,18.72,15.2
0.9884299999999999,0.0,8.14,0.0,0.5379999999999999,5.813,100.0,4.0952,4,307,21.0,394.54,19.88,14.5
0.75026,0.0,8.14,0.0,0.5379999999999999,5.9239999999999995,94.1,4.3996,4,307,21.0,394.33,16.3,15.6
0.84054,0.0,8.14,0.0,0.5379999999999999,5.599,85.7,4.4546,4,307,21.0,303.42,16.51,13.9
0.67191,0.0,8.14,0.0,0.5379999999999999,5.813,90.3,4.6819999999999995,4,307,21.0,376.88,14.81,16.6
0.9557700000000001,0.0,8.14,0.0,0.5379999999999999,6.047000000000001,88.8,4.4534,4,307,21.0,306.38,17.28,14.8
0.77299,0.0,8.14,0.0,0.5379999999999999,6.495,94.4,4.4547,4,307,21.0,387.94,12.8,18.4
1.00245,0.0,8.14,0.0,0.5379999999999999,6.6739999999999995,87.3,4.239,4,307,21.0,380.23,11.98,21.0
1.13081,0.0,8.14,0.0,0.5379999999999999,5.712999999999999,94.1,4.2330000000000005,4,307,21.0,360.17,22.6,12.7
1.3547200000000001,0.0,8.14,0.0,0.5379999999999999,6.072,100.0,4.175,4,307,21.0,376.73,13.04,14.5
1.38799,0.0,8.14,0.0,0.5379999999999999,5.95,82.0,3.99,4,307,21.0,232.6,27.71,13.2
1.15172,0.0,8.14,0.0,0.5379999999999999,5.7010000000000005,95.0,3.7872,4,307,21.0,358.77,18.35,13.1
1.6128200000000001,0.0,8.14,0.0,0.5379999999999999,6.096,96.9,3.7598,4,307,21.0,248.31,20.34,13.5
0.08014,0.0,5.96,0.0,0.499,5.85,41.5,3.9342,5,279,19.2,396.9,8.77,21.0
0.17505,0.0,5.96,0.0,0.499,5.966,30.2,3.8473,5,279,19.2,393.43,10.13,24.7
0.027630000000000002,75.0,2.95,0.0,0.428,6.595,21.8,5.4011,3,252,18.3,395.63,4.32,30.8
0.033589999999999995,75.0,2.95,0.0,0.428,7.024,15.8,5.4011,3,252,18.3,395.62,1.98,34.9
0.12744,0.0,6.91,0.0,0.44799999999999995,6.77,2.9,5.7209,3,233,17.9,385.41,4.84,26.6
0.1415,0.0,6.91,0.0,0.44799999999999995,6.169,6.6,5.7209,3,233,17.9,383.37,5.81,25.3
0.12269000000000001,0.0,6.91,0.0,0.44799999999999995,6.069,40.0,5.7209,3,233,17.9,389.39,9.55,21.2
0.17142000000000002,0.0,6.91,0.0,0.44799999999999995,5.682,33.8,5.1004,3,233,17.9,396.9,10.21,19.3
0.18836,0.0,6.91,0.0,0.44799999999999995,5.7860000000000005,33.3,5.1004,3,233,17.9,396.9,14.15,20.0
0.25387,0.0,6.91,0.0,0.44799999999999995,5.399,95.3,5.87,3,233,17.9,396.9,30.81,14.4
0.21977,0.0,6.91,0.0,0.44799999999999995,5.602,62.0,6.0877,3,233,17.9,396.9,16.2,19.4
0.08872999999999999,21.0,5.64,0.0,0.439,5.962999999999999,45.7,6.8147,4,243,16.8,395.56,13.45,19.7
0.0536,21.0,5.64,0.0,0.439,6.511,21.1,6.8147,4,243,16.8,396.9,5.28,25.0
0.0136,75.0,4.0,0.0,0.41,5.888,47.6,7.3197,3,469,21.1,396.9,14.8,18.9
0.01311,90.0,1.22,0.0,0.40299999999999997,7.249,21.9,8.6966,5,226,17.9,395.93,4.81,35.4
0.02055,85.0,0.74,0.0,0.41,6.382999999999999,35.7,9.1876,2,313,17.3,396.9,5.77,24.7
0.01432,100.0,1.32,0.0,0.41100000000000003,6.816,40.5,8.3248,5,256,15.1,392.9,3.95,31.6
0.15445,25.0,5.13,0.0,0.45299999999999996,6.145,29.2,7.8148,8,284,19.7,390.68,6.86,23.3
0.10328,25.0,5.13,0.0,0.45299999999999996,5.9270000000000005,47.2,6.932,8,284,19.7,396.9,9.22,19.6
0.14932,25.0,5.13,0.0,0.45299999999999996,5.7410000000000005,66.2,7.2254,8,284,19.7,395.11,13.15,18.7
0.17171,25.0,5.13,0.0,0.45299999999999996,5.966,93.4,6.8185,8,284,19.7,378.08,14.44,16.0
0.11027,25.0,5.13,0.0,0.45299999999999996,6.456,67.8,7.2255,8,284,19.7,396.9,6.73,22.2
0.1265,25.0,5.13,0.0,0.45299999999999996,6.7620000000000005,43.4,7.9809,8,284,19.7,395.58,9.5,25.0
0.01951,17.5,1.38,0.0,0.4161,7.104,59.5,9.2229,3,216,18.6,393.24,8.05,33.0
0.03584,80.0,3.37,0.0,0.39799999999999996,6.29,17.8,6.6115,4,337,16.1,396.9,4.67,23.5
0.043789999999999996,80.0,3.37,0.0,0.39799999999999996,5.787000000000001,31.1,6.6115,4,337,16.1,396.9,10.24,19.4
0.05789,12.5,6.07,0.0,0.409,5.877999999999999,21.4,6.497999999999999,4,345,18.9,396.21,8.1,22.0
0.13554000000000002,12.5,6.07,0.0,0.409,5.593999999999999,36.8,6.497999999999999,4,345,18.9,396.9,13.09,17.4
0.12816,12.5,6.07,0.0,0.409,5.885,33.0,6.497999999999999,4,345,18.9,396.9,8.79,20.9
0.08826,0.0,10.81,0.0,0.413,6.417000000000001,6.6,5.2873,4,305,19.2,383.73,6.72,24.2
0.15875999999999998,0.0,10.81,0.0,0.413,5.961,17.5,5.2873,4,305,19.2,376.94,9.88,21.7
0.09164,0.0,10.81,0.0,0.413,6.065,7.8,5.2873,4,305,19.2,390.91,5.52,22.8
0.09512000000000001,0.0,12.83,0.0,0.43700000000000006,6.2860000000000005,45.0,4.5026,5,398,18.7,383.23,8.94,21.4
0.10153,0.0,12.83,0.0,0.43700000000000006,6.279,74.5,4.0522,5,398,18.7,373.66,11.97,20.0
0.08707000000000001,0.0,12.83,0.0,0.43700000000000006,6.14,45.8,4.0905,5,398,18.7,386.96,10.27,20.8
0.05646,0.0,12.83,0.0,0.43700000000000006,6.232,53.7,5.0141,5,398,18.7,386.4,12.34,21.2
0.04113,25.0,4.86,0.0,0.426,6.727,33.5,5.4007,4,281,19.0,396.9,5.29,28.0
0.04462,25.0,4.86,0.0,0.426,6.619,70.4,5.4007,4,281,19.0,395.63,7.22,23.9
0.03659,25.0,4.86,0.0,0.426,6.3020000000000005,32.2,5.4007,4,281,19.0,396.9,6.72,24.8
0.03551,25.0,4.86,0.0,0.426,6.167000000000001,46.7,5.4007,4,281,19.0,390.64,7.51,22.9
0.050589999999999996,0.0,4.49,0.0,0.449,6.388999999999999,48.0,4.7794,3,247,18.5,396.9,9.62,23.9
0.05735,0.0,4.49,0.0,0.449,6.63,56.1,4.4377,3,247,18.5,392.3,6.53,26.6
0.051879999999999996,0.0,4.49,0.0,0.449,6.015,45.1,4.4272,3,247,18.5,395.99,12.86,22.5
0.0566,0.0,3.41,0.0,0.489,7.007000000000001,86.3,3.4217,2,270,17.8,396.9,5.5,23.6
0.053020000000000005,0.0,3.41,0.0,0.489,7.079,63.1,3.4145,2,270,17.8,396.06,5.7,28.7
0.04684,0.0,3.41,0.0,0.489,6.417000000000001,66.1,3.0923,2,270,17.8,392.18,8.81,22.6
0.03932,0.0,3.41,0.0,0.489,6.405,73.9,3.0921,2,270,17.8,393.55,8.2,22.0
0.02875,28.0,15.04,0.0,0.46399999999999997,6.211,28.9,3.6659,4,270,18.2,396.33,6.21,25.0
0.04294,28.0,15.04,0.0,0.46399999999999997,6.249,77.3,3.615,4,270,18.2,396.9,10.59,20.6
0.12204000000000001,0.0,2.89,0.0,0.445,6.625,57.8,3.4952,2,276,18.0,357.98,6.65,28.4
0.11504,0.0,2.89,0.0,0.445,6.162999999999999,69.6,3.4952,2,276,18.0,391.83,11.34,21.4
0.12082999999999999,0.0,2.89,0.0,0.445,8.068999999999999,76.0,3.4952,2,276,18.0,396.9,4.21,38.7
0.08187,0.0,2.89,0.0,0.445,7.82,36.9,3.4952,2,276,18.0,393.53,3.57,43.8
0.0686,0.0,2.89,0.0,0.445,7.416,62.5,3.4952,2,276,18.0,396.9,6.19,33.2
0.14866,0.0,8.56,0.0,0.52,6.727,79.9,2.7778,5,384,20.9,394.76,9.42,27.5
0.11432,0.0,8.56,0.0,0.52,6.781000000000001,71.3,2.8561,5,384,20.9,395.58,7.67,26.5
0.22876,0.0,8.56,0.0,0.52,6.405,85.4,2.7147,5,384,20.9,70.8,10.63,18.6
0.1396,0.0,8.56,0.0,0.52,6.167000000000001,90.0,2.421,5,384,20.9,392.69,12.33,20.1
0.13262000000000002,0.0,8.56,0.0,0.52,5.851,96.7,2.1069,5,384,20.9,394.05,16.47,19.5
0.1712,0.0,8.56,0.0,0.52,5.836,91.9,2.211,5,384,20.9,395.67,18.66,19.5
0.13117,0.0,8.56,0.0,0.52,6.127000000000001,85.2,2.1224,5,384,20.9,387.69,14.09,20.4
0.12802,0.0,8.56,0.0,0.52,6.474,97.1,2.4329,5,384,20.9,395.24,12.27,19.8
0.26363000000000003,0.0,8.56,0.0,0.52,6.229,91.2,2.5451,5,384,20.9,391.23,15.55,19.4
0.10793,0.0,8.56,0.0,0.52,6.195,54.4,2.7778,5,384,20.9,393.49,13.0,21.7
0.10084,0.0,10.01,0.0,0.547,6.715,81.6,2.6775,6,432,17.8,395.59,10.16,22.8
0.12329000000000001,0.0,10.01,0.0,0.547,5.912999999999999,92.9,2.3534,6,432,17.8,394.95,16.21,18.8
0.22211999999999998,0.0,10.01,0.0,0.547,6.0920000000000005,95.4,2.548,6,432,17.8,396.9,17.09,18.7
0.14231,0.0,10.01,0.0,0.547,6.254,84.2,2.2565,6,432,17.8,388.74,10.45,18.5
0.15098,0.0,10.01,0.0,0.547,6.021,82.6,2.7474,6,432,17.8,394.51,10.3,19.2
0.06899,0.0,25.65,0.0,0.581,5.87,69.7,2.2577,2,188,19.1,389.15,14.37,22.0
0.07165,0.0,25.65,0.0,0.581,6.004,84.1,2.1974,2,188,19.1,377.67,14.27,20.3
0.09299,0.0,25.65,0.0,0.581,5.961,92.9,2.0869,2,188,19.1,378.09,17.93,20.5
0.09849,0.0,25.65,0.0,0.581,5.879,95.8,2.0063,2,188,19.1,379.38,17.58,18.8
0.16902,0.0,25.65,0.0,0.581,5.986000000000001,88.4,1.9929,2,188,19.1,385.02,14.81,21.4
0.25915,0.0,21.89,0.0,0.624,5.693,96.0,1.7883,4,437,21.2,392.11,17.19,16.2
0.32543,0.0,21.89,0.0,0.624,6.431,98.8,1.8125,4,437,21.2,396.9,15.39,18.0
0.88125,0.0,21.89,0.0,0.624,5.6370000000000005,94.7,1.9799,4,437,21.2,396.9,18.34,14.3
0.34006,0.0,21.89,0.0,0.624,6.457999999999999,98.9,2.1185,4,437,21.2,395.04,12.6,19.2
1.19294,0.0,21.89,0.0,0.624,6.3260000000000005,97.7,2.271,4,437,21.2,396.9,12.26,19.6
0.59005,0.0,21.89,0.0,0.624,6.372000000000001,97.9,2.3274,4,437,21.2,385.76,11.12,23.0
0.9761700000000001,0.0,21.89,0.0,0.624,5.757000000000001,98.4,2.346,4,437,21.2,262.76,17.31,15.6
0.55778,0.0,21.89,0.0,0.624,6.335,98.2,2.1107,4,437,21.2,394.67,16.96,18.1
0.32264,0.0,21.89,0.0,0.624,5.942,93.5,1.9669,4,437,21.2,378.25,16.9,17.4
0.35233000000000003,0.0,21.89,0.0,0.624,6.454,98.4,1.8498,4,437,21.2,394.08,14.59,17.1
0.54452,0.0,21.89,0.0,0.624,6.151,97.9,1.6687,4,437,21.2,396.9,18.46,17.8
0.2909,0.0,21.89,0.0,0.624,6.1739999999999995,93.6,1.6119,4,437,21.2,388.08,24.16,14.0
1.6286399999999999,0.0,21.89,0.0,0.624,5.019,100.0,1.4394,4,437,21.2,396.9,34.41,14.4
3.32105,0.0,19.58,1.0,0.871,5.403,100.0,1.3216,5,403,14.7,396.9,26.82,13.4
4.0974,0.0,19.58,0.0,0.871,5.468,100.0,1.4118,5,403,14.7,396.9,26.42,15.6
2.7797400000000003,0.0,19.58,0.0,0.871,4.9030000000000005,97.8,1.3459,5,403,14.7,396.9,29.29,11.8
2.37934,0.0,19.58,0.0,0.871,6.13,100.0,1.4191,5,403,14.7,172.91,27.8,13.8
2.7339700000000002,0.0,19.58,0.0,0.871,5.597,94.9,1.5257,5,403,14.7,351.85,21.45,15.4
1.49632,0.0,19.58,0.0,0.871,5.404,100.0,1.5916,5,403,14.7,341.6,13.28,19.6
2.14918,0.0,19.58,0.0,0.871,5.709,98.5,1.6232,5,403,14.7,261.95,15.79,19.4
1.41385,0.0,19.58,1.0,0.871,6.129,96.0,1.7494,5,403,14.7,321.02,15.12,17.0
2.4466799999999997,0.0,19.58,0.0,0.871,5.272,94.0,1.7364,5,403,14.7,88.63,16.14,13.1
1.34284,0.0,19.58,0.0,0.605,6.066,100.0,1.7573,5,403,14.7,353.89,6.43,24.3
1.4250200000000002,0.0,19.58,0.0,0.871,6.51,100.0,1.7659,5,403,14.7,364.31,7.39,23.3
1.27346,0.0,19.58,1.0,0.605,6.25,92.6,1.7984,5,403,14.7,338.92,5.5,27.0
1.46336,0.0,19.58,0.0,0.605,7.489,90.8,1.9709,5,403,14.7,374.43,1.73,50.0
1.8337700000000001,0.0,19.58,1.0,0.605,7.8020000000000005,98.2,2.0407,5,403,14.7,389.61,1.92,50.0
2.2423599999999997,0.0,19.58,0.0,0.605,5.854,91.8,2.4219999999999997,5,403,14.7,395.11,11.64,22.7
2.924,0.0,19.58,0.0,0.605,6.101,93.0,2.2834,5,403,14.7,240.16,9.81,25.0
2.01019,0.0,19.58,0.0,0.605,7.928999999999999,96.2,2.0459,5,403,14.7,369.3,3.7,50.0
2.3004,0.0,19.58,0.0,0.605,6.319,96.1,2.1,5,403,14.7,297.09,11.1,23.8
2.4495299999999998,0.0,19.58,0.0,0.605,6.402,95.2,2.2625,5,403,14.7,330.04,11.32,22.3
1.2074200000000002,0.0,19.58,0.0,0.605,5.875,94.6,2.4259,5,403,14.7,292.29,14.43,17.4
2.3139,0.0,19.58,0.0,0.605,5.88,97.3,2.3887,5,403,14.7,348.13,12.03,19.1
0.13914,0.0,4.05,0.0,0.51,5.572,88.5,2.5961,5,296,16.6,396.9,14.69,23.1
0.08447,0.0,4.05,0.0,0.51,5.859,68.7,2.7019,5,296,16.6,393.23,9.64,22.6
0.06663999999999999,0.0,4.05,0.0,0.51,6.546,33.1,3.1323,5,296,16.6,390.96,5.33,29.4
0.07022,0.0,4.05,0.0,0.51,6.02,47.2,3.5549,5,296,16.6,393.23,10.11,23.2
0.06642,0.0,4.05,0.0,0.51,6.86,74.4,2.9153,5,296,16.6,391.27,6.92,29.9
0.0578,0.0,2.46,0.0,0.488,6.98,58.4,2.8289999999999997,3,193,17.8,396.9,5.04,37.2
0.06588,0.0,2.46,0.0,0.488,7.765,83.3,2.741,3,193,17.8,395.56,7.56,39.8
0.06888,0.0,2.46,0.0,0.488,6.144,62.2,2.5979,3,193,17.8,396.9,9.45,36.2
0.09103,0.0,2.46,0.0,0.488,7.155,92.2,2.7006,3,193,17.8,394.12,4.82,37.9
0.08308,0.0,2.46,0.0,0.488,5.604,89.8,2.9879,3,193,17.8,391.0,13.98,26.4
0.06047,0.0,2.46,0.0,0.488,6.153,68.8,3.2797,3,193,17.8,387.11,13.15,29.6
0.07875,45.0,3.44,0.0,0.43700000000000006,6.782,41.1,3.7886,5,398,15.2,393.87,6.68,32.0
0.12579,45.0,3.44,0.0,0.43700000000000006,6.556,29.1,4.5667,5,398,15.2,382.84,4.56,29.8
0.0837,45.0,3.44,0.0,0.43700000000000006,7.185,38.9,4.5667,5,398,15.2,396.9,5.39,34.9
0.09068,45.0,3.44,0.0,0.43700000000000006,6.9510000000000005,21.5,6.4798,5,398,15.2,377.68,5.1,37.0
0.01439,60.0,2.93,0.0,0.401,6.604,18.8,6.2196,1,265,15.6,376.7,4.38,29.1
0.01381,80.0,0.46,0.0,0.42200000000000004,7.875,32.0,5.6484,4,255,14.4,394.23,2.97,50.0
0.04666,80.0,1.52,0.0,0.40399999999999997,7.107,36.6,7.309,2,329,12.6,354.31,8.61,30.3
0.03768,80.0,1.52,0.0,0.40399999999999997,7.274,38.3,7.309,2,329,12.6,392.2,6.62,34.6
0.0315,95.0,1.47,0.0,0.40299999999999997,6.975,15.3,7.6534,3,402,17.0,396.9,4.56,34.9
0.01778,95.0,1.47,0.0,0.40299999999999997,7.135,13.9,7.6534,3,402,17.0,384.3,4.45,32.9
0.03445,82.5,2.03,0.0,0.415,6.162000000000001,38.4,6.27,2,348,14.7,393.77,7.43,24.1
0.021769999999999998,82.5,2.03,0.0,0.415,7.61,15.7,6.27,2,348,14.7,395.38,3.11,42.3
0.0351,95.0,2.68,0.0,0.4161,7.853,33.2,5.118,4,224,14.7,392.78,3.81,48.5
0.02009,95.0,2.68,0.0,0.4161,8.033999999999999,31.9,5.118,4,224,14.7,390.55,2.88,50.0
0.13587,0.0,10.59,1.0,0.489,6.064,59.1,4.2392,4,277,18.6,381.32,14.66,24.4
0.43571000000000004,0.0,10.59,1.0,0.489,5.343999999999999,100.0,3.875,4,277,18.6,396.9,23.09,20.0
0.37578,0.0,10.59,1.0,0.489,5.404,88.6,3.665,4,277,18.6,395.24,23.98,19.3
0.21719000000000002,0.0,10.59,1.0,0.489,5.807,53.8,3.6526,4,277,18.6,390.94,16.03,22.4
0.14052,0.0,10.59,0.0,0.489,6.375,32.3,3.9454,4,277,18.6,385.81,9.38,28.1
0.28955,0.0,10.59,0.0,0.489,5.412000000000001,9.8,3.5875,4,277,18.6,348.93,29.55,23.7
0.0456,0.0,13.89,1.0,0.55,5.888,56.0,3.1121,5,276,16.4,392.8,13.51,23.3
0.07013,0.0,13.89,0.0,0.55,6.642,85.1,3.4211,5,276,16.4,392.78,9.69,28.7
0.11069000000000001,0.0,13.89,1.0,0.55,5.9510000000000005,93.8,2.8893,5,276,16.4,396.9,17.92,21.5
0.35809,0.0,6.2,1.0,0.507,6.9510000000000005,88.5,2.8617,8,307,17.4,391.7,9.71,26.7
0.40771,0.0,6.2,1.0,0.507,6.164,91.3,3.048,8,307,17.4,395.24,21.46,21.7
0.62356,0.0,6.2,1.0,0.507,6.879,77.7,3.2721,8,307,17.4,390.39,9.93,27.5
0.6147,0.0,6.2,0.0,0.507,6.617999999999999,80.8,3.2721,8,307,17.4,396.9,7.6,30.1
0.31533,0.0,6.2,0.0,0.504,8.266,78.3,2.8944,8,307,17.4,385.05,4.14,44.8
0.52693,0.0,6.2,0.0,0.504,8.725,83.0,2.8944,8,307,17.4,382.0,4.63,50.0
0.41238,0.0,6.2,0.0,0.504,7.162999999999999,79.9,3.2157,8,307,17.4,372.08,6.36,31.6
0.537,0.0,6.2,0.0,0.504,5.981,68.1,3.6715,8,307,17.4,378.35,11.65,24.3
0.46296000000000004,0.0,6.2,0.0,0.504,7.412000000000001,76.9,3.6715,8,307,17.4,376.14,5.25,31.7
0.57529,0.0,6.2,0.0,0.507,8.337,73.3,3.8384,8,307,17.4,385.91,2.47,41.7
0.44791000000000003,0.0,6.2,1.0,0.507,6.726,66.5,3.6519,8,307,17.4,360.2,8.05,29.0
0.33045,0.0,6.2,0.0,0.507,6.086,61.5,3.6519,8,307,17.4,376.75,10.88,24.0
0.51183,0.0,6.2,0.0,0.507,7.358,71.6,4.148,8,307,17.4,390.07,4.73,31.5
0.09252,30.0,4.93,0.0,0.428,6.606,42.2,6.1899,6,300,16.6,383.78,7.37,23.3
0.1029,30.0,4.93,0.0,0.428,6.358,52.9,7.0355,6,300,16.6,372.75,11.22,22.2
0.12757000000000002,30.0,4.93,0.0,0.428,6.393,7.8,7.0355,6,300,16.6,374.71,5.19,23.7
0.20608,22.0,5.86,0.0,0.431,5.593,76.5,7.9549,7,330,19.1,372.49,12.5,17.6
0.33983,22.0,5.86,0.0,0.431,6.108,34.9,8.0555,7,330,19.1,390.18,9.16,24.3
0.19657,22.0,5.86,0.0,0.431,6.226,79.2,8.0555,7,330,19.1,376.14,10.15,20.5
0.16439,22.0,5.86,0.0,0.431,6.433,49.1,7.8265,7,330,19.1,374.71,9.52,24.5
0.19072999999999998,22.0,5.86,0.0,0.431,6.718,17.5,7.8265,7,330,19.1,393.74,6.56,26.2
0.1403,22.0,5.86,0.0,0.431,6.487,13.0,7.3967,7,330,19.1,396.28,5.9,24.4
0.21409,22.0,5.86,0.0,0.431,6.438,8.9,7.3967,7,330,19.1,377.07,3.59,24.8
0.08221,22.0,5.86,0.0,0.431,6.957000000000001,6.8,8.9067,7,330,19.1,386.09,3.53,29.6
0.36894,22.0,5.86,0.0,0.431,8.259,8.4,8.9067,7,330,19.1,396.9,3.54,42.8
0.035480000000000005,80.0,3.64,0.0,0.392,5.876,19.1,9.2203,1,315,16.4,395.18,9.25,20.9
0.015380000000000001,90.0,3.75,0.0,0.39399999999999996,7.454,34.2,6.3361,3,244,15.9,386.34,3.11,44.0
0.61154,20.0,3.97,0.0,0.647,8.704,86.9,1.801,5,264,13.0,389.7,5.12,50.0
0.66351,20.0,3.97,0.0,0.647,7.332999999999999,100.0,1.8946,5,264,13.0,383.29,7.79,36.0
0.65665,20.0,3.97,0.0,0.647,6.8420000000000005,100.0,2.0107,5,264,13.0,391.93,6.9,30.1
0.5401100000000001,20.0,3.97,0.0,0.647,7.202999999999999,81.8,2.1121,5,264,13.0,392.8,9.59,33.8
0.5341199999999999,20.0,3.97,0.0,0.647,7.52,89.4,2.1398,5,264,13.0,388.37,7.26,43.1
0.82526,20.0,3.97,0.0,0.647,7.327000000000001,94.5,2.0788,5,264,13.0,393.42,11.25,31.0
0.55007,20.0,3.97,0.0,0.647,7.206,91.6,1.9301,5,264,13.0,387.89,8.1,36.5
0.76162,20.0,3.97,0.0,0.647,5.56,62.8,1.9865,5,264,13.0,392.4,10.45,22.8
0.57834,20.0,3.97,0.0,0.575,8.297,67.0,2.4216,5,264,13.0,384.54,7.44,50.0
0.5405,20.0,3.97,0.0,0.575,7.47,52.6,2.872,5,264,13.0,390.3,3.16,43.5
0.09065,20.0,6.96,1.0,0.46399999999999997,5.92,61.5,3.9175,3,223,18.6,391.34,13.65,20.7
0.29916,20.0,6.96,0.0,0.46399999999999997,5.856,42.1,4.428999999999999,3,223,18.6,388.65,13.0,21.1
0.1146,20.0,6.96,0.0,0.46399999999999997,6.537999999999999,58.7,3.9175,3,223,18.6,394.96,7.73,24.4
0.22188000000000002,20.0,6.96,1.0,0.46399999999999997,7.691,51.8,4.3665,3,223,18.6,390.77,6.58,35.2
0.05644,40.0,6.41,1.0,0.447,6.757999999999999,32.9,4.0776,4,254,17.6,396.9,3.53,32.4
0.09604,40.0,6.41,0.0,0.447,6.854,42.8,4.2673,4,254,17.6,396.9,2.98,32.0
0.10469,40.0,6.41,1.0,0.447,7.267,49.0,4.7872,4,254,17.6,389.25,6.05,33.2
0.07977999999999999,40.0,6.41,0.0,0.447,6.482,32.1,4.1403,4,254,17.6,396.9,7.19,29.1
0.21038,20.0,3.33,0.0,0.4429,6.812,32.2,4.1007,5,216,14.9,396.9,4.85,35.1
0.03578,20.0,3.33,0.0,0.4429,7.82,64.5,4.6947,5,216,14.9,387.31,3.76,45.4
0.06129,20.0,3.33,1.0,0.4429,7.645,49.7,5.2119,5,216,14.9,377.07,3.01,46.0
0.015009999999999999,90.0,1.21,1.0,0.401,7.922999999999999,24.8,5.885,1,198,13.6,395.52,3.16,50.0
0.009059999999999999,90.0,2.97,0.0,0.4,7.087999999999999,20.8,7.3073,1,285,15.3,394.72,7.85,32.2
0.01096,55.0,2.25,0.0,0.389,6.452999999999999,31.9,7.3073,1,300,15.3,394.72,8.23,22.0
0.03871,52.5,5.32,0.0,0.405,6.209,31.3,7.3172,6,293,16.6,396.9,7.14,23.2
0.04297,52.5,5.32,0.0,0.405,6.565,22.9,7.3172,6,293,16.6,371.72,9.51,24.8
0.035019999999999996,80.0,4.95,0.0,0.41100000000000003,6.861000000000001,27.9,5.1167,4,245,19.2,396.9,3.33,28.5
0.07886,80.0,4.95,0.0,0.41100000000000003,7.148,27.7,5.1167,4,245,19.2,396.9,3.56,37.3
0.08265,0.0,13.92,0.0,0.43700000000000006,6.127000000000001,18.4,5.5027,4,289,16.0,396.9,8.58,23.9
0.12932000000000002,0.0,13.92,0.0,0.43700000000000006,6.678,31.1,5.9604,4,289,16.0,396.9,6.27,28.6
0.053720000000000004,0.0,13.92,0.0,0.43700000000000006,6.5489999999999995,51.0,5.9604,4,289,16.0,392.85,7.39,27.1
0.06466000000000001,70.0,2.24,0.0,0.4,6.345,20.1,7.8278,5,358,14.8,368.24,4.97,22.5
0.05561,70.0,2.24,0.0,0.4,7.041,10.0,7.8278,5,358,14.8,371.58,4.74,29.0
0.04417,70.0,2.24,0.0,0.4,6.871,47.4,7.8278,5,358,14.8,390.86,6.07,24.8
0.05515,33.0,2.18,0.0,0.47200000000000003,7.236000000000001,41.1,4.022,7,222,18.4,393.68,6.93,36.1
0.07503,33.0,2.18,0.0,0.47200000000000003,7.42,71.9,3.0992,7,222,18.4,396.9,6.47,33.4
0.049319999999999996,33.0,2.18,0.0,0.47200000000000003,6.849,70.3,3.1827,7,222,18.4,396.9,7.53,28.2
0.49298000000000003,0.0,9.9,0.0,0.544,6.635,82.5,3.3175,4,304,18.4,396.9,4.54,22.8
0.3494,0.0,9.9,0.0,0.544,5.972,76.7,3.1025,4,304,18.4,396.24,9.97,20.3
2.63548,0.0,9.9,0.0,0.544,4.973,37.8,2.5194,4,304,18.4,350.45,12.64,16.1
0.7904100000000001,0.0,9.9,0.0,0.544,6.122000000000001,52.8,2.6403,4,304,18.4,396.9,5.98,22.1
0.26169000000000003,0.0,9.9,0.0,0.544,6.023,90.4,2.8339999999999996,4,304,18.4,396.3,11.72,19.4
0.26938,0.0,9.9,0.0,0.544,6.266,82.8,3.2628,4,304,18.4,393.39,7.9,21.6
0.3692,0.0,9.9,0.0,0.544,6.567,87.3,3.6023,4,304,18.4,395.69,9.28,23.8
0.25356,0.0,9.9,0.0,0.544,5.705,77.7,3.945,4,304,18.4,396.42,11.5,16.2
0.24522,0.0,9.9,0.0,0.544,5.782,71.7,4.0317,4,304,18.4,396.9,15.94,19.8
0.40202,0.0,9.9,0.0,0.544,6.382000000000001,67.2,3.5325,4,304,18.4,395.21,10.36,23.1
0.47547,0.0,9.9,0.0,0.544,6.1129999999999995,58.8,4.0019,4,304,18.4,396.23,12.73,21.0
0.1676,0.0,7.38,0.0,0.493,6.426,52.3,4.5404,5,287,19.6,396.9,7.2,23.8
0.18159,0.0,7.38,0.0,0.493,6.376,54.3,4.5404,5,287,19.6,396.9,6.87,23.1
0.35114,0.0,7.38,0.0,0.493,6.041,49.9,4.7211,5,287,19.6,396.9,7.7,20.4
0.28392,0.0,7.38,0.0,0.493,5.707999999999999,74.3,4.7211,5,287,19.6,391.13,11.74,18.5
0.34109,0.0,7.38,0.0,0.493,6.415,40.1,4.7211,5,287,19.6,396.9,6.12,25.0
0.19186,0.0,7.38,0.0,0.493,6.431,14.7,5.4159,5,287,19.6,393.68,5.08,24.6
0.30346999999999996,0.0,7.38,0.0,0.493,6.312,28.9,5.4159,5,287,19.6,396.9,6.15,23.0
0.24103000000000002,0.0,7.38,0.0,0.493,6.082999999999999,43.7,5.4159,5,287,19.6,396.9,12.79,22.2
0.06617,0.0,3.24,0.0,0.46,5.867999999999999,25.8,5.2146,4,430,16.9,382.44,9.97,19.3
0.06724,0.0,3.24,0.0,0.46,6.332999999999999,17.2,5.2146,4,430,16.9,375.21,7.34,22.6
0.050230000000000004,35.0,6.06,0.0,0.4379,5.706,28.4,6.6407,1,304,16.9,394.02,12.43,17.1
0.05083,0.0,5.19,0.0,0.515,6.316,38.1,6.4584,5,224,20.2,389.71,5.68,22.2
0.037380000000000004,0.0,5.19,0.0,0.515,6.31,38.5,6.4584,5,224,20.2,389.4,6.75,20.7
0.03961,0.0,5.19,0.0,0.515,6.037000000000001,34.5,5.9853,5,224,20.2,396.9,8.01,21.1
0.03427,0.0,5.19,0.0,0.515,5.869,46.3,5.2311,5,224,20.2,396.9,9.8,19.5
0.030410000000000003,0.0,5.19,0.0,0.515,5.895,59.6,5.615,5,224,20.2,394.81,10.56,18.5
0.03306,0.0,5.19,0.0,0.515,6.059,37.3,4.8122,5,224,20.2,396.14,8.51,20.6
0.054970000000000005,0.0,5.19,0.0,0.515,5.985,45.4,4.8122,5,224,20.2,396.9,9.74,19.0
0.06151,0.0,5.19,0.0,0.515,5.968,58.5,4.8122,5,224,20.2,396.9,9.29,18.7
0.013009999999999999,35.0,1.52,0.0,0.442,7.2410000000000005,49.3,7.0379,1,284,15.5,394.74,5.49,32.7
0.024980000000000002,0.0,1.89,0.0,0.518,6.54,59.7,6.2669,1,422,15.9,389.96,8.65,16.5
0.02543,55.0,3.78,0.0,0.484,6.696000000000001,56.4,5.7321,5,370,17.6,396.9,7.18,23.9
0.03113,0.0,4.39,0.0,0.442,6.013999999999999,48.5,8.0136,3,352,18.8,385.64,10.53,17.5
0.06162,0.0,4.39,0.0,0.442,5.898,52.3,8.0136,3,352,18.8,364.61,12.67,17.2
0.0187,85.0,4.15,0.0,0.429,6.516,27.7,8.5353,4,351,17.9,392.43,6.36,23.1
0.015009999999999999,80.0,2.01,0.0,0.435,6.635,29.7,8.344,4,280,17.0,390.94,5.99,24.5
0.0795,60.0,1.69,0.0,0.41100000000000003,6.579,35.9,10.7103,4,411,18.3,370.78,5.49,24.1
0.07244,60.0,1.69,0.0,0.41100000000000003,5.8839999999999995,18.5,10.7103,4,411,18.3,392.33,7.79,18.6
0.01709,90.0,2.02,0.0,0.41,6.728,36.1,12.1265,5,187,17.0,384.46,4.5,30.1
0.04301,80.0,1.91,0.0,0.413,5.662999999999999,21.9,10.5857,4,334,22.0,382.8,8.05,18.2
8.98296,0.0,18.1,1.0,0.77,6.212000000000001,97.4,2.1222,24,666,20.2,377.73,17.6,17.8
3.8497,0.0,18.1,1.0,0.77,6.395,91.0,2.5052,24,666,20.2,391.34,13.27,21.7
5.20177,0.0,18.1,1.0,0.77,6.127000000000001,83.4,2.7227,24,666,20.2,395.43,11.48,22.7
4.541919999999999,0.0,18.1,0.0,0.77,6.398,88.0,2.5182,24,666,20.2,374.56,7.79,25.0
3.83684,0.0,18.1,0.0,0.77,6.251,91.1,2.2955,24,666,20.2,350.65,14.19,19.9
3.6782199999999996,0.0,18.1,0.0,0.77,5.362,96.2,2.1036,24,666,20.2,380.79,10.19,20.8
4.22239,0.0,18.1,1.0,0.77,5.803,89.0,1.9047,24,666,20.2,353.04,14.64,16.8
3.4742800000000003,0.0,18.1,1.0,0.718,8.78,82.9,1.9047,24,666,20.2,354.55,5.29,21.9
4.55587,0.0,18.1,0.0,0.718,3.5610000000000004,87.9,1.6132,24,666,20.2,354.7,7.12,27.5
3.69695,0.0,18.1,0.0,0.718,4.963,91.4,1.7523,24,666,20.2,316.03,14.0,21.9
6.53876,0.0,18.1,1.0,0.631,7.016,97.5,1.2024,24,666,20.2,392.05,2.96,50.0
9.2323,0.0,18.1,0.0,0.631,6.216,100.0,1.1691,24,666,20.2,366.15,9.53,50.0
8.26725,0.0,18.1,1.0,0.6679999999999999,5.875,89.6,1.1296,24,666,20.2,347.88,8.88,50.0
11.1081,0.0,18.1,0.0,0.6679999999999999,4.906000000000001,100.0,1.1742,24,666,20.2,396.9,34.77,13.8
18.4982,0.0,18.1,0.0,0.6679999999999999,4.138,100.0,1.137,24,666,20.2,396.9,37.97,13.8
9.82349,0.0,18.1,0.0,0.6709999999999999,6.794,98.8,1.358,24,666,20.2,396.9,21.24,13.3
23.6482,0.0,18.1,0.0,0.6709999999999999,6.38,96.2,1.3861,24,666,20.2,396.9,23.69,13.1
17.8667,0.0,18.1,0.0,0.6709999999999999,6.223,100.0,1.3861,24,666,20.2,393.74,21.78,10.2
88.9762,0.0,18.1,0.0,0.6709999999999999,6.968,91.9,1.4165,24,666,20.2,396.9,17.21,10.4
15.8744,0.0,18.1,0.0,0.6709999999999999,6.545,99.1,1.5192,24,666,20.2,396.9,21.08,10.9
9.18702,0.0,18.1,0.0,0.7,5.5360000000000005,100.0,1.5804,24,666,20.2,396.9,23.6,11.3
20.0849,0.0,18.1,0.0,0.7,4.368,91.2,1.4395,24,666,20.2,285.83,30.63,8.8
16.8118,0.0,18.1,0.0,0.7,5.277,98.1,1.4261,24,666,20.2,396.9,30.81,7.2
24.3938,0.0,18.1,0.0,0.7,4.652,100.0,1.4672,24,666,20.2,396.9,28.28,10.5
22.5971,0.0,18.1,0.0,0.7,5.0,89.5,1.5184,24,666,20.2,396.9,31.99,7.4
8.15174,0.0,18.1,0.0,0.7,5.39,98.9,1.7281,24,666,20.2,396.9,20.85,11.5
6.96215,0.0,18.1,0.0,0.7,5.712999999999999,97.0,1.9265,24,666,20.2,394.43,17.11,15.1
5.29305,0.0,18.1,0.0,0.7,6.051,82.5,2.1678,24,666,20.2,378.38,18.76,23.2
11.5779,0.0,18.1,0.0,0.7,5.0360000000000005,97.0,1.77,24,666,20.2,396.9,25.68,9.7
8.71675,0.0,18.1,0.0,0.693,6.471,98.8,1.7257,24,666,20.2,391.98,17.12,13.1
5.87205,0.0,18.1,0.0,0.693,6.405,96.0,1.6768,24,666,20.2,396.9,19.37,12.5
7.67202,0.0,18.1,0.0,0.693,5.747000000000001,98.9,1.6334,24,666,20.2,393.1,19.92,8.5
38.3518,0.0,18.1,0.0,0.693,5.452999999999999,100.0,1.4896,24,666,20.2,396.9,30.59,5.0
9.91655,0.0,18.1,0.0,0.693,5.852,77.8,1.5004,24,666,20.2,338.16,29.97,6.3
25.0461,0.0,18.1,0.0,0.693,5.987,100.0,1.5888,24,666,20.2,396.9,26.77,5.6
9.59571,0.0,18.1,0.0,0.693,6.404,100.0,1.639,24,666,20.2,376.11,20.31,12.1
24.8017,0.0,18.1,0.0,0.693,5.349,96.0,1.7028,24,666,20.2,396.9,19.77,8.3
41.5292,0.0,18.1,0.0,0.693,5.531000000000001,85.4,1.6074,24,666,20.2,329.46,27.38,8.5
67.9208,0.0,18.1,0.0,0.693,5.683,100.0,1.4254,24,666,20.2,384.97,22.98,5.0
7.40389,0.0,18.1,0.0,0.597,5.617000000000001,97.9,1.4547,24,666,20.2,314.64,26.4,17.2
51.1358,0.0,18.1,0.0,0.597,5.757000000000001,100.0,1.413,24,666,20.2,2.6,10.11,15.0
14.0507,0.0,18.1,0.0,0.597,6.657,100.0,1.5275,24,666,20.2,35.05,21.22,17.2
18.811,0.0,18.1,0.0,0.597,4.628,100.0,1.5539,24,666,20.2,28.79,34.37,17.9
28.6558,0.0,18.1,0.0,0.597,5.155,100.0,1.5894,24,666,20.2,210.97,20.08,16.3
45.7461,0.0,18.1,0.0,0.693,4.519,100.0,1.6582,24,666,20.2,88.27,36.98,7.0
18.0846,0.0,18.1,0.0,0.679,6.434,100.0,1.8347,24,666,20.2,27.25,29.05,7.2
10.8342,0.0,18.1,0.0,0.679,6.782,90.8,1.8195,24,666,20.2,21.57,25.79,7.5
25.9406,0.0,18.1,0.0,0.679,5.303999999999999,89.1,1.6475,24,666,20.2,127.36,26.64,10.4
73.5341,0.0,18.1,0.0,0.679,5.957000000000001,100.0,1.8026,24,666,20.2,16.45,20.62,8.8
11.8123,0.0,18.1,0.0,0.718,6.824,76.5,1.794,24,666,20.2,48.45,22.74,8.4
11.0874,0.0,18.1,0.0,0.718,6.4110000000000005,100.0,1.8589,24,666,20.2,318.75,15.02,16.7
7.022589999999999,0.0,18.1,0.0,0.718,6.006,95.3,1.8746,24,666,20.2,319.98,15.7,14.2
12.0482,0.0,18.1,0.0,0.614,5.648,87.6,1.9512,24,666,20.2,291.55,14.1,20.8
8.792119999999999,0.0,18.1,0.0,0.584,5.565,70.6,2.0635,24,666,20.2,3.65,17.16,11.7
15.8603,0.0,18.1,0.0,0.679,5.896,95.4,1.9096,24,666,20.2,7.68,24.39,8.3
7.36711,0.0,18.1,0.0,0.679,6.193,78.1,1.9356,24,666,20.2,96.73,21.52,11.0
10.0623,0.0,18.1,0.0,0.584,6.832999999999999,94.3,2.0882,24,666,20.2,81.33,19.69,14.1
6.44405,0.0,18.1,0.0,0.584,6.425,74.8,2.2004,24,666,20.2,97.95,12.03,16.1
5.5810699999999995,0.0,18.1,0.0,0.713,6.436,87.9,2.3158,24,666,20.2,100.19,16.22,14.3
13.9134,0.0,18.1,0.0,0.713,6.207999999999999,95.0,2.2222,24,666,20.2,100.63,15.17,11.7
11.1604,0.0,18.1,0.0,0.74,6.629,94.6,2.1247,24,666,20.2,109.85,23.27,13.4
14.4208,0.0,18.1,0.0,0.74,6.461,93.3,2.0026,24,666,20.2,27.49,18.05,9.6
15.1772,0.0,18.1,0.0,0.74,6.152,100.0,1.9142,24,666,20.2,9.32,26.45,8.7
13.6781,0.0,18.1,0.0,0.74,5.935,87.9,1.8206,24,666,20.2,68.95,34.02,8.4
9.39063,0.0,18.1,0.0,0.74,5.627000000000001,93.9,1.8172,24,666,20.2,396.9,22.88,12.8
9.96654,0.0,18.1,0.0,0.74,6.485,100.0,1.9784,24,666,20.2,386.73,18.85,15.4
12.8023,0.0,18.1,0.0,0.74,5.854,96.6,1.8956,24,666,20.2,240.52,23.79,10.8
10.6718,0.0,18.1,0.0,0.74,6.459,94.8,1.9879,24,666,20.2,43.06,23.98,11.8
6.288069999999999,0.0,18.1,0.0,0.74,6.341,96.4,2.072,24,666,20.2,318.01,17.79,14.9
9.92485,0.0,18.1,0.0,0.74,6.251,96.6,2.198,24,666,20.2,388.52,16.44,12.6
9.329089999999999,0.0,18.1,0.0,0.713,6.185,98.7,2.2616,24,666,20.2,396.9,18.13,14.1
7.52601,0.0,18.1,0.0,0.713,6.417000000000001,98.3,2.185,24,666,20.2,304.21,19.31,13.0
5.09017,0.0,18.1,0.0,0.713,6.297000000000001,91.8,2.3682,24,666,20.2,385.09,17.27,16.1
9.513630000000001,0.0,18.1,0.0,0.713,6.728,94.1,2.4961,24,666,20.2,6.68,18.71,14.9
4.75237,0.0,18.1,0.0,0.713,6.525,86.5,2.4358,24,666,20.2,50.92,18.13,14.1
4.668830000000001,0.0,18.1,0.0,0.713,5.976,87.9,2.5806,24,666,20.2,10.48,19.01,12.7
8.20058,0.0,18.1,0.0,0.713,5.936,80.3,2.7792,24,666,20.2,3.5,16.94,13.5
6.80117,0.0,18.1,0.0,0.713,6.081,84.4,2.7175,24,666,20.2,396.9,14.7,20.0
3.69311,0.0,18.1,0.0,0.713,6.376,88.4,2.5671,24,666,20.2,391.43,14.65,17.7
6.65492,0.0,18.1,0.0,0.713,6.317,83.0,2.7344,24,666,20.2,396.9,13.99,19.5
5.82115,0.0,18.1,0.0,0.713,6.513,89.9,2.8016,24,666,20.2,393.82,10.29,20.2
7.83932,0.0,18.1,0.0,0.655,6.209,65.4,2.9634,24,666,20.2,396.9,13.22,21.4
4.422280000000001,0.0,18.1,0.0,0.584,6.002999999999999,94.5,2.5403,24,666,20.2,331.29,21.32,19.1
15.5757,0.0,18.1,0.0,0.58,5.926,71.0,2.9084,24,666,20.2,368.74,18.13,19.1
13.0751,0.0,18.1,0.0,0.58,5.712999999999999,56.7,2.8237,24,666,20.2,396.9,14.76,20.1
4.34879,0.0,18.1,0.0,0.58,6.167000000000001,84.0,3.0334,24,666,20.2,396.9,16.29,19.9
4.03841,0.0,18.1,0.0,0.532,6.229,90.7,3.0993,24,666,20.2,395.33,12.87,19.6
3.56868,0.0,18.1,0.0,0.58,6.437,75.0,2.8965,24,666,20.2,393.37,14.36,23.2
8.05579,0.0,18.1,0.0,0.584,5.4270000000000005,95.4,2.4298,24,666,20.2,352.58,18.14,13.8
6.39312,0.0,18.1,0.0,0.584,6.162000000000001,97.4,2.206,24,666,20.2,302.76,24.1,13.3
4.87141,0.0,18.1,0.0,0.614,6.484,93.6,2.3053,24,666,20.2,396.21,18.68,16.7
15.0234,0.0,18.1,0.0,0.614,5.303999999999999,97.3,2.1007,24,666,20.2,349.48,24.91,12.0
10.232999999999999,0.0,18.1,0.0,0.614,6.185,96.7,2.1705,24,666,20.2,379.7,18.03,14.6
5.8240099999999995,0.0,18.1,0.0,0.532,6.242000000000001,64.7,3.4242,24,666,20.2,396.9,10.74,23.0
5.7081800000000005,0.0,18.1,0.0,0.532,6.75,74.9,3.3317,24,666,20.2,393.07,7.74,23.7
2.81838,0.0,18.1,0.0,0.532,5.7620000000000005,40.3,4.0983,24,666,20.2,392.92,10.42,21.8
2.37857,0.0,18.1,0.0,0.583,5.871,41.9,3.7239999999999998,24,666,20.2,370.73,13.34,20.6
3.6736699999999995,0.0,18.1,0.0,0.583,6.312,51.9,3.9917,24,666,20.2,388.62,10.58,21.2
5.69175,0.0,18.1,0.0,0.583,6.114,79.8,3.5459,24,666,20.2,392.68,14.98,19.1
4.8356699999999995,0.0,18.1,0.0,0.583,5.905,53.2,3.1523,24,666,20.2,388.22,11.45,20.6
0.15086,0.0,27.74,0.0,0.609,5.454,92.7,1.8209,4,711,20.1,395.09,18.06,15.2
0.18337,0.0,27.74,0.0,0.609,5.414,98.3,1.7554,4,711,20.1,344.05,23.97,7.0
0.20745999999999998,0.0,27.74,0.0,0.609,5.093,98.0,1.8226,4,711,20.1,318.43,29.68,8.1
0.10574000000000001,0.0,27.74,0.0,0.609,5.983,98.8,1.8681,4,711,20.1,390.11,18.07,13.6
0.11132,0.0,27.74,0.0,0.609,5.983,83.5,2.1099,4,711,20.1,396.9,13.35,20.1
0.17331,0.0,9.69,0.0,0.585,5.707000000000001,54.0,2.3817,6,391,19.2,396.9,12.01,21.8
0.27957,0.0,9.69,0.0,0.585,5.926,42.6,2.3817,6,391,19.2,396.9,13.59,24.5
0.17899,0.0,9.69,0.0,0.585,5.67,28.8,2.7986,6,391,19.2,393.29,17.6,23.1
0.2896,0.0,9.69,0.0,0.585,5.39,72.9,2.7986,6,391,19.2,396.9,21.14,19.7
0.26838,0.0,9.69,0.0,0.585,5.794,70.6,2.8927,6,391,19.2,396.9,14.1,18.3
0.23911999999999997,0.0,9.69,0.0,0.585,6.019,65.3,2.4091,6,391,19.2,396.9,12.92,21.2
0.17783,0.0,9.69,0.0,0.585,5.569,73.5,2.3999,6,391,19.2,395.77,15.1,17.5
0.22438000000000002,0.0,9.69,0.0,0.585,6.027,79.7,2.4982,6,391,19.2,396.9,14.33,16.8
0.04527,0.0,11.93,0.0,0.573,6.12,76.7,2.2875,1,273,21.0,396.9,9.08,20.6
0.06076,0.0,11.93,0.0,0.573,6.976,91.0,2.1675,1,273,21.0,396.9,5.64,23.9
0.10959,0.0,11.93,0.0,0.573,6.794,89.3,2.3889,1,273,21.0,393.45,6.48,22.0

View file

@ -1,151 +0,0 @@
sepal_length,sepal_width,petal_length,petal_width,species
5.1,3.5,1.4,0.2,Iris-setosa
4.9,3.0,1.4,0.2,Iris-setosa
4.7,3.2,1.3,0.2,Iris-setosa
4.6,3.1,1.5,0.2,Iris-setosa
5.0,3.6,1.4,0.2,Iris-setosa
5.4,3.9,1.7,0.4,Iris-setosa
4.6,3.4,1.4,0.3,Iris-setosa
5.0,3.4,1.5,0.2,Iris-setosa
4.4,2.9,1.4,0.2,Iris-setosa
4.9,3.1,1.5,0.1,Iris-setosa
5.4,3.7,1.5,0.2,Iris-setosa
4.8,3.4,1.6,0.2,Iris-setosa
4.8,3.0,1.4,0.1,Iris-setosa
4.3,3.0,1.1,0.1,Iris-setosa
5.8,4.0,1.2,0.2,Iris-setosa
5.7,4.4,1.5,0.4,Iris-setosa
5.4,3.9,1.3,0.4,Iris-setosa
5.1,3.5,1.4,0.3,Iris-setosa
5.7,3.8,1.7,0.3,Iris-setosa
5.1,3.8,1.5,0.3,Iris-setosa
5.4,3.4,1.7,0.2,Iris-setosa
5.1,3.7,1.5,0.4,Iris-setosa
4.6,3.6,1.0,0.2,Iris-setosa
5.1,3.3,1.7,0.5,Iris-setosa
4.8,3.4,1.9,0.2,Iris-setosa
5.0,3.0,1.6,0.2,Iris-setosa
5.0,3.4,1.6,0.4,Iris-setosa
5.2,3.5,1.5,0.2,Iris-setosa
5.2,3.4,1.4,0.2,Iris-setosa
4.7,3.2,1.6,0.2,Iris-setosa
4.8,3.1,1.6,0.2,Iris-setosa
5.4,3.4,1.5,0.4,Iris-setosa
5.2,4.1,1.5,0.1,Iris-setosa
5.5,4.2,1.4,0.2,Iris-setosa
4.9,3.1,1.5,0.1,Iris-setosa
5.0,3.2,1.2,0.2,Iris-setosa
5.5,3.5,1.3,0.2,Iris-setosa
4.9,3.1,1.5,0.1,Iris-setosa
4.4,3.0,1.3,0.2,Iris-setosa
5.1,3.4,1.5,0.2,Iris-setosa
5.0,3.5,1.3,0.3,Iris-setosa
4.5,2.3,1.3,0.3,Iris-setosa
4.4,3.2,1.3,0.2,Iris-setosa
5.0,3.5,1.6,0.6,Iris-setosa
5.1,3.8,1.9,0.4,Iris-setosa
4.8,3.0,1.4,0.3,Iris-setosa
5.1,3.8,1.6,0.2,Iris-setosa
4.6,3.2,1.4,0.2,Iris-setosa
5.3,3.7,1.5,0.2,Iris-setosa
5.0,3.3,1.4,0.2,Iris-setosa
7.0,3.2,4.7,1.4,Iris-versicolor
6.4,3.2,4.5,1.5,Iris-versicolor
6.9,3.1,4.9,1.5,Iris-versicolor
5.5,2.3,4.0,1.3,Iris-versicolor
6.5,2.8,4.6,1.5,Iris-versicolor
5.7,2.8,4.5,1.3,Iris-versicolor
6.3,3.3,4.7,1.6,Iris-versicolor
4.9,2.4,3.3,1.0,Iris-versicolor
6.6,2.9,4.6,1.3,Iris-versicolor
5.2,2.7,3.9,1.4,Iris-versicolor
5.0,2.0,3.5,1.0,Iris-versicolor
5.9,3.0,4.2,1.5,Iris-versicolor
6.0,2.2,4.0,1.0,Iris-versicolor
6.1,2.9,4.7,1.4,Iris-versicolor
5.6,2.9,3.6,1.3,Iris-versicolor
6.7,3.1,4.4,1.4,Iris-versicolor
5.6,3.0,4.5,1.5,Iris-versicolor
5.8,2.7,4.1,1.0,Iris-versicolor
6.2,2.2,4.5,1.5,Iris-versicolor
5.6,2.5,3.9,1.1,Iris-versicolor
5.9,3.2,4.8,1.8,Iris-versicolor
6.1,2.8,4.0,1.3,Iris-versicolor
6.3,2.5,4.9,1.5,Iris-versicolor
6.1,2.8,4.7,1.2,Iris-versicolor
6.4,2.9,4.3,1.3,Iris-versicolor
6.6,3.0,4.4,1.4,Iris-versicolor
6.8,2.8,4.8,1.4,Iris-versicolor
6.7,3.0,5.0,1.7,Iris-versicolor
6.0,2.9,4.5,1.5,Iris-versicolor
5.7,2.6,3.5,1.0,Iris-versicolor
5.5,2.4,3.8,1.1,Iris-versicolor
5.5,2.4,3.7,1.0,Iris-versicolor
5.8,2.7,3.9,1.2,Iris-versicolor
6.0,2.7,5.1,1.6,Iris-versicolor
5.4,3.0,4.5,1.5,Iris-versicolor
6.0,3.4,4.5,1.6,Iris-versicolor
6.7,3.1,4.7,1.5,Iris-versicolor
6.3,2.3,4.4,1.3,Iris-versicolor
5.6,3.0,4.1,1.3,Iris-versicolor
5.5,2.5,4.0,1.3,Iris-versicolor
5.5,2.6,4.4,1.2,Iris-versicolor
6.1,3.0,4.6,1.4,Iris-versicolor
5.8,2.6,4.0,1.2,Iris-versicolor
5.0,2.3,3.3,1.0,Iris-versicolor
5.6,2.7,4.2,1.3,Iris-versicolor
5.7,3.0,4.2,1.2,Iris-versicolor
5.7,2.9,4.2,1.3,Iris-versicolor
6.2,2.9,4.3,1.3,Iris-versicolor
5.1,2.5,3.0,1.1,Iris-versicolor
5.7,2.8,4.1,1.3,Iris-versicolor
6.3,3.3,6.0,2.5,Iris-virginica
5.8,2.7,5.1,1.9,Iris-virginica
7.1,3.0,5.9,2.1,Iris-virginica
6.3,2.9,5.6,1.8,Iris-virginica
6.5,3.0,5.8,2.2,Iris-virginica
7.6,3.0,6.6,2.1,Iris-virginica
4.9,2.5,4.5,1.7,Iris-virginica
7.3,2.9,6.3,1.8,Iris-virginica
6.7,2.5,5.8,1.8,Iris-virginica
7.2,3.6,6.1,2.5,Iris-virginica
6.5,3.2,5.1,2.0,Iris-virginica
6.4,2.7,5.3,1.9,Iris-virginica
6.8,3.0,5.5,2.1,Iris-virginica
5.7,2.5,5.0,2.0,Iris-virginica
5.8,2.8,5.1,2.4,Iris-virginica
6.4,3.2,5.3,2.3,Iris-virginica
6.5,3.0,5.5,1.8,Iris-virginica
7.7,3.8,6.7,2.2,Iris-virginica
7.7,2.6,6.9,2.3,Iris-virginica
6.0,2.2,5.0,1.5,Iris-virginica
6.9,3.2,5.7,2.3,Iris-virginica
5.6,2.8,4.9,2.0,Iris-virginica
7.7,2.8,6.7,2.0,Iris-virginica
6.3,2.7,4.9,1.8,Iris-virginica
6.7,3.3,5.7,2.1,Iris-virginica
7.2,3.2,6.0,1.8,Iris-virginica
6.2,2.8,4.8,1.8,Iris-virginica
6.1,3.0,4.9,1.8,Iris-virginica
6.4,2.8,5.6,2.1,Iris-virginica
7.2,3.0,5.8,1.6,Iris-virginica
7.4,2.8,6.1,1.9,Iris-virginica
7.9,3.8,6.4,2.0,Iris-virginica
6.4,2.8,5.6,2.2,Iris-virginica
6.3,2.8,5.1,1.5,Iris-virginica
6.1,2.6,5.6,1.4,Iris-virginica
7.7,3.0,6.1,2.3,Iris-virginica
6.3,3.4,5.6,2.4,Iris-virginica
6.4,3.1,5.5,1.8,Iris-virginica
6.0,3.0,4.8,1.8,Iris-virginica
6.9,3.1,5.4,2.1,Iris-virginica
6.7,3.1,5.6,2.4,Iris-virginica
6.9,3.1,5.1,2.3,Iris-virginica
5.8,2.7,5.1,1.9,Iris-virginica
6.8,3.2,5.9,2.3,Iris-virginica
6.7,3.3,5.7,2.5,Iris-virginica
6.7,3.0,5.2,2.3,Iris-virginica
6.3,2.5,5.0,1.9,Iris-virginica
6.5,3.0,5.2,2.0,Iris-virginica
6.2,3.4,5.4,2.3,Iris-virginica
5.9,3.0,5.1,1.8,Iris-virginica

The diff for this file is not shown because it is too large.

View file

@ -1,769 +0,0 @@
Pregnancies,Glucose,BloodPressure,SkinThickness,Insulin,BMI,DiabetesPedigreeFunction,Age,Outcome
6,148,72,35,0,33.6,0.627,50,1
1,85,66,29,0,26.6,0.35100000000000003,31,0
8,183,64,0,0,23.3,0.672,32,1
1,89,66,23,94,28.1,0.16699999999999998,21,0
0,137,40,35,168,43.1,2.2880000000000003,33,1
5,116,74,0,0,25.6,0.201,30,0
3,78,50,32,88,31.0,0.248,26,1
10,115,0,0,0,35.3,0.134,29,0
2,197,70,45,543,30.5,0.158,53,1
8,125,96,0,0,0.0,0.23199999999999998,54,1
4,110,92,0,0,37.6,0.191,30,0
10,168,74,0,0,38.0,0.537,34,1
10,139,80,0,0,27.1,1.4409999999999998,57,0
1,189,60,23,846,30.1,0.39799999999999996,59,1
5,166,72,19,175,25.8,0.5870000000000001,51,1
7,100,0,0,0,30.0,0.484,32,1
0,118,84,47,230,45.8,0.551,31,1
7,107,74,0,0,29.6,0.254,31,1
1,103,30,38,83,43.3,0.183,33,0
1,115,70,30,96,34.6,0.529,32,1
3,126,88,41,235,39.3,0.7040000000000001,27,0
8,99,84,0,0,35.4,0.38799999999999996,50,0
7,196,90,0,0,39.8,0.451,41,1
9,119,80,35,0,29.0,0.263,29,1
11,143,94,33,146,36.6,0.254,51,1
10,125,70,26,115,31.1,0.205,41,1
7,147,76,0,0,39.4,0.257,43,1
1,97,66,15,140,23.2,0.48700000000000004,22,0
13,145,82,19,110,22.2,0.245,57,0
5,117,92,0,0,34.1,0.337,38,0
5,109,75,26,0,36.0,0.546,60,0
3,158,76,36,245,31.6,0.851,28,1
3,88,58,11,54,24.8,0.267,22,0
6,92,92,0,0,19.9,0.188,28,0
10,122,78,31,0,27.6,0.512,45,0
4,103,60,33,192,24.0,0.966,33,0
11,138,76,0,0,33.2,0.42,35,0
9,102,76,37,0,32.9,0.665,46,1
2,90,68,42,0,38.2,0.503,27,1
4,111,72,47,207,37.1,1.39,56,1
3,180,64,25,70,34.0,0.271,26,0
7,133,84,0,0,40.2,0.696,37,0
7,106,92,18,0,22.7,0.235,48,0
9,171,110,24,240,45.4,0.721,54,1
7,159,64,0,0,27.4,0.294,40,0
0,180,66,39,0,42.0,1.893,25,1
1,146,56,0,0,29.7,0.564,29,0
2,71,70,27,0,28.0,0.586,22,0
7,103,66,32,0,39.1,0.344,31,1
7,105,0,0,0,0.0,0.305,24,0
1,103,80,11,82,19.4,0.491,22,0
1,101,50,15,36,24.2,0.526,26,0
5,88,66,21,23,24.4,0.342,30,0
8,176,90,34,300,33.7,0.467,58,1
7,150,66,42,342,34.7,0.718,42,0
1,73,50,10,0,23.0,0.248,21,0
7,187,68,39,304,37.7,0.254,41,1
0,100,88,60,110,46.8,0.9620000000000001,31,0
0,146,82,0,0,40.5,1.781,44,0
0,105,64,41,142,41.5,0.17300000000000001,22,0
2,84,0,0,0,0.0,0.304,21,0
8,133,72,0,0,32.9,0.27,39,1
5,44,62,0,0,25.0,0.5870000000000001,36,0
2,141,58,34,128,25.4,0.6990000000000001,24,0
7,114,66,0,0,32.8,0.258,42,1
5,99,74,27,0,29.0,0.203,32,0
0,109,88,30,0,32.5,0.855,38,1
2,109,92,0,0,42.7,0.845,54,0
1,95,66,13,38,19.6,0.33399999999999996,25,0
4,146,85,27,100,28.9,0.18899999999999997,27,0
2,100,66,20,90,32.9,0.867,28,1
5,139,64,35,140,28.6,0.41100000000000003,26,0
13,126,90,0,0,43.4,0.583,42,1
4,129,86,20,270,35.1,0.231,23,0
1,79,75,30,0,32.0,0.396,22,0
1,0,48,20,0,24.7,0.14,22,0
7,62,78,0,0,32.6,0.391,41,0
5,95,72,33,0,37.7,0.37,27,0
0,131,0,0,0,43.2,0.27,26,1
2,112,66,22,0,25.0,0.307,24,0
3,113,44,13,0,22.4,0.14,22,0
2,74,0,0,0,0.0,0.102,22,0
7,83,78,26,71,29.3,0.767,36,0
0,101,65,28,0,24.6,0.237,22,0
5,137,108,0,0,48.8,0.22699999999999998,37,1
2,110,74,29,125,32.4,0.698,27,0
13,106,72,54,0,36.6,0.17800000000000002,45,0
2,100,68,25,71,38.5,0.324,26,0
15,136,70,32,110,37.1,0.153,43,1
1,107,68,19,0,26.5,0.165,24,0
1,80,55,0,0,19.1,0.258,21,0
4,123,80,15,176,32.0,0.44299999999999995,34,0
7,81,78,40,48,46.7,0.261,42,0
4,134,72,0,0,23.8,0.27699999999999997,60,1
2,142,82,18,64,24.7,0.7609999999999999,21,0
6,144,72,27,228,33.9,0.255,40,0
2,92,62,28,0,31.6,0.13,24,0
1,71,48,18,76,20.4,0.32299999999999995,22,0
6,93,50,30,64,28.7,0.35600000000000004,23,0
1,122,90,51,220,49.7,0.325,31,1
1,163,72,0,0,39.0,1.222,33,1
1,151,60,0,0,26.1,0.179,22,0
0,125,96,0,0,22.5,0.262,21,0
1,81,72,18,40,26.6,0.28300000000000003,24,0
2,85,65,0,0,39.6,0.93,27,0
1,126,56,29,152,28.7,0.8009999999999999,21,0
1,96,122,0,0,22.4,0.207,27,0
4,144,58,28,140,29.5,0.287,37,0
3,83,58,31,18,34.3,0.336,25,0
0,95,85,25,36,37.4,0.247,24,1
3,171,72,33,135,33.3,0.19899999999999998,24,1
8,155,62,26,495,34.0,0.5429999999999999,46,1
1,89,76,34,37,31.2,0.192,23,0
4,76,62,0,0,34.0,0.391,25,0
7,160,54,32,175,30.5,0.588,39,1
4,146,92,0,0,31.2,0.539,61,1
5,124,74,0,0,34.0,0.22,38,1
5,78,48,0,0,33.7,0.654,25,0
4,97,60,23,0,28.2,0.44299999999999995,22,0
4,99,76,15,51,23.2,0.223,21,0
0,162,76,56,100,53.2,0.759,25,1
6,111,64,39,0,34.2,0.26,24,0
2,107,74,30,100,33.6,0.40399999999999997,23,0
5,132,80,0,0,26.8,0.18600000000000003,69,0
0,113,76,0,0,33.3,0.278,23,1
1,88,30,42,99,55.0,0.496,26,1
3,120,70,30,135,42.9,0.452,30,0
1,118,58,36,94,33.3,0.261,23,0
1,117,88,24,145,34.5,0.40299999999999997,40,1
0,105,84,0,0,27.9,0.741,62,1
4,173,70,14,168,29.7,0.361,33,1
9,122,56,0,0,33.3,1.114,33,1
3,170,64,37,225,34.5,0.35600000000000004,30,1
8,84,74,31,0,38.3,0.457,39,0
2,96,68,13,49,21.1,0.647,26,0
2,125,60,20,140,33.8,0.08800000000000001,31,0
0,100,70,26,50,30.8,0.597,21,0
0,93,60,25,92,28.7,0.532,22,0
0,129,80,0,0,31.2,0.703,29,0
5,105,72,29,325,36.9,0.159,28,0
3,128,78,0,0,21.1,0.268,55,0
5,106,82,30,0,39.5,0.28600000000000003,38,0
2,108,52,26,63,32.5,0.318,22,0
10,108,66,0,0,32.4,0.272,42,1
4,154,62,31,284,32.8,0.237,23,0
0,102,75,23,0,0.0,0.5720000000000001,21,0
9,57,80,37,0,32.8,0.096,41,0
2,106,64,35,119,30.5,1.4,34,0
5,147,78,0,0,33.7,0.218,65,0
2,90,70,17,0,27.3,0.085,22,0
1,136,74,50,204,37.4,0.39899999999999997,24,0
4,114,65,0,0,21.9,0.43200000000000005,37,0
9,156,86,28,155,34.3,1.189,42,1
1,153,82,42,485,40.6,0.687,23,0
8,188,78,0,0,47.9,0.13699999999999998,43,1
7,152,88,44,0,50.0,0.337,36,1
2,99,52,15,94,24.6,0.637,21,0
1,109,56,21,135,25.2,0.833,23,0
2,88,74,19,53,29.0,0.22899999999999998,22,0
17,163,72,41,114,40.9,0.8170000000000001,47,1
4,151,90,38,0,29.7,0.294,36,0
7,102,74,40,105,37.2,0.204,45,0
0,114,80,34,285,44.2,0.16699999999999998,27,0
2,100,64,23,0,29.7,0.368,21,0
0,131,88,0,0,31.6,0.743,32,1
6,104,74,18,156,29.9,0.722,41,1
3,148,66,25,0,32.5,0.256,22,0
4,120,68,0,0,29.6,0.7090000000000001,34,0
4,110,66,0,0,31.9,0.47100000000000003,29,0
3,111,90,12,78,28.4,0.495,29,0
6,102,82,0,0,30.8,0.18,36,1
6,134,70,23,130,35.4,0.542,29,1
2,87,0,23,0,28.9,0.773,25,0
1,79,60,42,48,43.5,0.6779999999999999,23,0
2,75,64,24,55,29.7,0.37,33,0
8,179,72,42,130,32.7,0.7190000000000001,36,1
6,85,78,0,0,31.2,0.382,42,0
0,129,110,46,130,67.1,0.319,26,1
5,143,78,0,0,45.0,0.19,47,0
5,130,82,0,0,39.1,0.956,37,1
6,87,80,0,0,23.2,0.084,32,0
0,119,64,18,92,34.9,0.725,23,0
1,0,74,20,23,27.7,0.299,21,0
5,73,60,0,0,26.8,0.268,27,0
4,141,74,0,0,27.6,0.244,40,0
7,194,68,28,0,35.9,0.745,41,1
8,181,68,36,495,30.1,0.615,60,1
1,128,98,41,58,32.0,1.321,33,1
8,109,76,39,114,27.9,0.64,31,1
5,139,80,35,160,31.6,0.361,25,1
3,111,62,0,0,22.6,0.142,21,0
9,123,70,44,94,33.1,0.374,40,0
7,159,66,0,0,30.4,0.38299999999999995,36,1
11,135,0,0,0,52.3,0.578,40,1
8,85,55,20,0,24.4,0.136,42,0
5,158,84,41,210,39.4,0.395,29,1
1,105,58,0,0,24.3,0.187,21,0
3,107,62,13,48,22.9,0.6779999999999999,23,1
4,109,64,44,99,34.8,0.905,26,1
4,148,60,27,318,30.9,0.15,29,1
0,113,80,16,0,31.0,0.8740000000000001,21,0
1,138,82,0,0,40.1,0.23600000000000002,28,0
0,108,68,20,0,27.3,0.787,32,0
2,99,70,16,44,20.4,0.235,27,0
6,103,72,32,190,37.7,0.324,55,0
5,111,72,28,0,23.9,0.40700000000000003,27,0
8,196,76,29,280,37.5,0.605,57,1
5,162,104,0,0,37.7,0.151,52,1
1,96,64,27,87,33.2,0.289,21,0
7,184,84,33,0,35.5,0.355,41,1
2,81,60,22,0,27.7,0.29,25,0
0,147,85,54,0,42.8,0.375,24,0
7,179,95,31,0,34.2,0.16399999999999998,60,0
0,140,65,26,130,42.6,0.431,24,1
9,112,82,32,175,34.2,0.26,36,1
12,151,70,40,271,41.8,0.742,38,1
5,109,62,41,129,35.8,0.514,25,1
6,125,68,30,120,30.0,0.46399999999999997,32,0
5,85,74,22,0,29.0,1.224,32,1
5,112,66,0,0,37.8,0.261,41,1
0,177,60,29,478,34.6,1.072,21,1
2,158,90,0,0,31.6,0.805,66,1
7,119,0,0,0,25.2,0.209,37,0
7,142,60,33,190,28.8,0.687,61,0
1,100,66,15,56,23.6,0.6659999999999999,26,0
1,87,78,27,32,34.6,0.10099999999999999,22,0
0,101,76,0,0,35.7,0.198,26,0
3,162,52,38,0,37.2,0.652,24,1
4,197,70,39,744,36.7,2.329,31,0
0,117,80,31,53,45.2,0.08900000000000001,24,0
4,142,86,0,0,44.0,0.645,22,1
6,134,80,37,370,46.2,0.23800000000000002,46,1
1,79,80,25,37,25.4,0.583,22,0
4,122,68,0,0,35.0,0.39399999999999996,29,0
3,74,68,28,45,29.7,0.293,23,0
4,171,72,0,0,43.6,0.479,26,1
7,181,84,21,192,35.9,0.586,51,1
0,179,90,27,0,44.1,0.6859999999999999,23,1
9,164,84,21,0,30.8,0.831,32,1
0,104,76,0,0,18.4,0.5820000000000001,27,0
1,91,64,24,0,29.2,0.192,21,0
4,91,70,32,88,33.1,0.446,22,0
3,139,54,0,0,25.6,0.402,22,1
6,119,50,22,176,27.1,1.318,33,1
2,146,76,35,194,38.2,0.32899999999999996,29,0
9,184,85,15,0,30.0,1.213,49,1
10,122,68,0,0,31.2,0.258,41,0
0,165,90,33,680,52.3,0.42700000000000005,23,0
9,124,70,33,402,35.4,0.282,34,0
1,111,86,19,0,30.1,0.14300000000000002,23,0
9,106,52,0,0,31.2,0.38,42,0
2,129,84,0,0,28.0,0.284,27,0
2,90,80,14,55,24.4,0.249,24,0
0,86,68,32,0,35.8,0.23800000000000002,25,0
12,92,62,7,258,27.6,0.9259999999999999,44,1
1,113,64,35,0,33.6,0.5429999999999999,21,1
3,111,56,39,0,30.1,0.557,30,0
2,114,68,22,0,28.7,0.092,25,0
1,193,50,16,375,25.9,0.655,24,0
11,155,76,28,150,33.3,1.3530000000000002,51,1
3,191,68,15,130,30.9,0.299,34,0
3,141,0,0,0,30.0,0.7609999999999999,27,1
4,95,70,32,0,32.1,0.612,24,0
3,142,80,15,0,32.4,0.2,63,0
4,123,62,0,0,32.0,0.226,35,1
5,96,74,18,67,33.6,0.997,43,0
0,138,0,0,0,36.3,0.9329999999999999,25,1
2,128,64,42,0,40.0,1.101,24,0
0,102,52,0,0,25.1,0.078,21,0
2,146,0,0,0,27.5,0.24,28,1
10,101,86,37,0,45.6,1.136,38,1
2,108,62,32,56,25.2,0.128,21,0
3,122,78,0,0,23.0,0.254,40,0
1,71,78,50,45,33.2,0.42200000000000004,21,0
13,106,70,0,0,34.2,0.251,52,0
2,100,70,52,57,40.5,0.677,25,0
7,106,60,24,0,26.5,0.29600000000000004,29,1
0,104,64,23,116,27.8,0.45399999999999996,23,0
5,114,74,0,0,24.9,0.7440000000000001,57,0
2,108,62,10,278,25.3,0.8809999999999999,22,0
0,146,70,0,0,37.9,0.33399999999999996,28,1
10,129,76,28,122,35.9,0.28,39,0
7,133,88,15,155,32.4,0.262,37,0
7,161,86,0,0,30.4,0.165,47,1
2,108,80,0,0,27.0,0.259,52,1
7,136,74,26,135,26.0,0.647,51,0
5,155,84,44,545,38.7,0.619,34,0
1,119,86,39,220,45.6,0.8079999999999999,29,1
4,96,56,17,49,20.8,0.34,26,0
5,108,72,43,75,36.1,0.263,33,0
0,78,88,29,40,36.9,0.434,21,0
0,107,62,30,74,36.6,0.757,25,1
2,128,78,37,182,43.3,1.224,31,1
1,128,48,45,194,40.5,0.613,24,1
0,161,50,0,0,21.9,0.254,65,0
6,151,62,31,120,35.5,0.6920000000000001,28,0
2,146,70,38,360,28.0,0.337,29,1
0,126,84,29,215,30.7,0.52,24,0
14,100,78,25,184,36.6,0.41200000000000003,46,1
8,112,72,0,0,23.6,0.84,58,0
0,167,0,0,0,32.3,0.8390000000000001,30,1
2,144,58,33,135,31.6,0.42200000000000004,25,1
5,77,82,41,42,35.8,0.156,35,0
5,115,98,0,0,52.9,0.209,28,1
3,150,76,0,0,21.0,0.207,37,0
2,120,76,37,105,39.7,0.215,29,0
10,161,68,23,132,25.5,0.326,47,1
0,137,68,14,148,24.8,0.14300000000000002,21,0
0,128,68,19,180,30.5,1.391,25,1
2,124,68,28,205,32.9,0.875,30,1
6,80,66,30,0,26.2,0.313,41,0
0,106,70,37,148,39.4,0.605,22,0
2,155,74,17,96,26.6,0.433,27,1
3,113,50,10,85,29.5,0.626,25,0
7,109,80,31,0,35.9,1.127,43,1
2,112,68,22,94,34.1,0.315,26,0
3,99,80,11,64,19.3,0.284,30,0
3,182,74,0,0,30.5,0.345,29,1
3,115,66,39,140,38.1,0.15,28,0
6,194,78,0,0,23.5,0.129,59,1
4,129,60,12,231,27.5,0.527,31,0
3,112,74,30,0,31.6,0.19699999999999998,25,1
0,124,70,20,0,27.4,0.254,36,1
13,152,90,33,29,26.8,0.731,43,1
2,112,75,32,0,35.7,0.14800000000000002,21,0
1,157,72,21,168,25.6,0.12300000000000001,24,0
1,122,64,32,156,35.1,0.6920000000000001,30,1
10,179,70,0,0,35.1,0.2,37,0
2,102,86,36,120,45.5,0.127,23,1
6,105,70,32,68,30.8,0.122,37,0
8,118,72,19,0,23.1,1.476,46,0
2,87,58,16,52,32.7,0.166,25,0
1,180,0,0,0,43.3,0.282,41,1
12,106,80,0,0,23.6,0.13699999999999998,44,0
1,95,60,18,58,23.9,0.26,22,0
0,165,76,43,255,47.9,0.259,26,0
0,117,0,0,0,33.8,0.932,44,0
5,115,76,0,0,31.2,0.34299999999999997,44,1
9,152,78,34,171,34.2,0.893,33,1
7,178,84,0,0,39.9,0.331,41,1
1,130,70,13,105,25.9,0.47200000000000003,22,0
1,95,74,21,73,25.9,0.6729999999999999,36,0
1,0,68,35,0,32.0,0.389,22,0
5,122,86,0,0,34.7,0.29,33,0
8,95,72,0,0,36.8,0.485,57,0
8,126,88,36,108,38.5,0.349,49,0
1,139,46,19,83,28.7,0.654,22,0
3,116,0,0,0,23.5,0.187,23,0
3,99,62,19,74,21.8,0.27899999999999997,26,0
5,0,80,32,0,41.0,0.34600000000000003,37,1
4,92,80,0,0,42.2,0.237,29,0
4,137,84,0,0,31.2,0.252,30,0
3,61,82,28,0,34.4,0.243,46,0
1,90,62,12,43,27.2,0.58,24,0
3,90,78,0,0,42.7,0.5589999999999999,21,0
9,165,88,0,0,30.4,0.302,49,1
1,125,50,40,167,33.3,0.9620000000000001,28,1
13,129,0,30,0,39.9,0.569,44,1
12,88,74,40,54,35.3,0.37799999999999995,48,0
1,196,76,36,249,36.5,0.875,29,1
5,189,64,33,325,31.2,0.583,29,1
5,158,70,0,0,29.8,0.207,63,0
5,103,108,37,0,39.2,0.305,65,0
4,146,78,0,0,38.5,0.52,67,1
4,147,74,25,293,34.9,0.385,30,0
5,99,54,28,83,34.0,0.499,30,0
6,124,72,0,0,27.6,0.368,29,1
0,101,64,17,0,21.0,0.252,21,0
3,81,86,16,66,27.5,0.306,22,0
1,133,102,28,140,32.8,0.23399999999999999,45,1
3,173,82,48,465,38.4,2.137,25,1
0,118,64,23,89,0.0,1.7309999999999999,21,0
0,84,64,22,66,35.8,0.545,21,0
2,105,58,40,94,34.9,0.225,25,0
2,122,52,43,158,36.2,0.816,28,0
12,140,82,43,325,39.2,0.528,58,1
0,98,82,15,84,25.2,0.299,22,0
1,87,60,37,75,37.2,0.509,22,0
4,156,75,0,0,48.3,0.23800000000000002,32,1
0,93,100,39,72,43.4,1.021,35,0
1,107,72,30,82,30.8,0.821,24,0
0,105,68,22,0,20.0,0.23600000000000002,22,0
1,109,60,8,182,25.4,0.9470000000000001,21,0
1,90,62,18,59,25.1,1.268,25,0
1,125,70,24,110,24.3,0.221,25,0
1,119,54,13,50,22.3,0.205,24,0
5,116,74,29,0,32.3,0.66,35,1
8,105,100,36,0,43.3,0.239,45,1
5,144,82,26,285,32.0,0.452,58,1
3,100,68,23,81,31.6,0.9490000000000001,28,0
1,100,66,29,196,32.0,0.444,42,0
5,166,76,0,0,45.7,0.34,27,1
1,131,64,14,415,23.7,0.389,21,0
4,116,72,12,87,22.1,0.46299999999999997,37,0
4,158,78,0,0,32.9,0.8029999999999999,31,1
2,127,58,24,275,27.7,1.6,25,0
3,96,56,34,115,24.7,0.9440000000000001,39,0
0,131,66,40,0,34.3,0.196,22,1
3,82,70,0,0,21.1,0.389,25,0
3,193,70,31,0,34.9,0.24100000000000002,25,1
4,95,64,0,0,32.0,0.161,31,1
6,137,61,0,0,24.2,0.151,55,0
5,136,84,41,88,35.0,0.28600000000000003,35,1
9,72,78,25,0,31.6,0.28,38,0
5,168,64,0,0,32.9,0.135,41,1
2,123,48,32,165,42.1,0.52,26,0
4,115,72,0,0,28.9,0.376,46,1
0,101,62,0,0,21.9,0.336,25,0
8,197,74,0,0,25.9,1.1909999999999998,39,1
1,172,68,49,579,42.4,0.7020000000000001,28,1
6,102,90,39,0,35.7,0.674,28,0
1,112,72,30,176,34.4,0.528,25,0
1,143,84,23,310,42.4,1.0759999999999998,22,0
1,143,74,22,61,26.2,0.256,21,0
0,138,60,35,167,34.6,0.534,21,1
3,173,84,33,474,35.7,0.258,22,1
1,97,68,21,0,27.2,1.095,22,0
4,144,82,32,0,38.5,0.5539999999999999,37,1
1,83,68,0,0,18.2,0.624,27,0
3,129,64,29,115,26.4,0.21899999999999997,28,1
1,119,88,41,170,45.3,0.507,26,0
2,94,68,18,76,26.0,0.561,21,0
0,102,64,46,78,40.6,0.496,21,0
2,115,64,22,0,30.8,0.42100000000000004,21,0
8,151,78,32,210,42.9,0.516,36,1
4,184,78,39,277,37.0,0.264,31,1
0,94,0,0,0,0.0,0.256,25,0
1,181,64,30,180,34.1,0.32799999999999996,38,1
0,135,94,46,145,40.6,0.284,26,0
1,95,82,25,180,35.0,0.233,43,1
2,99,0,0,0,22.2,0.10800000000000001,23,0
3,89,74,16,85,30.4,0.551,38,0
1,80,74,11,60,30.0,0.527,22,0
2,139,75,0,0,25.6,0.16699999999999998,29,0
1,90,68,8,0,24.5,1.138,36,0
0,141,0,0,0,42.4,0.205,29,1
12,140,85,33,0,37.4,0.244,41,0
5,147,75,0,0,29.9,0.434,28,0
1,97,70,15,0,18.2,0.147,21,0
6,107,88,0,0,36.8,0.727,31,0
0,189,104,25,0,34.3,0.435,41,1
2,83,66,23,50,32.2,0.49700000000000005,22,0
4,117,64,27,120,33.2,0.23,24,0
8,108,70,0,0,30.5,0.955,33,1
4,117,62,12,0,29.7,0.38,30,1
0,180,78,63,14,59.4,2.42,25,1
1,100,72,12,70,25.3,0.6579999999999999,28,0
0,95,80,45,92,36.5,0.33,26,0
0,104,64,37,64,33.6,0.51,22,1
0,120,74,18,63,30.5,0.285,26,0
1,82,64,13,95,21.2,0.415,23,0
2,134,70,0,0,28.9,0.542,23,1
0,91,68,32,210,39.9,0.381,25,0
2,119,0,0,0,19.6,0.8320000000000001,72,0
2,100,54,28,105,37.8,0.498,24,0
14,175,62,30,0,33.6,0.212,38,1
1,135,54,0,0,26.7,0.687,62,0
5,86,68,28,71,30.2,0.364,24,0
10,148,84,48,237,37.6,1.001,51,1
9,134,74,33,60,25.9,0.46,81,0
9,120,72,22,56,20.8,0.733,48,0
1,71,62,0,0,21.8,0.41600000000000004,26,0
8,74,70,40,49,35.3,0.705,39,0
5,88,78,30,0,27.6,0.258,37,0
10,115,98,0,0,24.0,1.022,34,0
0,124,56,13,105,21.8,0.452,21,0
0,74,52,10,36,27.8,0.26899999999999996,22,0
0,97,64,36,100,36.8,0.6,25,0
8,120,0,0,0,30.0,0.183,38,1
6,154,78,41,140,46.1,0.5710000000000001,27,0
1,144,82,40,0,41.3,0.607,28,0
0,137,70,38,0,33.2,0.17,22,0
0,119,66,27,0,38.8,0.259,22,0
7,136,90,0,0,29.9,0.21,50,0
4,114,64,0,0,28.9,0.126,24,0
0,137,84,27,0,27.3,0.231,59,0
2,105,80,45,191,33.7,0.711,29,1
7,114,76,17,110,23.8,0.466,31,0
8,126,74,38,75,25.9,0.162,39,0
4,132,86,31,0,28.0,0.419,63,0
3,158,70,30,328,35.5,0.344,35,1
0,123,88,37,0,35.2,0.19699999999999998,29,0
4,85,58,22,49,27.8,0.306,28,0
0,84,82,31,125,38.2,0.233,23,0
0,145,0,0,0,44.2,0.63,31,1
0,135,68,42,250,42.3,0.365,24,1
1,139,62,41,480,40.7,0.536,21,0
0,173,78,32,265,46.5,1.159,58,0
4,99,72,17,0,25.6,0.294,28,0
8,194,80,0,0,26.1,0.551,67,0
2,83,65,28,66,36.8,0.629,24,0
2,89,90,30,0,33.5,0.292,42,0
4,99,68,38,0,32.8,0.145,33,0
4,125,70,18,122,28.9,1.1440000000000001,45,1
3,80,0,0,0,0.0,0.174,22,0
6,166,74,0,0,26.6,0.304,66,0
5,110,68,0,0,26.0,0.292,30,0
2,81,72,15,76,30.1,0.547,25,0
7,195,70,33,145,25.1,0.163,55,1
6,154,74,32,193,29.3,0.8390000000000001,39,0
2,117,90,19,71,25.2,0.313,21,0
3,84,72,32,0,37.2,0.267,28,0
6,0,68,41,0,39.0,0.727,41,1
7,94,64,25,79,33.3,0.738,41,0
3,96,78,39,0,37.3,0.23800000000000002,40,0
10,75,82,0,0,33.3,0.263,38,0
0,180,90,26,90,36.5,0.314,35,1
1,130,60,23,170,28.6,0.6920000000000001,21,0
2,84,50,23,76,30.4,0.968,21,0
8,120,78,0,0,25.0,0.409,64,0
12,84,72,31,0,29.7,0.297,46,1
0,139,62,17,210,22.1,0.207,21,0
9,91,68,0,0,24.2,0.2,58,0
2,91,62,0,0,27.3,0.525,22,0
3,99,54,19,86,25.6,0.154,24,0
3,163,70,18,105,31.6,0.268,28,1
9,145,88,34,165,30.3,0.7709999999999999,53,1
7,125,86,0,0,37.6,0.304,51,0
13,76,60,0,0,32.8,0.18,41,0
6,129,90,7,326,19.6,0.5820000000000001,60,0
2,68,70,32,66,25.0,0.187,25,0
3,124,80,33,130,33.2,0.305,26,0
6,114,0,0,0,0.0,0.18899999999999997,26,0
9,130,70,0,0,34.2,0.652,45,1
3,125,58,0,0,31.6,0.151,24,0
3,87,60,18,0,21.8,0.444,21,0
1,97,64,19,82,18.2,0.299,21,0
3,116,74,15,105,26.3,0.107,24,0
0,117,66,31,188,30.8,0.493,22,0
0,111,65,0,0,24.6,0.66,31,0
2,122,60,18,106,29.8,0.7170000000000001,22,0
0,107,76,0,0,45.3,0.6859999999999999,24,0
1,86,66,52,65,41.3,0.917,29,0
6,91,0,0,0,29.8,0.501,31,0
1,77,56,30,56,33.3,1.251,24,0
4,132,0,0,0,32.9,0.302,23,1
0,105,90,0,0,29.6,0.19699999999999998,46,0
0,57,60,0,0,21.7,0.735,67,0
0,127,80,37,210,36.3,0.804,23,0
3,129,92,49,155,36.4,0.968,32,1
8,100,74,40,215,39.4,0.6609999999999999,43,1
3,128,72,25,190,32.4,0.5489999999999999,27,1
10,90,85,32,0,34.9,0.825,56,1
4,84,90,23,56,39.5,0.159,25,0
1,88,78,29,76,32.0,0.365,29,0
8,186,90,35,225,34.5,0.423,37,1
5,187,76,27,207,43.6,1.034,53,1
4,131,68,21,166,33.1,0.16,28,0
1,164,82,43,67,32.8,0.341,50,0
4,189,110,31,0,28.5,0.68,37,0
1,116,70,28,0,27.4,0.204,21,0
3,84,68,30,106,31.9,0.591,25,0
6,114,88,0,0,27.8,0.247,66,0
1,88,62,24,44,29.9,0.42200000000000004,23,0
1,84,64,23,115,36.9,0.47100000000000003,28,0
7,124,70,33,215,25.5,0.161,37,0
1,97,70,40,0,38.1,0.218,30,0
8,110,76,0,0,27.8,0.237,58,0
11,103,68,40,0,46.2,0.126,42,0
11,85,74,0,0,30.1,0.3,35,0
6,125,76,0,0,33.8,0.121,54,1
0,198,66,32,274,41.3,0.502,28,1
1,87,68,34,77,37.6,0.401,24,0
6,99,60,19,54,26.9,0.49700000000000005,32,0
0,91,80,0,0,32.4,0.601,27,0
2,95,54,14,88,26.1,0.748,22,0
1,99,72,30,18,38.6,0.41200000000000003,21,0
6,92,62,32,126,32.0,0.085,46,0
4,154,72,29,126,31.3,0.33799999999999997,37,0
0,121,66,30,165,34.3,0.203,33,1
3,78,70,0,0,32.5,0.27,39,0
2,130,96,0,0,22.6,0.268,21,0
3,111,58,31,44,29.5,0.43,22,0
2,98,60,17,120,34.7,0.198,22,0
1,143,86,30,330,30.1,0.892,23,0
1,119,44,47,63,35.5,0.28,25,0
6,108,44,20,130,24.0,0.813,35,0
2,118,80,0,0,42.9,0.693,21,1
10,133,68,0,0,27.0,0.245,36,0
2,197,70,99,0,34.7,0.575,62,1
0,151,90,46,0,42.1,0.371,21,1
6,109,60,27,0,25.0,0.20600000000000002,27,0
12,121,78,17,0,26.5,0.259,62,0
8,100,76,0,0,38.7,0.19,42,0
8,124,76,24,600,28.7,0.687,52,1
1,93,56,11,0,22.5,0.41700000000000004,22,0
8,143,66,0,0,34.9,0.129,41,1
6,103,66,0,0,24.3,0.249,29,0
3,176,86,27,156,33.3,1.1540000000000001,52,1
0,73,0,0,0,21.1,0.342,25,0
11,111,84,40,0,46.8,0.925,45,1
2,112,78,50,140,39.4,0.175,24,0
3,132,80,0,0,34.4,0.402,44,1
2,82,52,22,115,28.5,1.699,25,0
6,123,72,45,230,33.6,0.733,34,0
0,188,82,14,185,32.0,0.682,22,1
0,67,76,0,0,45.3,0.19399999999999998,46,0
1,89,24,19,25,27.8,0.5589999999999999,21,0
1,173,74,0,0,36.8,0.08800000000000001,38,1
1,109,38,18,120,23.1,0.40700000000000003,26,0
1,108,88,19,0,27.1,0.4,24,0
6,96,0,0,0,23.7,0.19,28,0
1,124,74,36,0,27.8,0.1,30,0
7,150,78,29,126,35.2,0.6920000000000001,54,1
4,183,0,0,0,28.4,0.212,36,1
1,124,60,32,0,35.8,0.514,21,0
1,181,78,42,293,40.0,1.258,22,1
1,92,62,25,41,19.5,0.48200000000000004,25,0
0,152,82,39,272,41.5,0.27,27,0
1,111,62,13,182,24.0,0.138,23,0
3,106,54,21,158,30.9,0.292,24,0
3,174,58,22,194,32.9,0.593,36,1
7,168,88,42,321,38.2,0.787,40,1
6,105,80,28,0,32.5,0.878,26,0
11,138,74,26,144,36.1,0.557,50,1
3,106,72,0,0,25.8,0.207,27,0
6,117,96,0,0,28.7,0.157,30,0
2,68,62,13,15,20.1,0.257,23,0
9,112,82,24,0,28.2,1.2819999999999998,50,1
0,119,0,0,0,32.4,0.141,24,1
2,112,86,42,160,38.4,0.24600000000000002,28,0
2,92,76,20,0,24.2,1.6980000000000002,28,0
6,183,94,0,0,40.8,1.4609999999999999,45,0
0,94,70,27,115,43.5,0.34700000000000003,21,0
2,108,64,0,0,30.8,0.158,21,0
4,90,88,47,54,37.7,0.36200000000000004,29,0
0,125,68,0,0,24.7,0.20600000000000002,21,0
0,132,78,0,0,32.4,0.39299999999999996,21,0
5,128,80,0,0,34.6,0.14400000000000002,45,0
4,94,65,22,0,24.7,0.14800000000000002,21,0
7,114,64,0,0,27.4,0.732,34,1
0,102,78,40,90,34.5,0.23800000000000002,24,0
2,111,60,0,0,26.2,0.34299999999999997,23,0
1,128,82,17,183,27.5,0.115,22,0
10,92,62,0,0,25.9,0.16699999999999998,31,0
13,104,72,0,0,31.2,0.465,38,1
5,104,74,0,0,28.8,0.153,48,0
2,94,76,18,66,31.6,0.649,23,0
7,97,76,32,91,40.9,0.871,32,1
1,100,74,12,46,19.5,0.149,28,0
0,102,86,17,105,29.3,0.695,27,0
4,128,70,0,0,34.3,0.303,24,0
6,147,80,0,0,29.5,0.17800000000000002,50,1
4,90,0,0,0,28.0,0.61,31,0
3,103,72,30,152,27.6,0.73,27,0
2,157,74,35,440,39.4,0.134,30,0
1,167,74,17,144,23.4,0.447,33,1
0,179,50,36,159,37.8,0.455,22,1
11,136,84,35,130,28.3,0.26,42,1
0,107,60,25,0,26.4,0.133,23,0
1,91,54,25,100,25.2,0.23399999999999999,23,0
1,117,60,23,106,33.8,0.466,27,0
5,123,74,40,77,34.1,0.26899999999999996,28,0
2,120,54,0,0,26.8,0.455,27,0
1,106,70,28,135,34.2,0.142,22,0
2,155,52,27,540,38.7,0.24,25,1
2,101,58,35,90,21.8,0.155,22,0
1,120,80,48,200,38.9,1.162,41,0
11,127,106,0,0,39.0,0.19,51,0
3,80,82,31,70,34.2,1.2919999999999998,27,1
10,162,84,0,0,27.7,0.182,54,0
1,199,76,43,0,42.9,1.3940000000000001,22,1
8,167,106,46,231,37.6,0.165,43,1
9,145,80,46,130,37.9,0.637,40,1
6,115,60,39,0,33.7,0.245,40,1
1,112,80,45,132,34.8,0.217,24,0
4,145,82,18,0,32.5,0.235,70,1
10,111,70,27,0,27.5,0.141,40,1
6,98,58,33,190,34.0,0.43,43,0
9,154,78,30,100,30.9,0.16399999999999998,45,0
6,165,68,26,168,33.6,0.631,49,0
1,99,58,10,0,25.4,0.551,21,0
10,68,106,23,49,35.5,0.285,47,0
3,123,100,35,240,57.3,0.88,22,0
8,91,82,0,0,35.6,0.5870000000000001,68,0
6,195,70,0,0,30.9,0.32799999999999996,31,1
9,156,86,0,0,24.8,0.23,53,1
0,93,60,0,0,35.3,0.263,25,0
3,121,52,0,0,36.0,0.127,25,1
2,101,58,17,265,24.2,0.614,23,0
2,56,56,28,45,24.2,0.332,22,0
0,162,76,36,0,49.6,0.364,26,1
0,95,64,39,105,44.6,0.366,22,0
4,125,80,0,0,32.3,0.536,27,1
5,136,82,0,0,0.0,0.64,69,0
2,129,74,26,205,33.2,0.591,25,0
3,130,64,0,0,23.1,0.314,22,0
1,107,50,19,0,28.3,0.18100000000000002,29,0
1,140,74,26,180,24.1,0.828,23,0
1,144,82,46,180,46.1,0.335,46,1
8,107,80,0,0,24.6,0.856,34,0
13,158,114,0,0,42.3,0.257,44,1
2,121,70,32,95,39.1,0.8859999999999999,23,0
7,129,68,49,125,38.5,0.439,43,1
2,90,60,0,0,23.5,0.191,25,0
7,142,90,24,480,30.4,0.128,43,1
3,169,74,19,125,29.9,0.268,31,1
0,99,0,0,0,25.0,0.253,22,0
4,127,88,11,155,34.5,0.598,28,0
4,118,70,0,0,44.5,0.904,26,0
2,122,76,27,200,35.9,0.483,26,0
6,125,78,31,0,27.6,0.565,49,1
1,168,88,29,0,35.0,0.905,52,1
2,129,0,0,0,38.5,0.304,41,0
4,110,76,20,100,28.4,0.11800000000000001,27,0
6,80,80,36,0,39.8,0.177,28,0
10,115,0,0,0,0.0,0.261,30,1
2,127,46,21,335,34.4,0.17600000000000002,22,0
9,164,78,0,0,32.8,0.14800000000000002,45,1
2,93,64,32,160,38.0,0.674,23,1
3,158,64,13,387,31.2,0.295,24,0
5,126,78,27,22,29.6,0.439,40,0
10,129,62,36,0,41.2,0.441,38,1
0,134,58,20,291,26.4,0.35200000000000004,21,0
3,102,74,0,0,29.5,0.121,32,0
7,187,50,33,392,33.9,0.826,34,1
3,173,78,39,185,33.8,0.97,31,1
10,94,72,18,0,23.1,0.595,56,0
1,108,60,46,178,35.5,0.415,24,0
5,97,76,27,0,35.6,0.37799999999999995,52,1
4,83,86,19,0,29.3,0.317,34,0
1,114,66,36,200,38.1,0.289,21,0
1,149,68,29,127,29.3,0.349,42,1
5,117,86,30,105,39.1,0.251,42,0
1,111,94,0,0,32.8,0.265,45,0
4,112,78,40,0,39.4,0.23600000000000002,38,0
1,116,78,29,180,36.1,0.496,25,0
0,141,84,26,0,32.4,0.433,22,0
2,175,88,0,0,22.9,0.326,22,0
2,92,52,0,0,30.1,0.141,22,0
3,130,78,23,79,28.4,0.32299999999999995,34,1
8,120,86,0,0,28.4,0.259,22,1
2,174,88,37,120,44.5,0.6459999999999999,24,1
2,106,56,27,165,29.0,0.426,22,0
2,105,75,0,0,23.3,0.56,53,0
4,95,60,32,0,35.4,0.284,28,0
0,126,86,27,120,27.4,0.515,21,0
8,65,72,23,0,32.0,0.6,42,0
2,99,60,17,160,36.6,0.45299999999999996,21,0
1,102,74,0,0,39.5,0.293,42,1
11,120,80,37,150,42.3,0.785,48,1
3,102,44,20,94,30.8,0.4,26,0
1,109,58,18,116,28.5,0.21899999999999997,22,0
9,140,94,0,0,32.7,0.7340000000000001,45,1
13,153,88,37,140,40.6,1.1740000000000002,39,0
12,100,84,33,105,30.0,0.488,46,0
1,147,94,41,0,49.3,0.358,27,1
1,81,74,41,57,46.3,1.0959999999999999,32,0
3,187,70,22,200,36.4,0.408,36,1
6,162,62,0,0,24.3,0.17800000000000002,50,1
4,136,70,0,0,31.2,1.182,22,1
1,121,78,39,74,39.0,0.261,28,0
3,108,62,24,0,26.0,0.223,25,0
0,181,88,44,510,43.3,0.222,26,1
8,154,78,32,0,32.4,0.44299999999999995,45,1
1,128,88,39,110,36.5,1.057,37,1
7,137,90,41,0,32.0,0.391,39,0
0,123,72,0,0,36.3,0.258,52,1
1,106,76,0,0,37.5,0.19699999999999998,26,0
6,190,92,0,0,35.5,0.278,66,1
2,88,58,26,16,28.4,0.7659999999999999,22,0
9,170,74,31,0,44.0,0.40299999999999997,43,1
9,89,62,0,0,22.5,0.142,33,0
10,101,76,48,180,32.9,0.171,63,0
2,122,70,27,0,36.8,0.34,27,0
5,121,72,23,112,26.2,0.245,30,0
1,126,60,0,0,30.1,0.349,47,1
1,93,70,31,0,30.4,0.315,23,0

View file

@@ -1,88 +0,0 @@
PassengerId,Survived,Pclass,Name,Sex,Age,SibSp,Parch,Ticket,Fare,Cabin,Embarked
904,1,1,"Snyder, Mrs. John Pillsbury (Nelle Stevenson)",female,23.0,1,0,21228,82.2667,B45,S
906,1,1,"Chaffee, Mrs. Herbert Fuller (Carrie Constance Toogood)",female,47.0,1,0,W.E.P. 5734,61.175,E31,S
916,1,1,"Ryerson, Mrs. Arthur Larned (Emily Maria Borie)",female,48.0,1,3,PC 17608,262.375,B57 B59 B63 B66,C
918,1,1,"Ostby, Miss. Helene Ragnhild",female,22.0,0,1,113509,61.9792,B36,C
920,0,1,"Brady, Mr. John Bertram",male,41.0,0,0,113054,30.5,A21,S
926,0,1,"Mock, Mr. Philipp Edmund",male,30.0,1,0,13236,57.75,C78,C
936,1,1,"Kimball, Mrs. Edwin Nelson Jr (Gertrude Parsons)",female,45.0,1,0,11753,52.5542,D19,S
938,0,1,"Chevre, Mr. Paul Romaine",male,45.0,0,0,PC 17594,29.7,A9,C
940,1,1,"Bucknell, Mrs. William Robert (Emma Eliza Ward)",female,60.0,0,0,11813,76.2917,D15,C
942,0,1,"Smith, Mr. Lucien Philip",male,24.0,1,0,13695,60.0,C31,S
945,1,1,"Fortune, Miss. Ethel Flora",female,28.0,3,2,19950,263.0,C23 C25 C27,S
949,0,3,"Abelseth, Mr. Olaus Jorgensen",male,25.0,0,0,348122,7.65,F G63,S
951,1,1,"Chaudanson, Miss. Victorine",female,36.0,0,0,PC 17608,262.375,B61,C
956,0,1,"Ryerson, Master. John Borie",male,13.0,2,2,PC 17608,262.375,B57 B59 B63 B66,C
960,0,1,"Tucker, Mr. Gilbert Milligan Jr",male,31.0,0,0,2543,28.5375,C53,C
961,1,1,"Fortune, Mrs. Mark (Mary McDougald)",female,60.0,1,4,19950,263.0,C23 C25 C27,S
965,0,1,"Ovies y Rodriguez, Mr. Servando",male,28.5,0,0,PC 17562,27.7208,D43,C
966,1,1,"Geiger, Miss. Amalie",female,35.0,0,0,113503,211.5,C130,C
967,0,1,"Keeping, Mr. Edwin",male,32.5,0,0,113503,211.5,C132,C
969,1,1,"Cornell, Mrs. Robert Clifford (Malvina Helen Lamson)",female,55.0,2,0,11770,25.7,C101,S
973,0,1,"Straus, Mr. Isidor",male,67.0,1,0,PC 17483,221.7792,C55 C57,S
984,1,1,"Davidson, Mrs. Thornton (Orian Hays)",female,27.0,1,2,F.C. 12750,52.0,B71,S
988,1,1,"Cavendish, Mrs. Tyrell William (Julia Florence Siegel)",female,76.0,1,0,19877,78.85,C46,S
992,1,1,"Stengel, Mrs. Charles Emil Henry (Annie May Morris)",female,43.0,1,0,11778,55.4417,C116,C
1001,0,2,"Swane, Mr. George",male,18.5,0,0,248734,13.0,F,S
1004,1,1,"Evans, Miss. Edith Corse",female,36.0,0,0,PC 17531,31.6792,A29,C
1006,1,1,"Straus, Mrs. Isidor (Rosalie Ida Blun)",female,63.0,1,0,PC 17483,221.7792,C55 C57,S
1009,1,3,"Sandstrom, Miss. Beatrice Irene",female,1.0,1,1,PP 9549,16.7,G6,S
1010,0,1,"Beattie, Mr. Thomson",male,36.0,0,0,13050,75.2417,C6,C
1014,1,1,"Schabert, Mrs. Paul (Emma Mock)",female,35.0,1,0,13236,57.75,C28,C
1023,0,1,"Gracie, Col. Archibald IV",male,53.0,0,0,113780,28.5,C51,C
1034,0,1,"Ryerson, Mr. Arthur Larned",male,61.0,1,3,PC 17608,262.375,B57 B59 B63 B66,C
1042,1,1,"Earnshaw, Mrs. Boulton (Olive Potter)",female,23.0,0,1,11767,83.1583,C54,C
1048,1,1,"Bird, Miss. Ellen",female,29.0,0,0,PC 17483,221.7792,C97,S
1050,0,1,"Borebank, Mr. John James",male,42.0,0,0,110489,26.55,D22,S
1058,0,1,"Brandeis, Mr. Emil",male,48.0,0,0,PC 17591,50.4958,B10,C
1069,0,1,"Stengel, Mr. Charles Emil Henry",male,54.0,1,0,11778,55.4417,C116,C
1070,1,2,"Becker, Mrs. Allen Oliver (Nellie E Baumgardner)",female,36.0,0,3,230136,39.0,F4,S
1071,1,1,"Compton, Mrs. Alexander Taylor (Mary Eliza Ingersoll)",female,64.0,0,2,PC 17756,83.1583,E45,C
1073,0,1,"Compton, Mr. Alexander Taylor Jr",male,37.0,1,1,PC 17756,83.1583,E52,C
1074,1,1,"Marvin, Mrs. Daniel Warner (Mary Graham Carmichael Farquarson)",female,18.0,1,0,113773,53.1,D30,S
1076,1,1,"Douglas, Mrs. Frederick Charles (Mary Helene Baxter)",female,27.0,1,1,PC 17558,247.5208,B58 B60,C
1088,0,1,"Spedden, Master. Robert Douglas",male,6.0,0,2,16966,134.5,E34,C
1094,0,1,"Astor, Col. John Jacob",male,47.0,1,0,PC 17757,227.525,C62 C64,C
1100,1,1,"Rosenbaum, Miss. Edith Louise",female,33.0,0,0,PC 17613,27.7208,A11,C
1107,0,1,"Head, Mr. Christopher",male,42.0,0,0,113038,42.5,B11,S
1110,1,1,"Widener, Mrs. George Dunton (Eleanor Elkins)",female,50.0,1,1,113503,211.5,C80,C
1114,1,2,"Cook, Mrs. (Selena Rogers)",female,22.0,0,0,W./C. 14266,10.5,F33,S
1126,0,1,"Cumings, Mr. John Bradley",male,39.0,1,0,PC 17599,71.2833,C85,C
1128,0,1,"Warren, Mr. Frank Manley",male,64.0,1,0,110813,75.25,D37,C
1131,1,1,"Douglas, Mrs. Walter Donald (Mahala Dutton)",female,48.0,1,0,PC 17761,106.425,C86,C
1134,0,1,"Spedden, Mr. Frederic Oakley",male,45.0,1,1,16966,134.5,E34,C
1137,0,1,"Kenyon, Mr. Frederick R",male,41.0,1,0,17464,51.8625,D21,S
1144,0,1,"Clark, Mr. Walter Miller",male,27.0,1,0,13508,136.7792,C89,C
1162,0,1,"McCaffry, Mr. Thomas Francis",male,46.0,0,0,13050,75.2417,C6,C
1164,1,1,"Clark, Mrs. Walter Miller (Virginia McDowell)",female,26.0,1,0,13508,136.7792,C89,C
1179,0,1,"Snyder, Mr. John Pillsbury",male,24.0,1,0,21228,82.2667,B45,S
1185,0,1,"Dodge, Dr. Washington",male,53.0,1,1,33638,81.8583,A34,S
1197,1,1,"Crosby, Mrs. Edward Gifford (Catherine Elizabeth Halstead)",female,64.0,1,1,112901,26.55,B26,S
1198,0,1,"Allison, Mr. Hudson Joshua Creighton",male,30.0,1,2,113781,151.55,C22 C26,S
1200,0,1,"Hays, Mr. Charles Melville",male,55.0,1,1,12749,93.5,B69,S
1206,1,1,"White, Mrs. John Stuart (Ella Holmes)",female,55.0,0,0,PC 17760,135.6333,C32,C
1208,0,1,"Spencer, Mr. William Augustus",male,57.0,1,0,PC 17569,146.5208,B78,C
1213,0,3,"Krekorian, Mr. Neshan",male,25.0,0,0,2654,7.2292,F E57,C
1214,0,2,"Nesson, Mr. Israel",male,26.0,0,0,244368,13.0,F2,S
1218,1,2,"Becker, Miss. Ruth Elizabeth",female,12.0,2,1,230136,39.0,F4,S
1223,0,1,"Dulles, Mr. William Crothers",male,39.0,0,0,PC 17580,29.7,A18,C
1227,0,1,"Maguire, Mr. John Edward",male,30.0,0,0,110469,26.0,C106,S
1235,1,1,"Cardeza, Mrs. James Warburton Martinez (Charlotte Wardle Drake)",female,58.0,0,1,PC 17755,512.3292,B51 B53 B55,C
1242,1,1,"Greenfield, Mrs. Leo David (Blanche Strouse)",female,45.0,0,1,PC 17759,63.3583,D10 D12,C
1247,0,1,"Julian, Mr. Henry Forbes",male,50.0,0,0,113044,26.0,E60,S
1248,1,1,"Brown, Mrs. John Murray (Caroline Lane Lamson)",female,59.0,2,0,11769,51.4792,C101,S
1256,1,1,"Harder, Mrs. George Achilles (Dorothy Annan)",female,25.0,1,0,11765,55.4417,E50,C
1263,1,1,"Wilson, Miss. Helen Alice",female,31.0,0,0,16966,134.5,E39 E41,C
1264,0,1,"Ismay, Mr. Joseph Bruce",male,49.0,0,0,112058,0.0,B52 B54 B56,S
1266,1,1,"Dodge, Mrs. Washington (Ruth Vidaver)",female,54.0,1,1,33638,81.8583,A34,S
1270,0,1,"Hipkins, Mr. William Edward",male,55.0,0,0,680,50.0,C39,S
1282,0,1,"Payne, Mr. Vivian Ponsonby",male,23.0,0,0,12749,93.5,B24,S
1283,1,1,"Lines, Mrs. Ernest H (Elizabeth Lindsey James)",female,51.0,0,1,PC 17592,39.4,D28,S
1287,1,1,"Smith, Mrs. Lucien Philip (Mary Eloise Hughes)",female,18.0,1,0,13695,60.0,C31,S
1289,1,1,"Frolicher-Stehli, Mrs. Maxmillian (Margaretha Emerentia Stehli)",female,48.0,1,1,13567,79.2,B41,C
1292,1,1,"Bonnell, Miss. Caroline",female,30.0,0,0,36928,164.8667,C7,S
1296,0,1,"Frauenthal, Mr. Isaac Gerald",male,43.0,1,0,17765,27.7208,D40,C
1297,0,2,"Nourney, Mr. Alfred (Baron von Drachstedt"")""",male,20.0,0,0,SC/PARIS 2166,13.8625,D38,C
1299,0,1,"Widener, Mr. George Dunton",male,50.0,1,1,113503,211.5,C80,C
1303,1,1,"Minahan, Mrs. William Edward (Lillian E Thorpe)",female,37.0,1,0,19928,90.0,C78,Q
1306,1,1,"Oliva y Ocana, Dona. Fermina",female,39.0,0,0,PC 17758,108.9,C105,C