feat: uploaded all files

Rushil Umaretiya 2022-09-19 22:37:08 -04:00
parent ca35acb05f
commit af5c9d4af4
No known key found for this signature in database
GPG Key ID: 4E8FAF9C926AF959
37 changed files with 13308 additions and 8747 deletions

File diff suppressed because it is too large

@@ -0,0 +1,816 @@
{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"name": "Salary_ML_Project.ipynb",
"provenance": [],
"collapsed_sections": []
},
"kernelspec": {
"display_name": "Python 3",
"name": "python3"
},
"language_info": {
"name": "python"
}
},
"cells": [
{
"cell_type": "markdown",
"metadata": {
"id": "LXy6hmTOhed1"
},
"source": [
"Setting up Colab"
]
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "PiiiSef_6tle",
"outputId": "7fedf5f1-1b59-441c-d6ff-a32fd9987468"
},
"source": [
"from google.colab import drive\n",
"\n",
"drive.mount('/content/drive', force_remount = True)"
],
"execution_count": 132,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Mounted at /content/drive\n"
]
}
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "NIraf5LmHxm-"
},
"source": [
"### Read Dataset"
]
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/",
"height": 307
},
"id": "6YndzY3swBQj",
"outputId": "a342b5b8-cc12-4979-ec15-4a09000d3151"
},
"source": [
"import pandas as pd\n",
"df = pd.read_csv('/content/drive/MyDrive/ML Salary Project /Data/project-final.csv')\n",
"df.head()"
],
"execution_count": 133,
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>SalaryUSD</th>\n",
" <th>Country</th>\n",
" <th>PrimaryDatabase</th>\n",
" <th>YearsWithThisDatabase</th>\n",
" <th>EmploymentStatus</th>\n",
" <th>JobTitle</th>\n",
" <th>ManageStaff</th>\n",
" <th>YearsWithThisTypeOfJob</th>\n",
" <th>HowManyCompanies</th>\n",
" <th>OtherPeopleOnYourTeam</th>\n",
" <th>EmploymentSector</th>\n",
" <th>CareerPlansThisYear</th>\n",
" <th>Gender</th>\n",
" <th>DatabaseServers</th>\n",
" <th>Education</th>\n",
" <th>Certifications</th>\n",
" <th>HoursWorkedPerWeek</th>\n",
" <th>TelecommuteDaysPerWeek</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Low</td>\n",
" <td>Sweden</td>\n",
" <td>M</td>\n",
" <td>4</td>\n",
" <td>E</td>\n",
" <td>D</td>\n",
" <td>Y</td>\n",
" <td>4</td>\n",
" <td>1.0</td>\n",
" <td>0</td>\n",
" <td>PB</td>\n",
" <td>S</td>\n",
" <td>M</td>\n",
" <td>373.411662</td>\n",
" <td>'Bachelors (4 years)'</td>\n",
" <td>N</td>\n",
" <td>43.16509</td>\n",
" <td>zero</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>High</td>\n",
" <td>USA</td>\n",
" <td>M</td>\n",
" <td>15</td>\n",
" <td>E</td>\n",
" <td>DBAP</td>\n",
" <td>N</td>\n",
" <td>25</td>\n",
" <td>5.0</td>\n",
" <td>0</td>\n",
" <td>PB</td>\n",
" <td>S</td>\n",
" <td>M</td>\n",
" <td>373.411662</td>\n",
" <td>'Bachelors (4 years)'</td>\n",
" <td>N</td>\n",
" <td>43.16509</td>\n",
" <td>zero</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2</th>\n",
" <td>High-Mid</td>\n",
" <td>USA</td>\n",
" <td>M</td>\n",
" <td>12</td>\n",
" <td>E</td>\n",
" <td>DBAG</td>\n",
" <td>Y</td>\n",
" <td>6</td>\n",
" <td>4.0</td>\n",
" <td>1</td>\n",
" <td>PB</td>\n",
" <td>S</td>\n",
" <td>M</td>\n",
" <td>373.411662</td>\n",
" <td>'Bachelors (4 years)'</td>\n",
" <td>N</td>\n",
" <td>43.16509</td>\n",
" <td>zero</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>Low</td>\n",
" <td>UK</td>\n",
" <td>M</td>\n",
" <td>10</td>\n",
" <td>E</td>\n",
" <td>DBAP</td>\n",
" <td>N</td>\n",
" <td>5</td>\n",
" <td>2.0</td>\n",
" <td>0</td>\n",
" <td>E</td>\n",
" <td>S</td>\n",
" <td>M</td>\n",
" <td>373.411662</td>\n",
" <td>'Bachelors (4 years)'</td>\n",
" <td>N</td>\n",
" <td>43.16509</td>\n",
" <td>zero</td>\n",
" </tr>\n",
" <tr>\n",
" <th>4</th>\n",
" <td>High-Mid</td>\n",
" <td>USA</td>\n",
" <td>M</td>\n",
" <td>5</td>\n",
" <td>E</td>\n",
" <td>D</td>\n",
" <td>N</td>\n",
" <td>5</td>\n",
" <td>1.0</td>\n",
" <td>0</td>\n",
" <td>PB</td>\n",
" <td>S</td>\n",
" <td>M</td>\n",
" <td>373.411662</td>\n",
" <td>'Bachelors (4 years)'</td>\n",
" <td>N</td>\n",
" <td>43.16509</td>\n",
" <td>zero</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" SalaryUSD Country ... HoursWorkedPerWeek TelecommuteDaysPerWeek\n",
"0 Low Sweden ... 43.16509 zero\n",
"1 High USA ... 43.16509 zero\n",
"2 High-Mid USA ... 43.16509 zero\n",
"3 Low UK ... 43.16509 zero\n",
"4 High-Mid USA ... 43.16509 zero\n",
"\n",
"[5 rows x 18 columns]"
]
},
"metadata": {},
"execution_count": 133
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "GnlW4u1wxi7l"
},
"source": [
"categorical_attributes = [\"SalaryUSD\", \"Country\", \"PrimaryDatabase\", \"EmploymentStatus\", \"JobTitle\", \"ManageStaff\", \"EmploymentSector\", \"CareerPlansThisYear\", \"Gender\", \"Education\", \"Certifications\", \"TelecommuteDaysPerWeek\"]\n",
"num_attributes = [name for name in list(df.columns) if name not in categorical_attributes]"
],
"execution_count": 134,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "j-V2E0q-zagM"
},
"source": [
"from sklearn.pipeline import Pipeline\n",
"from sklearn.preprocessing import OrdinalEncoder\n",
"from sklearn.compose import ColumnTransformer\n",
"from sklearn.preprocessing import MinMaxScaler\n",
"\n",
"#normalizes numerical attributes\n",
"num_pipeline = Pipeline([('min_max_scaler', MinMaxScaler())]) \n",
"\n",
"#converts categories into numbers\n",
"cat_pipeline = Pipeline([\n",
" ('number_converter', OrdinalEncoder()),\n",
" ])\n",
"\n",
"#combining both pipelines\n",
"full_pipeline = ColumnTransformer([\n",
" (\"cat\", cat_pipeline, categorical_attributes),\n",
" (\"num\", num_pipeline, num_attributes)\n",
" ]) \n",
"\n",
"# fit_transform calculates the standard deviation of the whole training set\n",
"df_prep = full_pipeline.fit_transform(df) \n",
"df_prep = pd.DataFrame(df_prep, columns=categorical_attributes+num_attributes)\n",
"df_prep.head()\n",
"\n",
"# Split data to X and Y\n",
"X = df_prep.drop(['SalaryUSD'], axis=1, inplace=False)\n",
"Y = df_prep['SalaryUSD']"
],
"execution_count": 135,
"outputs": []
},
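{
"cell_type": "markdown",
"metadata": {},
"source": [
"Sanity check (added sketch, not part of the original run): after `fit_transform`, the MinMax-scaled numeric columns should lie in [0, 1] and each ordinal-encoded column should hold one integer code per category."
]
},
{
"cell_type": "code",
"metadata": {},
"source": [
"# Added sketch: verify the pipeline output described above\n",
"assert df_prep[num_attributes].min().min() >= 0.0\n",
"assert df_prep[num_attributes].max().max() <= 1.0\n",
"df_prep[categorical_attributes].nunique()"
],
"execution_count": null,
"outputs": []
},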
{
"cell_type": "markdown",
"metadata": {
"id": "AZCzlB6KcNgd"
},
"source": [
"### Attribute Selection Algorithim\n",
"\n",
"---\n",
"\n"
]
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/",
"height": 417
},
"id": "TqM0BzEp0Vu1",
"outputId": "8914f90a-0885-43a2-b40d-f2ef7807678d"
},
"source": [
"from sklearn.feature_selection import SelectKBest, SelectFromModel\n",
"from sklearn.feature_selection import chi2, RFE, VarianceThreshold\n",
"from sklearn.ensemble import RandomForestClassifier\n",
"from sklearn.decomposition import PCA\n",
"\n",
"selector = None\n",
"\n",
"# selector = VarianceThreshold(threshold=(.1))\n",
"# selector = SelectKBest(chi2, k=10)\n",
"# selector = RFE(estimator=RandomForestClassifier())\n",
"selector = PCA(n_components=4)\n",
"\n",
"selector.fit(X, Y)\n",
"\n",
"X_selected = X if selector == None else pd.DataFrame(selector.transform(X))\n",
"\n",
"if type(selector) != PCA:\n",
" features = selector.get_support(indices=True)\n",
" features = [column for column in X.columns[features]]\n",
" X_selected.columns = features\n",
"\n",
"\n",
"# X_selected = X\n",
"\n",
"X = X_selected\n",
"\n",
"X_selected"
],
"execution_count": 136,
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>0</th>\n",
" <th>1</th>\n",
" <th>2</th>\n",
" <th>3</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>-4.138638</td>\n",
" <td>-4.141160</td>\n",
" <td>-1.060148</td>\n",
" <td>-0.344151</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>-12.162499</td>\n",
" <td>1.815431</td>\n",
" <td>-0.994463</td>\n",
" <td>-0.261570</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2</th>\n",
" <td>-12.157918</td>\n",
" <td>0.826832</td>\n",
" <td>-1.003102</td>\n",
" <td>-0.267811</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>-11.176152</td>\n",
" <td>1.820139</td>\n",
" <td>-1.147436</td>\n",
" <td>0.215920</td>\n",
" </tr>\n",
" <tr>\n",
" <th>4</th>\n",
" <td>-12.138983</td>\n",
" <td>-4.182848</td>\n",
" <td>-1.095431</td>\n",
" <td>-0.348160</td>\n",
" </tr>\n",
" <tr>\n",
" <th>...</th>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" </tr>\n",
" <tr>\n",
" <th>10334</th>\n",
" <td>-11.146052</td>\n",
" <td>-2.188204</td>\n",
" <td>-0.909104</td>\n",
" <td>-0.143865</td>\n",
" </tr>\n",
" <tr>\n",
" <th>10335</th>\n",
" <td>3.841024</td>\n",
" <td>-2.151027</td>\n",
" <td>1.931727</td>\n",
" <td>-0.344638</td>\n",
" </tr>\n",
" <tr>\n",
" <th>10336</th>\n",
" <td>17.828090</td>\n",
" <td>0.889711</td>\n",
" <td>2.020452</td>\n",
" <td>-0.317144</td>\n",
" </tr>\n",
" <tr>\n",
" <th>10337</th>\n",
" <td>-12.161796</td>\n",
" <td>-2.293752</td>\n",
" <td>4.563015</td>\n",
" <td>0.136997</td>\n",
" </tr>\n",
" <tr>\n",
" <th>10338</th>\n",
" <td>8.880702</td>\n",
" <td>-8.122048</td>\n",
" <td>-0.633003</td>\n",
" <td>-0.221737</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"<p>10339 rows × 4 columns</p>\n",
"</div>"
],
"text/plain": [
" 0 1 2 3\n",
"0 -4.138638 -4.141160 -1.060148 -0.344151\n",
"1 -12.162499 1.815431 -0.994463 -0.261570\n",
"2 -12.157918 0.826832 -1.003102 -0.267811\n",
"3 -11.176152 1.820139 -1.147436 0.215920\n",
"4 -12.138983 -4.182848 -1.095431 -0.348160\n",
"... ... ... ... ...\n",
"10334 -11.146052 -2.188204 -0.909104 -0.143865\n",
"10335 3.841024 -2.151027 1.931727 -0.344638\n",
"10336 17.828090 0.889711 2.020452 -0.317144\n",
"10337 -12.161796 -2.293752 4.563015 0.136997\n",
"10338 8.880702 -8.122048 -0.633003 -0.221737\n",
"\n",
"[10339 rows x 4 columns]"
]
},
"metadata": {},
"execution_count": 136
}
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "grd5N391HlHf"
},
"source": [
"### Split into training and Testing "
]
},
{
"cell_type": "code",
"metadata": {
"id": "Ik3lh2A4zQme"
},
"source": [
"from sklearn.model_selection import train_test_split\n",
"\n",
"X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.2, random_state=42)\n",
"X_train.to_csv('/content/drive/MyDrive/ML Salary Project /Data/x-train.csv')\n",
"X_test.to_csv('/content/drive/MyDrive/ML Salary Project /Data/x-test.csv')\n",
"Y_train.to_csv('/content/drive/MyDrive/ML Salary Project /Data/Y-train.csv')\n",
"Y_test.to_csv('/content/drive/MyDrive/ML Salary Project /Data/Y-test.csv')"
],
"execution_count": 137,
"outputs": []
},
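{
"cell_type": "markdown",
"metadata": {},
"source": [
"Optional variant (added sketch): passing `stratify=Y` to `train_test_split` keeps the four salary bands in the same proportions across the train and test splits."
]
},
{
"cell_type": "code",
"metadata": {},
"source": [
"# Added sketch, not the split used below: stratified 80/20 split\n",
"X_tr, X_te, Y_tr, Y_te = train_test_split(X, Y, test_size=0.2, random_state=42, stratify=Y)\n",
"Y_tr.value_counts(normalize=True)"
],
"execution_count": null,
"outputs": []
},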
{
"cell_type": "markdown",
"metadata": {
"id": "2Wthf8O2JAGW"
},
"source": [
"### Trying Different Classifiers"
]
},
{
"cell_type": "code",
"metadata": {
"id": "v5um6ThxKPUO"
},
"source": [
"from sklearn.metrics import accuracy_score, confusion_matrix\n",
"def get_train_test_acc(model, X_train, y_train, X_test, y_test):\n",
" train_preds = model.predict(X_train)\n",
" print(f'Training Accuracy: {accuracy_score(y_train, train_preds)*100}%')\n",
" print('Confusion Matrix(training): \\n', confusion_matrix(y_train, train_preds))\n",
" test_preds = model.predict(X_test)\n",
" print(f'Testing Accuracy: {accuracy_score(y_test, test_preds)*100}%')\n",
" print('Confusion Matrix(testing): \\n', confusion_matrix(y_test, test_preds))"
],
"execution_count": 138,
"outputs": []
},
{
"cell_type": "markdown",
"metadata": {
"id": "c-RSkJF5Msg0"
},
"source": [
"###Decision Trees"
]
},
{
"cell_type": "code",
"metadata": {
"id": "ygsKlYkIZICQ"
},
"source": [
"from sklearn.tree import DecisionTreeClassifier\n",
"clf = DecisionTreeClassifier(criterion=\"entropy\", random_state=4, max_depth = 8, min_samples_split=7)\n",
"clf = clf.fit(X_train, Y_train)"
],
"execution_count": 139,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "ITsJedWSZisU",
"outputId": "8986a4c4-b8ea-4e45-d77f-013bb93708ad"
},
"source": [
"y_pred = clf.predict(X_train)\n",
"get_train_test_acc(clf, X_train, Y_train, X_test, Y_test)"
],
"execution_count": 140,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Training Accuracy: 51.23926973763753%\n",
"Confusion Matrix(training): \n",
" [[ 917 1008 80 98]\n",
" [ 417 1344 125 145]\n",
" [ 43 306 1450 259]\n",
" [ 199 934 419 527]]\n",
"Testing Accuracy: 45.84139264990329%\n",
"Confusion Matrix(testing): \n",
" [[184 253 26 19]\n",
" [123 321 34 44]\n",
" [ 18 92 343 74]\n",
" [ 61 258 118 100]]\n"
]
}
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "WfnkIwWUoTRw"
},
"source": [
"### Random **Forest**"
]
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "yvFL2YFxoWKD",
"outputId": "e6e84b16-f7c7-4568-cb78-0c347c92ebe4"
},
"source": [
"from sklearn.ensemble import RandomForestClassifier\n",
"from sklearn.metrics import accuracy_score, confusion_matrix\n",
"rfc = RandomForestClassifier(max_depth = 12)\n",
"rfc.fit(X_train, Y_train)"
],
"execution_count": 141,
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/plain": [
"RandomForestClassifier(bootstrap=True, ccp_alpha=0.0, class_weight=None,\n",
" criterion='gini', max_depth=12, max_features='auto',\n",
" max_leaf_nodes=None, max_samples=None,\n",
" min_impurity_decrease=0.0, min_impurity_split=None,\n",
" min_samples_leaf=1, min_samples_split=2,\n",
" min_weight_fraction_leaf=0.0, n_estimators=100,\n",
" n_jobs=None, oob_score=False, random_state=None,\n",
" verbose=0, warm_start=False)"
]
},
"metadata": {},
"execution_count": 141
}
]
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "5-8gDS1QouhZ",
"outputId": "0d6f8b91-a371-4f16-b450-2a6ec1362982"
},
"source": [
"get_train_test_acc(rfc, X_train, Y_train, X_test, Y_test)"
],
"execution_count": 142,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Training Accuracy: 71.38193688792165%\n",
"Confusion Matrix(training): \n",
" [[1460 476 93 74]\n",
" [ 265 1579 103 84]\n",
" [ 43 210 1696 109]\n",
" [ 172 492 246 1169]]\n",
"Testing Accuracy: 47.87234042553192%\n",
"Confusion Matrix(testing): \n",
" [[237 183 28 34]\n",
" [161 253 47 61]\n",
" [ 20 63 366 78]\n",
" [ 77 191 135 134]]\n"
]
}
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "7leaD--ksHuQ"
},
"source": [
"###Naive Bayes "
]
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "teGGylJBsX3H",
"outputId": "28ed7dd4-1cf5-4007-d594-f291294d7a88"
},
"source": [
"from sklearn.naive_bayes import GaussianNB\n",
"nbc = GaussianNB()\n",
"nbc.fit(X_train, Y_train)"
],
"execution_count": 143,
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/plain": [
"GaussianNB(priors=None, var_smoothing=1e-09)"
]
},
"metadata": {},
"execution_count": 143
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "l4WhrBZ4suHP",
"colab": {
"base_uri": "https://localhost:8080/"
},
"outputId": "47826a95-0b38-46a2-abfb-0fcdf08c77bf"
},
"source": [
"get_train_test_acc(nbc, X_train, Y_train, X_test, Y_test)"
],
"execution_count": 144,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Training Accuracy: 36.065771974368275%\n",
"Confusion Matrix(training): \n",
" [[1045 936 111 11]\n",
" [ 678 1189 143 21]\n",
" [ 409 875 725 49]\n",
" [ 609 1022 424 24]]\n",
"Testing Accuracy: 35.686653771760156%\n",
"Confusion Matrix(testing): \n",
" [[247 212 20 3]\n",
" [192 290 38 2]\n",
" [105 212 195 15]\n",
" [141 274 116 6]]\n"
]
}
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "jXB9OE_MudDQ"
},
"source": [
"###KNeighbors Classifer"
]
},
{
"cell_type": "code",
"metadata": {
"id": "vYG2FQlGuyaP",
"colab": {
"base_uri": "https://localhost:8080/"
},
"outputId": "81394f38-ebac-4d46-89a0-fca92f8a6fd0"
},
"source": [
"from sklearn.neighbors import KNeighborsClassifier\n",
"kfc = KNeighborsClassifier(algorithm='auto')\n",
"kfc.fit(X_train, Y_train)"
],
"execution_count": 145,
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/plain": [
"KNeighborsClassifier(algorithm='auto', leaf_size=30, metric='minkowski',\n",
" metric_params=None, n_jobs=None, n_neighbors=5, p=2,\n",
" weights='uniform')"
]
},
"metadata": {},
"execution_count": 145
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "MPMgHDGLvFHu",
"colab": {
"base_uri": "https://localhost:8080/"
},
"outputId": "38a9d0cf-e939-4620-d964-7eab781fc086"
},
"source": [
"get_train_test_acc(kfc, X_train, Y_train, X_test, Y_test)"
],
"execution_count": 146,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Training Accuracy: 63.075807036634025%\n",
"Confusion Matrix(training): \n",
" [[1586 322 73 122]\n",
" [ 583 1132 101 215]\n",
" [ 119 166 1515 258]\n",
" [ 363 405 327 984]]\n",
"Testing Accuracy: 43.32688588007737%\n",
"Confusion Matrix(testing): \n",
" [[266 156 21 39]\n",
" [228 165 38 91]\n",
" [ 46 55 325 101]\n",
" [126 153 118 140]]\n"
]
}
]
}
]
}

Binary file not shown.

@@ -0,0 +1,127 @@
@relation automobile-cs699-weka.filters.unsupervised.attribute.ReplaceMissingValues-weka.filters.unsupervised.attribute.Discretize-B3-M-1.0-R26-precision6-unset-class-temporarily
@attribute symbolling numeric
@attribute normalized-losses numeric
@attribute make {alfa-romero,audi,bmw,chevrolet,dodge,honda,isuzu,jaguar,mazda,mercedes-benz,mercury,mitsubishi,nissan}
@attribute fuel-type {gas,diesel}
@attribute aspiration {std,turbo}
@attribute num-of-doors {two,four}
@attribute body-style {sedan,wagon,hatchback,hardtop,convertible}
@attribute drive-wheels {rwd,fwd,4wd}
@attribute engine-location {front}
@attribute wheel-base numeric
@attribute length numeric
@attribute width numeric
@attribute height numeric
@attribute curb-weight numeric
@attribute engine-type {dohc,ohcv,ohc,l,rotor}
@attribute num-of-cylinders {four,six,five,three,twelve,two,eight}
@attribute engine-size numeric
@attribute fuel-system {mpfi,2bbl,mfi,1bbl,spfi,4bbl,idi,spdi}
@attribute bore numeric
@attribute stroke numeric
@attribute compression-ratio numeric
@attribute horsepower numeric
@attribute peak-rpm numeric
@attribute city-mpg numeric
@attribute highway-mpg numeric
@attribute price {'\'(-inf-18567.333333]\'','\'(18567.333333-31983.666667]\'','\'(31983.666667-inf)\''}
@data
3,127.84,alfa-romero,gas,std,two,sedan,rwd,front,98.304255,168.8,64.1,48.8,2548,dohc,four,130,mpfi,3.47,2.68,9,111,5000,21,27,'\'(-inf-18567.333333]\''
3,127.84,alfa-romero,gas,std,two,sedan,rwd,front,98.304255,168.8,64.1,48.8,2548,dohc,four,130,mpfi,3.47,2.68,9,111,5000,21,27,'\'(-inf-18567.333333]\''
1,127.84,alfa-romero,gas,std,two,sedan,rwd,front,98.304255,171.2,65.5,52.4,2823,ohcv,six,152,mpfi,2.68,3.47,9,154,5000,19,26,'\'(-inf-18567.333333]\''
2,164,audi,gas,std,four,sedan,fwd,front,99.8,176.6,66.2,54.3,2337,ohc,four,109,mpfi,3.19,3.4,10,102,5500,24,30,'\'(-inf-18567.333333]\''
2,164,audi,gas,std,four,sedan,4wd,front,99.4,176.6,66.4,54.3,2824,ohc,five,136,mpfi,3.19,3.4,8,115,5500,18,22,'\'(-inf-18567.333333]\''
2,127.84,audi,gas,std,two,sedan,fwd,front,99.8,177.3,66.3,53.1,2507,ohc,five,136,mpfi,3.19,3.4,8.5,110,5500,19,25,'\'(-inf-18567.333333]\''
1,158,audi,gas,std,four,sedan,fwd,front,105.8,192.7,71.4,55.7,2844,ohc,five,136,mpfi,3.19,3.4,8.5,110,5500,19,25,'\'(-inf-18567.333333]\''
1,127.84,audi,gas,std,four,wagon,fwd,front,105.8,192.7,71.4,55.7,2954,ohc,five,136,mpfi,3.19,3.4,8.5,110,5500,19,25,'\'(18567.333333-31983.666667]\''
1,158,audi,gas,turbo,four,sedan,fwd,front,105.8,192.7,71.4,55.9,3086,ohc,five,131,mpfi,3.13,3.4,8.3,140,5500,17,20,'\'(18567.333333-31983.666667]\''
2,192,bmw,gas,std,two,sedan,rwd,front,101.2,176.8,64.8,54.3,2395,ohc,four,108,mpfi,3.5,2.8,8.8,101,5800,23,29,'\'(-inf-18567.333333]\''
0,192,bmw,gas,std,four,sedan,rwd,front,101.2,176.8,64.8,54.3,2395,ohc,four,108,mpfi,3.5,2.8,8.8,101,5800,23,29,'\'(-inf-18567.333333]\''
0,188,bmw,gas,std,two,sedan,rwd,front,101.2,176.8,64.8,54.3,2710,ohc,six,164,mpfi,3.31,3.19,9,121,4250,21,28,'\'(18567.333333-31983.666667]\''
0,188,bmw,gas,std,four,sedan,rwd,front,101.2,176.8,64.8,54.3,2765,ohc,six,164,mpfi,3.31,3.19,9,121,4250,21,28,'\'(18567.333333-31983.666667]\''
1,127.84,bmw,gas,std,four,sedan,rwd,front,103.5,189,66.9,55.7,3055,ohc,six,164,mpfi,3.31,3.19,9,121,4250,20,25,'\'(18567.333333-31983.666667]\''
0,127.84,bmw,gas,std,four,sedan,rwd,front,103.5,189,66.9,55.7,3230,ohc,six,209,mpfi,3.62,3.39,8,182,5400,16,22,'\'(18567.333333-31983.666667]\''
0,127.84,bmw,gas,std,two,sedan,rwd,front,103.5,193.8,67.9,53.7,3380,ohc,six,209,mpfi,3.62,3.39,8,182,5400,16,22,'\'(31983.666667-inf)\''
0,127.84,bmw,gas,std,four,sedan,rwd,front,110,197,70.9,56.3,3505,ohc,six,209,mpfi,3.62,3.39,8,182,5400,15,20,'\'(31983.666667-inf)\''
2,121,chevrolet,gas,std,two,hatchback,fwd,front,88.4,141.1,60.3,53.2,1488,l,three,61,2bbl,2.91,3.03,9.5,48,5100,47,53,'\'(-inf-18567.333333]\''
1,98,chevrolet,gas,std,two,hatchback,fwd,front,94.5,155.9,63.6,52,1874,ohc,four,90,2bbl,3.03,3.11,9.6,70,5400,38,43,'\'(-inf-18567.333333]\''
0,81,chevrolet,gas,std,four,sedan,fwd,front,94.5,158.8,63.6,52,1909,ohc,four,90,2bbl,3.03,3.11,9.6,70,5400,38,43,'\'(-inf-18567.333333]\''
1,118,dodge,gas,std,two,hatchback,fwd,front,93.7,157.3,63.8,50.8,1876,ohc,four,90,2bbl,2.97,3.23,9.41,68,5500,37,41,'\'(-inf-18567.333333]\''
1,118,dodge,gas,std,two,hatchback,fwd,front,93.7,157.3,63.8,50.8,1876,ohc,four,90,2bbl,2.97,3.23,9.4,68,5500,31,38,'\'(-inf-18567.333333]\''
1,118,dodge,gas,turbo,two,hatchback,fwd,front,93.7,157.3,63.8,50.8,2128,ohc,four,98,mpfi,3.03,3.39,7.6,102,5500,24,30,'\'(-inf-18567.333333]\''
1,148,dodge,gas,std,four,hatchback,fwd,front,93.7,157.3,63.8,50.6,1967,ohc,four,90,2bbl,2.97,3.23,9.4,68,5500,31,38,'\'(-inf-18567.333333]\''
1,148,dodge,gas,std,four,sedan,fwd,front,93.7,157.3,63.8,50.6,1989,ohc,four,90,2bbl,2.97,3.23,9.4,68,5500,31,38,'\'(-inf-18567.333333]\''
1,148,dodge,gas,std,four,sedan,fwd,front,93.7,157.3,63.8,50.6,1989,ohc,four,90,2bbl,2.97,3.23,9.4,68,5500,31,38,'\'(-inf-18567.333333]\''
1,148,dodge,gas,turbo,two,sedan,fwd,front,93.7,157.3,63.8,50.6,2191,ohc,four,98,mpfi,3.03,3.39,7.6,102,5500,24,30,'\'(-inf-18567.333333]\''
-1,110,dodge,gas,std,four,wagon,fwd,front,103.3,174.6,64.6,59.8,2535,ohc,four,122,2bbl,3.34,3.46,8.5,88,5000,24,30,'\'(-inf-18567.333333]\''
3,145,dodge,gas,turbo,two,hatchback,fwd,front,95.9,173.2,66.3,50.2,2811,ohc,four,156,mfi,3.6,3.9,7,145,5000,19,24,'\'(-inf-18567.333333]\''
2,137,honda,gas,std,two,hatchback,fwd,front,86.6,144.6,63.9,50.8,1713,ohc,four,92,1bbl,2.91,3.41,9.6,58,4800,49,54,'\'(-inf-18567.333333]\''
2,137,honda,gas,std,two,hatchback,fwd,front,86.6,144.6,63.9,50.8,1819,ohc,four,92,1bbl,2.91,3.41,9.2,76,6000,31,38,'\'(-inf-18567.333333]\''
1,101,honda,gas,std,two,hatchback,fwd,front,93.7,150,64,52.6,1837,ohc,four,79,1bbl,2.91,3.07,10.1,60,5500,38,42,'\'(-inf-18567.333333]\''
1,101,honda,gas,std,two,hatchback,fwd,front,93.7,150,64,52.6,1940,ohc,four,92,1bbl,2.91,3.41,9.2,76,6000,30,34,'\'(-inf-18567.333333]\''
1,101,honda,gas,std,two,hatchback,fwd,front,93.7,150,64,52.6,1956,ohc,four,92,1bbl,2.91,3.41,9.2,76,6000,30,34,'\'(-inf-18567.333333]\''
0,110,honda,gas,std,four,sedan,fwd,front,96.5,163.4,64,54.5,2010,ohc,four,92,1bbl,2.91,3.41,9.2,76,6000,30,34,'\'(-inf-18567.333333]\''
0,78,honda,gas,std,four,wagon,fwd,front,96.5,157.1,63.9,58.3,2024,ohc,four,92,1bbl,2.92,3.41,9.2,76,6000,30,34,'\'(-inf-18567.333333]\''
0,106,honda,gas,std,two,hatchback,fwd,front,96.5,167.5,65.2,53.3,2236,ohc,four,110,1bbl,3.15,3.58,9,86,5800,27,33,'\'(-inf-18567.333333]\''
0,106,honda,gas,std,two,hatchback,fwd,front,96.5,167.5,65.2,53.3,2289,ohc,four,110,1bbl,3.15,3.58,9,86,5800,27,33,'\'(-inf-18567.333333]\''
0,85,honda,gas,std,four,sedan,fwd,front,96.5,175.4,65.2,54.1,2304,ohc,four,110,1bbl,3.15,3.58,9,86,5800,27,33,'\'(-inf-18567.333333]\''
0,85,honda,gas,std,four,sedan,fwd,front,96.5,175.4,62.5,54.1,2372,ohc,four,110,1bbl,3.15,3.58,9,86,5800,27,33,'\'(-inf-18567.333333]\''
0,85,honda,gas,std,four,sedan,fwd,front,96.5,175.4,65.2,54.1,2465,ohc,four,110,mpfi,3.15,3.58,9,101,5800,24,28,'\'(-inf-18567.333333]\''
1,107,honda,gas,std,two,sedan,fwd,front,96.5,169.1,66,51,2293,ohc,four,110,2bbl,3.15,3.58,9.1,100,5500,25,31,'\'(-inf-18567.333333]\''
0,127.84,isuzu,gas,std,four,sedan,rwd,front,94.3,170.7,61.8,53.5,2337,ohc,four,111,2bbl,3.31,3.23,8.5,78,4800,24,29,'\'(-inf-18567.333333]\''
2,127.84,isuzu,gas,std,two,hatchback,rwd,front,96,172.6,65.2,51.4,2734,ohc,four,119,spfi,3.43,3.23,9.2,90,5000,24,29,'\'(-inf-18567.333333]\''
0,145,jaguar,gas,std,four,sedan,rwd,front,113,199.6,69.6,52.8,4066,dohc,six,258,mpfi,3.63,4.17,8.1,176,4750,15,19,'\'(31983.666667-inf)\''
0,127.84,jaguar,gas,std,four,sedan,rwd,front,113,199.6,69.6,52.8,4066,dohc,six,258,mpfi,3.63,4.17,8.1,176,4750,15,19,'\'(31983.666667-inf)\''
0,127.84,jaguar,gas,std,two,sedan,rwd,front,102,191.7,70.6,47.8,3950,ohcv,twelve,326,mpfi,3.54,2.76,11.5,262,5000,13,17,'\'(31983.666667-inf)\''
1,104,mazda,gas,std,two,hatchback,fwd,front,93.1,159.1,64.2,54.1,1890,ohc,four,91,2bbl,3.03,3.15,9,68,5000,30,31,'\'(-inf-18567.333333]\''
1,104,mazda,gas,std,two,hatchback,fwd,front,93.1,159.1,64.2,54.1,1900,ohc,four,91,2bbl,3.03,3.15,9,68,5000,31,38,'\'(-inf-18567.333333]\''
1,104,mazda,gas,std,two,hatchback,fwd,front,93.1,159.1,64.2,54.1,1905,ohc,four,91,2bbl,3.03,3.15,9,68,5000,31,38,'\'(-inf-18567.333333]\''
1,113,mazda,gas,std,four,sedan,fwd,front,93.1,166.8,64.2,54.1,1945,ohc,four,91,2bbl,3.03,3.15,9,68,5000,31,38,'\'(-inf-18567.333333]\''
1,113,mazda,gas,std,four,sedan,fwd,front,93.1,166.8,64.2,54.1,1950,ohc,four,91,2bbl,3.08,3.15,9,68,5000,31,38,'\'(-inf-18567.333333]\''
3,150,mazda,gas,std,two,hatchback,rwd,front,95.3,169,65.7,49.6,2380,rotor,two,70,4bbl,3.25172,3.356129,9.4,101,6000,17,23,'\'(-inf-18567.333333]\''
3,150,mazda,gas,std,two,hatchback,rwd,front,95.3,169,65.7,49.6,2380,rotor,two,70,4bbl,3.25172,3.356129,9.4,101,6000,17,23,'\'(-inf-18567.333333]\''
3,150,mazda,gas,std,two,hatchback,rwd,front,95.3,169,65.7,49.6,2385,rotor,two,70,4bbl,3.25172,3.356129,9.4,101,6000,17,23,'\'(-inf-18567.333333]\''
3,150,mazda,gas,std,two,hatchback,rwd,front,95.3,169,65.7,49.6,2500,rotor,two,80,mpfi,3.25172,3.356129,9.4,135,6000,16,23,'\'(-inf-18567.333333]\''
1,129,mazda,gas,std,two,hatchback,fwd,front,98.8,177.8,66.5,53.7,2385,ohc,four,122,2bbl,3.39,3.39,8.6,84,4800,26,32,'\'(-inf-18567.333333]\''
0,115,mazda,gas,std,four,sedan,fwd,front,98.8,177.8,66.5,55.5,2410,ohc,four,122,2bbl,3.39,3.39,8.6,84,4800,26,32,'\'(-inf-18567.333333]\''
1,129,mazda,gas,std,two,hatchback,fwd,front,98.8,177.8,66.5,53.7,2385,ohc,four,122,2bbl,3.39,3.39,8.6,84,4800,26,32,'\'(-inf-18567.333333]\''
0,115,mazda,gas,std,four,sedan,fwd,front,98.8,177.8,66.5,55.5,2410,ohc,four,122,2bbl,3.39,3.39,8.6,84,4800,26,32,'\'(-inf-18567.333333]\''
0,127.84,mazda,diesel,std,two,sedan,fwd,front,98.8,177.8,66.5,55.5,2443,ohc,four,122,idi,3.39,3.39,22.7,64,4650,36,42,'\'(-inf-18567.333333]\''
0,115,mazda,gas,std,four,hatchback,fwd,front,98.8,177.8,66.5,55.5,2425,ohc,four,122,2bbl,3.39,3.39,8.6,84,4800,26,32,'\'(-inf-18567.333333]\''
0,118,mazda,gas,std,four,sedan,rwd,front,104.9,175,66.1,54.4,2670,ohc,four,140,mpfi,3.76,3.16,8,120,5000,19,27,'\'(-inf-18567.333333]\''
0,127.84,mazda,diesel,std,four,sedan,rwd,front,104.9,175,66.1,54.4,2700,ohc,four,134,idi,3.43,3.64,22,72,4200,31,39,'\'(-inf-18567.333333]\''
-1,93,mercedes-benz,diesel,turbo,four,sedan,rwd,front,110,190.9,70.3,56.5,3515,ohc,five,183,idi,3.58,3.64,21.5,123,4350,22,25,'\'(18567.333333-31983.666667]\''
-1,93,mercedes-benz,diesel,turbo,four,wagon,rwd,front,110,190.9,70.3,58.7,3750,ohc,five,183,idi,3.58,3.64,21.5,123,4350,22,25,'\'(18567.333333-31983.666667]\''
0,93,mercedes-benz,diesel,turbo,two,hardtop,rwd,front,106.7,187.5,70.3,54.9,3495,ohc,five,183,idi,3.58,3.64,21.5,123,4350,22,25,'\'(18567.333333-31983.666667]\''
-1,93,mercedes-benz,diesel,turbo,four,sedan,rwd,front,115.6,202.6,71.7,56.3,3770,ohc,five,183,idi,3.58,3.64,21.5,123,4350,22,25,'\'(18567.333333-31983.666667]\''
-1,127.84,mercedes-benz,gas,std,four,sedan,rwd,front,115.6,202.6,71.7,56.5,3740,ohcv,eight,234,mpfi,3.46,3.1,8.3,155,4750,16,18,'\'(31983.666667-inf)\''
3,142,mercedes-benz,gas,std,two,convertible,rwd,front,96.6,180.3,70.5,50.8,3685,ohcv,eight,234,mpfi,3.46,3.1,8.3,155,4750,16,18,'\'(31983.666667-inf)\''
0,127.84,mercedes-benz,gas,std,four,sedan,rwd,front,120.9,208.1,71.7,56.7,3900,ohcv,eight,308,mpfi,3.8,3.35,8,184,4500,14,16,'\'(31983.666667-inf)\''
1,127.84,mercedes-benz,gas,std,two,hardtop,rwd,front,112,199.2,72,55.4,3715,ohcv,eight,304,mpfi,3.8,3.35,8,184,4500,14,16,'\'(31983.666667-inf)\''
1,127.84,mercury,gas,turbo,two,hatchback,rwd,front,102.7,178.4,68,54.8,2910,ohc,four,140,mpfi,3.78,3.12,8,175,5000,19,24,'\'(-inf-18567.333333]\''
2,161,mitsubishi,gas,std,two,hatchback,fwd,front,93.7,157.3,64.4,50.8,1918,ohc,four,92,2bbl,2.97,3.23,9.4,68,5500,37,41,'\'(-inf-18567.333333]\''
2,161,mitsubishi,gas,std,two,hatchback,fwd,front,93.7,157.3,64.4,50.8,1944,ohc,four,92,2bbl,2.97,3.23,9.4,68,5500,31,38,'\'(-inf-18567.333333]\''
2,161,mitsubishi,gas,std,two,hatchback,fwd,front,93.7,157.3,64.4,50.8,2004,ohc,four,92,2bbl,2.97,3.23,9.4,68,5500,31,38,'\'(-inf-18567.333333]\''
1,161,mitsubishi,gas,turbo,two,hatchback,fwd,front,93,157.3,63.8,50.8,2145,ohc,four,98,spdi,3.03,3.39,7.6,102,5500,24,30,'\'(-inf-18567.333333]\''
3,153,mitsubishi,gas,turbo,two,hatchback,fwd,front,96.3,173,65.4,49.4,2370,ohc,four,110,spdi,3.17,3.46,7.5,116,5500,23,30,'\'(-inf-18567.333333]\''
3,153,mitsubishi,gas,std,two,hatchback,fwd,front,96.3,173,65.4,49.4,2328,ohc,four,122,2bbl,3.35,3.46,8.5,88,5000,25,32,'\'(-inf-18567.333333]\''
3,127.84,mitsubishi,gas,turbo,two,hatchback,fwd,front,95.9,173.2,66.3,50.2,2833,ohc,four,156,spdi,3.58,3.86,7,145,5000,19,24,'\'(-inf-18567.333333]\''
3,127.84,mitsubishi,gas,turbo,two,hatchback,fwd,front,95.9,173.2,66.3,50.2,2921,ohc,four,156,spdi,3.59,3.86,7,145,5000,19,24,'\'(-inf-18567.333333]\''
3,127.84,mitsubishi,gas,turbo,two,hatchback,fwd,front,95.9,173.2,66.3,50.2,2926,ohc,four,156,spdi,3.59,3.86,7,145,5000,19,24,'\'(-inf-18567.333333]\''
1,125,mitsubishi,gas,std,four,sedan,fwd,front,96.3,172.4,65.4,51.6,2365,ohc,four,122,2bbl,3.35,3.46,8.5,88,5000,25,32,'\'(-inf-18567.333333]\''
1,125,mitsubishi,gas,std,four,sedan,fwd,front,96.3,172.4,65.4,51.6,2405,ohc,four,122,2bbl,3.35,3.46,8.5,88,5000,25,32,'\'(-inf-18567.333333]\''
1,125,mitsubishi,gas,turbo,four,sedan,fwd,front,96.3,172.4,65.4,51.6,2403,ohc,four,110,spdi,3.17,3.46,7.5,116,5500,23,30,'\'(-inf-18567.333333]\''
-1,137,mitsubishi,gas,std,four,sedan,fwd,front,96.3,172.4,65.4,51.6,2403,ohc,four,110,spdi,3.17,3.46,7.5,116,5500,23,30,'\'(-inf-18567.333333]\''
1,128,nissan,gas,std,two,sedan,fwd,front,94.5,165.3,63.8,54.5,1889,ohc,four,97,2bbl,3.15,3.29,9.4,69,5200,31,37,'\'(-inf-18567.333333]\''
1,128,nissan,diesel,std,two,sedan,fwd,front,94.5,165.3,63.8,54.5,2017,ohc,four,103,idi,2.99,3.47,21.9,55,4800,45,50,'\'(-inf-18567.333333]\''
1,128,nissan,gas,std,two,sedan,fwd,front,94.5,165.3,63.8,54.5,1918,ohc,four,97,2bbl,3.15,3.29,9.4,69,5200,31,37,'\'(-inf-18567.333333]\''
1,122,nissan,gas,std,four,sedan,fwd,front,94.5,165.3,63.8,54.5,1938,ohc,four,97,2bbl,3.15,3.29,9.4,69,5200,31,37,'\'(-inf-18567.333333]\''
1,103,nissan,gas,std,four,wagon,fwd,front,94.5,170.2,63.8,53.5,2024,ohc,four,97,2bbl,3.15,3.29,9.4,69,5200,31,37,'\'(-inf-18567.333333]\''
1,128,nissan,gas,std,two,sedan,fwd,front,94.5,165.3,63.8,54.5,1951,ohc,four,97,2bbl,3.15,3.29,9.4,69,5200,31,37,'\'(-inf-18567.333333]\''
1,128,nissan,gas,std,two,hatchback,fwd,front,94.5,165.6,63.8,53.3,2028,ohc,four,97,2bbl,3.15,3.29,9.4,69,5200,31,37,'\'(-inf-18567.333333]\''
1,122,nissan,gas,std,four,sedan,fwd,front,94.5,165.3,63.8,54.5,1971,ohc,four,97,2bbl,3.15,3.29,9.4,69,5200,31,37,'\'(-inf-18567.333333]\''
1,103,nissan,gas,std,four,wagon,fwd,front,94.5,170.2,63.8,53.5,2037,ohc,four,97,2bbl,3.15,3.29,9.4,69,5200,31,37,'\'(-inf-18567.333333]\''
2,168,nissan,gas,std,two,hardtop,fwd,front,95.1,162.4,63.8,53.3,2008,ohc,four,97,2bbl,3.15,3.29,9.4,69,5200,31,37,'\'(-inf-18567.333333]\''
0,106,nissan,gas,std,four,hatchback,fwd,front,97.2,173.4,65.2,54.7,2324,ohc,four,120,2bbl,3.33,3.47,8.5,97,5200,27,34,'\'(-inf-18567.333333]\''

@@ -0,0 +1,127 @@
@relation automobile-cs699-weka.filters.unsupervised.attribute.ReplaceMissingValues
@attribute symbolling numeric
@attribute normalized-losses numeric
@attribute make {alfa-romero,audi,bmw,chevrolet,dodge,honda,isuzu,jaguar,mazda,mercedes-benz,mercury,mitsubishi,nissan}
@attribute fuel-type {gas,diesel}
@attribute aspiration {std,turbo}
@attribute num-of-doors {two,four}
@attribute body-style {sedan,wagon,hatchback,hardtop,convertible}
@attribute drive-wheels {rwd,fwd,4wd}
@attribute engine-location {front}
@attribute wheel-base numeric
@attribute length numeric
@attribute width numeric
@attribute height numeric
@attribute curb-weight numeric
@attribute engine-type {dohc,ohcv,ohc,l,rotor}
@attribute num-of-cylinders {four,six,five,three,twelve,two,eight}
@attribute engine-size numeric
@attribute fuel-system {mpfi,2bbl,mfi,1bbl,spfi,4bbl,idi,spdi}
@attribute bore numeric
@attribute stroke numeric
@attribute compression-ratio numeric
@attribute horsepower numeric
@attribute peak-rpm numeric
@attribute city-mpg numeric
@attribute highway-mpg numeric
@attribute price numeric
@data
3,127.84,alfa-romero,gas,std,two,sedan,rwd,front,98.304255,168.8,64.1,48.8,2548,dohc,four,130,mpfi,3.47,2.68,9,111,5000,21,27,13495
3,127.84,alfa-romero,gas,std,two,sedan,rwd,front,98.304255,168.8,64.1,48.8,2548,dohc,four,130,mpfi,3.47,2.68,9,111,5000,21,27,16500
1,127.84,alfa-romero,gas,std,two,sedan,rwd,front,98.304255,171.2,65.5,52.4,2823,ohcv,six,152,mpfi,2.68,3.47,9,154,5000,19,26,16500
2,164,audi,gas,std,four,sedan,fwd,front,99.8,176.6,66.2,54.3,2337,ohc,four,109,mpfi,3.19,3.4,10,102,5500,24,30,13950
2,164,audi,gas,std,four,sedan,4wd,front,99.4,176.6,66.4,54.3,2824,ohc,five,136,mpfi,3.19,3.4,8,115,5500,18,22,17450
2,127.84,audi,gas,std,two,sedan,fwd,front,99.8,177.3,66.3,53.1,2507,ohc,five,136,mpfi,3.19,3.4,8.5,110,5500,19,25,15250
1,158,audi,gas,std,four,sedan,fwd,front,105.8,192.7,71.4,55.7,2844,ohc,five,136,mpfi,3.19,3.4,8.5,110,5500,19,25,17710
1,127.84,audi,gas,std,four,wagon,fwd,front,105.8,192.7,71.4,55.7,2954,ohc,five,136,mpfi,3.19,3.4,8.5,110,5500,19,25,18920
1,158,audi,gas,turbo,four,sedan,fwd,front,105.8,192.7,71.4,55.9,3086,ohc,five,131,mpfi,3.13,3.4,8.3,140,5500,17,20,23875
2,192,bmw,gas,std,two,sedan,rwd,front,101.2,176.8,64.8,54.3,2395,ohc,four,108,mpfi,3.5,2.8,8.8,101,5800,23,29,16430
0,192,bmw,gas,std,four,sedan,rwd,front,101.2,176.8,64.8,54.3,2395,ohc,four,108,mpfi,3.5,2.8,8.8,101,5800,23,29,16925
0,188,bmw,gas,std,two,sedan,rwd,front,101.2,176.8,64.8,54.3,2710,ohc,six,164,mpfi,3.31,3.19,9,121,4250,21,28,20970
0,188,bmw,gas,std,four,sedan,rwd,front,101.2,176.8,64.8,54.3,2765,ohc,six,164,mpfi,3.31,3.19,9,121,4250,21,28,21105
1,127.84,bmw,gas,std,four,sedan,rwd,front,103.5,189,66.9,55.7,3055,ohc,six,164,mpfi,3.31,3.19,9,121,4250,20,25,24565
0,127.84,bmw,gas,std,four,sedan,rwd,front,103.5,189,66.9,55.7,3230,ohc,six,209,mpfi,3.62,3.39,8,182,5400,16,22,30760
0,127.84,bmw,gas,std,two,sedan,rwd,front,103.5,193.8,67.9,53.7,3380,ohc,six,209,mpfi,3.62,3.39,8,182,5400,16,22,41315
0,127.84,bmw,gas,std,four,sedan,rwd,front,110,197,70.9,56.3,3505,ohc,six,209,mpfi,3.62,3.39,8,182,5400,15,20,36880
2,121,chevrolet,gas,std,two,hatchback,fwd,front,88.4,141.1,60.3,53.2,1488,l,three,61,2bbl,2.91,3.03,9.5,48,5100,47,53,5151
1,98,chevrolet,gas,std,two,hatchback,fwd,front,94.5,155.9,63.6,52,1874,ohc,four,90,2bbl,3.03,3.11,9.6,70,5400,38,43,6295
0,81,chevrolet,gas,std,four,sedan,fwd,front,94.5,158.8,63.6,52,1909,ohc,four,90,2bbl,3.03,3.11,9.6,70,5400,38,43,6575
1,118,dodge,gas,std,two,hatchback,fwd,front,93.7,157.3,63.8,50.8,1876,ohc,four,90,2bbl,2.97,3.23,9.41,68,5500,37,41,5572
1,118,dodge,gas,std,two,hatchback,fwd,front,93.7,157.3,63.8,50.8,1876,ohc,four,90,2bbl,2.97,3.23,9.4,68,5500,31,38,6377
1,118,dodge,gas,turbo,two,hatchback,fwd,front,93.7,157.3,63.8,50.8,2128,ohc,four,98,mpfi,3.03,3.39,7.6,102,5500,24,30,7957
1,148,dodge,gas,std,four,hatchback,fwd,front,93.7,157.3,63.8,50.6,1967,ohc,four,90,2bbl,2.97,3.23,9.4,68,5500,31,38,6229
1,148,dodge,gas,std,four,sedan,fwd,front,93.7,157.3,63.8,50.6,1989,ohc,four,90,2bbl,2.97,3.23,9.4,68,5500,31,38,6692
1,148,dodge,gas,std,four,sedan,fwd,front,93.7,157.3,63.8,50.6,1989,ohc,four,90,2bbl,2.97,3.23,9.4,68,5500,31,38,7609
1,148,dodge,gas,turbo,two,sedan,fwd,front,93.7,157.3,63.8,50.6,2191,ohc,four,98,mpfi,3.03,3.39,7.6,102,5500,24,30,8558
-1,110,dodge,gas,std,four,wagon,fwd,front,103.3,174.6,64.6,59.8,2535,ohc,four,122,2bbl,3.34,3.46,8.5,88,5000,24,30,8921
3,145,dodge,gas,turbo,two,hatchback,fwd,front,95.9,173.2,66.3,50.2,2811,ohc,four,156,mfi,3.6,3.9,7,145,5000,19,24,12964
2,137,honda,gas,std,two,hatchback,fwd,front,86.6,144.6,63.9,50.8,1713,ohc,four,92,1bbl,2.91,3.41,9.6,58,4800,49,54,6479
2,137,honda,gas,std,two,hatchback,fwd,front,86.6,144.6,63.9,50.8,1819,ohc,four,92,1bbl,2.91,3.41,9.2,76,6000,31,38,6855
1,101,honda,gas,std,two,hatchback,fwd,front,93.7,150,64,52.6,1837,ohc,four,79,1bbl,2.91,3.07,10.1,60,5500,38,42,5399
1,101,honda,gas,std,two,hatchback,fwd,front,93.7,150,64,52.6,1940,ohc,four,92,1bbl,2.91,3.41,9.2,76,6000,30,34,6529
1,101,honda,gas,std,two,hatchback,fwd,front,93.7,150,64,52.6,1956,ohc,four,92,1bbl,2.91,3.41,9.2,76,6000,30,34,7129
0,110,honda,gas,std,four,sedan,fwd,front,96.5,163.4,64,54.5,2010,ohc,four,92,1bbl,2.91,3.41,9.2,76,6000,30,34,7295
0,78,honda,gas,std,four,wagon,fwd,front,96.5,157.1,63.9,58.3,2024,ohc,four,92,1bbl,2.92,3.41,9.2,76,6000,30,34,7295
0,106,honda,gas,std,two,hatchback,fwd,front,96.5,167.5,65.2,53.3,2236,ohc,four,110,1bbl,3.15,3.58,9,86,5800,27,33,7895
0,106,honda,gas,std,two,hatchback,fwd,front,96.5,167.5,65.2,53.3,2289,ohc,four,110,1bbl,3.15,3.58,9,86,5800,27,33,9095
0,85,honda,gas,std,four,sedan,fwd,front,96.5,175.4,65.2,54.1,2304,ohc,four,110,1bbl,3.15,3.58,9,86,5800,27,33,8845
0,85,honda,gas,std,four,sedan,fwd,front,96.5,175.4,62.5,54.1,2372,ohc,four,110,1bbl,3.15,3.58,9,86,5800,27,33,10295
0,85,honda,gas,std,four,sedan,fwd,front,96.5,175.4,65.2,54.1,2465,ohc,four,110,mpfi,3.15,3.58,9,101,5800,24,28,12945
1,107,honda,gas,std,two,sedan,fwd,front,96.5,169.1,66,51,2293,ohc,four,110,2bbl,3.15,3.58,9.1,100,5500,25,31,10345
0,127.84,isuzu,gas,std,four,sedan,rwd,front,94.3,170.7,61.8,53.5,2337,ohc,four,111,2bbl,3.31,3.23,8.5,78,4800,24,29,6785
2,127.84,isuzu,gas,std,two,hatchback,rwd,front,96,172.6,65.2,51.4,2734,ohc,four,119,spfi,3.43,3.23,9.2,90,5000,24,29,11048
0,145,jaguar,gas,std,four,sedan,rwd,front,113,199.6,69.6,52.8,4066,dohc,six,258,mpfi,3.63,4.17,8.1,176,4750,15,19,32250
0,127.84,jaguar,gas,std,four,sedan,rwd,front,113,199.6,69.6,52.8,4066,dohc,six,258,mpfi,3.63,4.17,8.1,176,4750,15,19,35550
0,127.84,jaguar,gas,std,two,sedan,rwd,front,102,191.7,70.6,47.8,3950,ohcv,twelve,326,mpfi,3.54,2.76,11.5,262,5000,13,17,36000
1,104,mazda,gas,std,two,hatchback,fwd,front,93.1,159.1,64.2,54.1,1890,ohc,four,91,2bbl,3.03,3.15,9,68,5000,30,31,5195
1,104,mazda,gas,std,two,hatchback,fwd,front,93.1,159.1,64.2,54.1,1900,ohc,four,91,2bbl,3.03,3.15,9,68,5000,31,38,6095
1,104,mazda,gas,std,two,hatchback,fwd,front,93.1,159.1,64.2,54.1,1905,ohc,four,91,2bbl,3.03,3.15,9,68,5000,31,38,6795
1,113,mazda,gas,std,four,sedan,fwd,front,93.1,166.8,64.2,54.1,1945,ohc,four,91,2bbl,3.03,3.15,9,68,5000,31,38,6695
1,113,mazda,gas,std,four,sedan,fwd,front,93.1,166.8,64.2,54.1,1950,ohc,four,91,2bbl,3.08,3.15,9,68,5000,31,38,7395
3,150,mazda,gas,std,two,hatchback,rwd,front,95.3,169,65.7,49.6,2380,rotor,two,70,4bbl,3.25172,3.356129,9.4,101,6000,17,23,10945
3,150,mazda,gas,std,two,hatchback,rwd,front,95.3,169,65.7,49.6,2380,rotor,two,70,4bbl,3.25172,3.356129,9.4,101,6000,17,23,11845
3,150,mazda,gas,std,two,hatchback,rwd,front,95.3,169,65.7,49.6,2385,rotor,two,70,4bbl,3.25172,3.356129,9.4,101,6000,17,23,13645
3,150,mazda,gas,std,two,hatchback,rwd,front,95.3,169,65.7,49.6,2500,rotor,two,80,mpfi,3.25172,3.356129,9.4,135,6000,16,23,15645
1,129,mazda,gas,std,two,hatchback,fwd,front,98.8,177.8,66.5,53.7,2385,ohc,four,122,2bbl,3.39,3.39,8.6,84,4800,26,32,8845
0,115,mazda,gas,std,four,sedan,fwd,front,98.8,177.8,66.5,55.5,2410,ohc,four,122,2bbl,3.39,3.39,8.6,84,4800,26,32,8495
1,129,mazda,gas,std,two,hatchback,fwd,front,98.8,177.8,66.5,53.7,2385,ohc,four,122,2bbl,3.39,3.39,8.6,84,4800,26,32,10595
0,115,mazda,gas,std,four,sedan,fwd,front,98.8,177.8,66.5,55.5,2410,ohc,four,122,2bbl,3.39,3.39,8.6,84,4800,26,32,10245
0,127.84,mazda,diesel,std,two,sedan,fwd,front,98.8,177.8,66.5,55.5,2443,ohc,four,122,idi,3.39,3.39,22.7,64,4650,36,42,10795
0,115,mazda,gas,std,four,hatchback,fwd,front,98.8,177.8,66.5,55.5,2425,ohc,four,122,2bbl,3.39,3.39,8.6,84,4800,26,32,11245
0,118,mazda,gas,std,four,sedan,rwd,front,104.9,175,66.1,54.4,2670,ohc,four,140,mpfi,3.76,3.16,8,120,5000,19,27,18280
0,127.84,mazda,diesel,std,four,sedan,rwd,front,104.9,175,66.1,54.4,2700,ohc,four,134,idi,3.43,3.64,22,72,4200,31,39,18344
-1,93,mercedes-benz,diesel,turbo,four,sedan,rwd,front,110,190.9,70.3,56.5,3515,ohc,five,183,idi,3.58,3.64,21.5,123,4350,22,25,25552
-1,93,mercedes-benz,diesel,turbo,four,wagon,rwd,front,110,190.9,70.3,58.7,3750,ohc,five,183,idi,3.58,3.64,21.5,123,4350,22,25,28248
0,93,mercedes-benz,diesel,turbo,two,hardtop,rwd,front,106.7,187.5,70.3,54.9,3495,ohc,five,183,idi,3.58,3.64,21.5,123,4350,22,25,28176
-1,93,mercedes-benz,diesel,turbo,four,sedan,rwd,front,115.6,202.6,71.7,56.3,3770,ohc,five,183,idi,3.58,3.64,21.5,123,4350,22,25,31600
-1,127.84,mercedes-benz,gas,std,four,sedan,rwd,front,115.6,202.6,71.7,56.5,3740,ohcv,eight,234,mpfi,3.46,3.1,8.3,155,4750,16,18,34184
3,142,mercedes-benz,gas,std,two,convertible,rwd,front,96.6,180.3,70.5,50.8,3685,ohcv,eight,234,mpfi,3.46,3.1,8.3,155,4750,16,18,35056
0,127.84,mercedes-benz,gas,std,four,sedan,rwd,front,120.9,208.1,71.7,56.7,3900,ohcv,eight,308,mpfi,3.8,3.35,8,184,4500,14,16,40960
1,127.84,mercedes-benz,gas,std,two,hardtop,rwd,front,112,199.2,72,55.4,3715,ohcv,eight,304,mpfi,3.8,3.35,8,184,4500,14,16,45400
1,127.84,mercury,gas,turbo,two,hatchback,rwd,front,102.7,178.4,68,54.8,2910,ohc,four,140,mpfi,3.78,3.12,8,175,5000,19,24,16503
2,161,mitsubishi,gas,std,two,hatchback,fwd,front,93.7,157.3,64.4,50.8,1918,ohc,four,92,2bbl,2.97,3.23,9.4,68,5500,37,41,5389
2,161,mitsubishi,gas,std,two,hatchback,fwd,front,93.7,157.3,64.4,50.8,1944,ohc,four,92,2bbl,2.97,3.23,9.4,68,5500,31,38,6189
2,161,mitsubishi,gas,std,two,hatchback,fwd,front,93.7,157.3,64.4,50.8,2004,ohc,four,92,2bbl,2.97,3.23,9.4,68,5500,31,38,6669
1,161,mitsubishi,gas,turbo,two,hatchback,fwd,front,93,157.3,63.8,50.8,2145,ohc,four,98,spdi,3.03,3.39,7.6,102,5500,24,30,7689
3,153,mitsubishi,gas,turbo,two,hatchback,fwd,front,96.3,173,65.4,49.4,2370,ohc,four,110,spdi,3.17,3.46,7.5,116,5500,23,30,9959
3,153,mitsubishi,gas,std,two,hatchback,fwd,front,96.3,173,65.4,49.4,2328,ohc,four,122,2bbl,3.35,3.46,8.5,88,5000,25,32,8499
3,127.84,mitsubishi,gas,turbo,two,hatchback,fwd,front,95.9,173.2,66.3,50.2,2833,ohc,four,156,spdi,3.58,3.86,7,145,5000,19,24,12629
3,127.84,mitsubishi,gas,turbo,two,hatchback,fwd,front,95.9,173.2,66.3,50.2,2921,ohc,four,156,spdi,3.59,3.86,7,145,5000,19,24,14869
3,127.84,mitsubishi,gas,turbo,two,hatchback,fwd,front,95.9,173.2,66.3,50.2,2926,ohc,four,156,spdi,3.59,3.86,7,145,5000,19,24,14489
1,125,mitsubishi,gas,std,four,sedan,fwd,front,96.3,172.4,65.4,51.6,2365,ohc,four,122,2bbl,3.35,3.46,8.5,88,5000,25,32,6989
1,125,mitsubishi,gas,std,four,sedan,fwd,front,96.3,172.4,65.4,51.6,2405,ohc,four,122,2bbl,3.35,3.46,8.5,88,5000,25,32,8189
1,125,mitsubishi,gas,turbo,four,sedan,fwd,front,96.3,172.4,65.4,51.6,2403,ohc,four,110,spdi,3.17,3.46,7.5,116,5500,23,30,9279
-1,137,mitsubishi,gas,std,four,sedan,fwd,front,96.3,172.4,65.4,51.6,2403,ohc,four,110,spdi,3.17,3.46,7.5,116,5500,23,30,9279
1,128,nissan,gas,std,two,sedan,fwd,front,94.5,165.3,63.8,54.5,1889,ohc,four,97,2bbl,3.15,3.29,9.4,69,5200,31,37,5499
1,128,nissan,diesel,std,two,sedan,fwd,front,94.5,165.3,63.8,54.5,2017,ohc,four,103,idi,2.99,3.47,21.9,55,4800,45,50,7099
1,128,nissan,gas,std,two,sedan,fwd,front,94.5,165.3,63.8,54.5,1918,ohc,four,97,2bbl,3.15,3.29,9.4,69,5200,31,37,6649
1,122,nissan,gas,std,four,sedan,fwd,front,94.5,165.3,63.8,54.5,1938,ohc,four,97,2bbl,3.15,3.29,9.4,69,5200,31,37,6849
1,103,nissan,gas,std,four,wagon,fwd,front,94.5,170.2,63.8,53.5,2024,ohc,four,97,2bbl,3.15,3.29,9.4,69,5200,31,37,7349
1,128,nissan,gas,std,two,sedan,fwd,front,94.5,165.3,63.8,54.5,1951,ohc,four,97,2bbl,3.15,3.29,9.4,69,5200,31,37,7299
1,128,nissan,gas,std,two,hatchback,fwd,front,94.5,165.6,63.8,53.3,2028,ohc,four,97,2bbl,3.15,3.29,9.4,69,5200,31,37,7799
1,122,nissan,gas,std,four,sedan,fwd,front,94.5,165.3,63.8,54.5,1971,ohc,four,97,2bbl,3.15,3.29,9.4,69,5200,31,37,7499
1,103,nissan,gas,std,four,wagon,fwd,front,94.5,170.2,63.8,53.5,2037,ohc,four,97,2bbl,3.15,3.29,9.4,69,5200,31,37,7999
2,168,nissan,gas,std,two,hardtop,fwd,front,95.1,162.4,63.8,53.3,2008,ohc,four,97,2bbl,3.15,3.29,9.4,69,5200,31,37,8249
0,106,nissan,gas,std,four,hatchback,fwd,front,97.2,173.4,65.2,54.7,2324,ohc,four,120,2bbl,3.33,3.47,8.5,97,5200,27,34,8949

ML/cnn/lab1/.gitignore vendored Normal file
@@ -0,0 +1,4 @@
test1/
test/
train/
*.zip

BIN
ML/cnn/lab1/cnn.h5 Normal file

Binary file not shown.

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

BIN
ML/cnn/lab1/mnist.h5 Normal file

Binary file not shown.

BIN
ML/cnn/lab1/mnist_plots.png Normal file

Binary file not shown.


Binary file not shown.

BIN
ML/cnn/lab1/plots.png Normal file

Binary file not shown.


BIN
ML/cnn/lab1/weights.h5 Normal file

Binary file not shown.

ML/lab6/lab6.py Normal file
@@ -0,0 +1,192 @@
import math
import pandas as pd
from sklearn import preprocessing
from sklearn.naive_bayes import GaussianNB
from sklearn import tree
# Needed by the sklearn comparison baselines below (missing in the original)
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split

filename = 'iris.csv'
needs_discretized = True
class_attr = 'class'
split = .67
classifier = 2
def main():
    # Read CSV
    df = pd.read_csv(filename)
    # Randomize Order
    df = df.sample(frac=1)
    # Discretize
    if needs_discretized:
        for col in df:
            if col != class_attr:
                df[col] = pd.qcut(df[col], q=5)
    # Split Data
    if split != 1:
        testing = df.head(-math.floor(len(df)*split))
        data = df.head(math.floor(len(df)*split))
    else:
        testing = data = df
    # Choose Classifier
    if classifier == 1:
        r1(data, testing)
    elif classifier == 2:
        decision_tree(data, testing)
    else:
        naive_bayes(data, testing)
def r1(data, testing):
    # One rule (1R) per attribute: map each attribute value to its majority class
    rules = dict()
    for attr in data:
        if attr != class_attr:
            rules[attr] = dict()
    # Tally value/class frequencies for each attribute
    for attr in data:
        if attr != class_attr:
            freq = {v: {c: 0 for c in data[class_attr].unique()} for v in data[attr].unique()}
            for i, sample in data.iterrows():
                freq[sample[attr]][sample[class_attr]] += 1
            attr_rule = dict()
            error = 0
            for (k, v) in freq.items():
                rule = max(v, key=v.get)
                for c in v:
                    if c != rule:
                        error += v[c]
                attr_rule[k] = rule
            error /= len(data)
            rules[attr] = (attr_rule, error)
    # Select the attribute whose rule has the lowest error
    best_attr = min(rules, key=lambda x: rules[x][1])
    rule = rules[best_attr][0]
    print(f'R1 chose {best_attr}')
    print_tree(rule)
    print('\n---')
    confusion = {v: {c: 0 for c in data[class_attr].unique()} for v in data[class_attr].unique()}
    correct = 0
    for i, row in testing.iterrows():
        confusion[row[class_attr]][rule[row[best_attr]]] += 1
        if row[class_attr] == rule[row[best_attr]]:
            correct += 1
    print("Confusion Matrix")
    for (actual, guess) in confusion.items():
        print(guess)
    print()
    print(f'Accuracy: {round((correct/len(testing))*100, 3)}%')
def decision_tree(data, testing):
    print(f'Decision Tree examined {len(data)} samples and built the following tree:', end='')
    rules = recur_tree(data)
    print_tree(rules)
    print('\n---')
    print("Confusion Matrix")
    confusion, correct = {v: {c: 0 for c in data[class_attr].unique()} for v in data[class_attr].unique()}, 0
    for i, row in testing.iterrows():
        guess = test_tree(row, rules)
        confusion[row[class_attr]][guess] += 1
        if row[class_attr] == guess:
            correct += 1
    for (actual, guess) in confusion.items():
        print(guess)
    print()
    print(f'Accuracy: {round((correct/len(testing))*100, 3)}%')
    # Compare against sklearn's DecisionTreeClassifier on the same split ratio
    dtc = tree.DecisionTreeClassifier()
    x, y = load_iris(return_X_y=True)
    x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=(1-split), random_state=0)
    y_pred = dtc.fit(x_train, y_train).predict(x_test)
    print(f'sklearn accuracy: {sum(y_pred == y_test)*100/len(y_pred)}%')
def recur_tree(data):
    rules = {}
    # Entropy of the current node; zero means the node is pure
    info = calc_info(data)
    if info == 0:
        return data[class_attr].unique()[0]
    # Info gain per attr
    # gain = {attr: sum([info - calc_info(data[data[attr] == v]) for v in data[attr].unique()]) for attr in data if attr != class_attr}
    gain = {attr: 0 for attr in data if attr != class_attr}
    for attr in gain:
        for v in data[attr].unique():
            gain[attr] += info - calc_info(data[data[attr] == v])
    # Note (added): textbook ID3 weights each subset's entropy by |subset|/|data|;
    # this lab sums the unweighted differences instead.
    # Choose highest info gain
    attr = max(gain, key=gain.get)
    if gain[attr] == 0:
        return data[class_attr].unique()[0]
    # Split data based on values of attr and recur
    rules[attr] = {}
    for v in data[attr].unique():
        rules[attr][v] = recur_tree(data[data[attr] == v])
    return rules
def calc_info(data):
    # Entropy of the class distribution: sum of -p*log2(p) over class proportions
    return abs(sum([(count/len(data))*math.log((count/len(data)), 2) for count in data[class_attr].value_counts()]))
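# Worked example (added note): at a node with class counts [9, 5],
# calc_info returns -(9/14)*log2(9/14) - (5/14)*log2(5/14) ≈ 0.940 bits,
# the quantity recur_tree compares before and after each candidate split.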
def print_tree(rules, indent=0):
    if type(rules) != dict:
        return rules
    for key in rules.keys():
        print('\n' + ' '*3*indent + f'* {key}', end='')
        s = print_tree(rules[key], indent + 1)
        if s:
            print(f' --> {s}', end='')
    return None

def test_tree(row, rules):
    if type(rules) != dict:
        return rules
    attr = list(rules.keys())[0]
    return test_tree(row, rules[attr][row[attr]])
def naive_bayes(data, testing):
    confusion, correct = {v: {c: 0 for c in data[class_attr].unique()} for v in data[class_attr].unique()}, 0
    class_freq = {c: (len(data[data[class_attr] == c])) for c in data[class_attr].unique()}
    for i, row in testing.iterrows():
        # Start from the class priors, then multiply in per-attribute relative frequencies
        probs = {c: (len(data[data[class_attr] == c]))/len(data) for c in data[class_attr].unique()}
        for attr in data:
            if attr != class_attr:
                same_value = data[data[attr] == row[attr]]
                for c in class_freq.keys():
                    probs[c] *= len(same_value[same_value[class_attr] == c])/class_freq[c]
        guess = max(probs, key=probs.get)
        confusion[row[class_attr]][guess] += 1
        if row[class_attr] == guess:
            correct += 1
    print(f'Naive Bayes examined {len(data)} samples')
    print('---')
    print("Confusion Matrix")
    for (actual, guess) in confusion.items():
        print(guess)
    print()
    print(f'Accuracy: {round((correct/len(testing))*100, 3)}%')
    # Compare against sklearn's GaussianNB
    nb = GaussianNB()
    x, y = load_iris(return_X_y=True)
    x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=(1-split), random_state=0)
    y_pred = nb.fit(x_train, y_train).predict(x_test)
    print(f'sklearn accuracy: {sum(y_pred == y_test)*100/len(y_pred)}%')

if __name__ == '__main__':
    main()

@@ -0,0 +1,50 @@
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier

def main():
    filename = 'iris.csv'
    class_attr = 'class'
    k = 10
    split = 0.2
    train, test = load_data(split, filename)
    knn(train, test, k, class_attr)

def knn(train, test, k, class_attr, output=True):
    confusion, correct = {v: {c: 0 for c in test[class_attr].unique()} for v in test[class_attr].unique()}, 0
    for index, row in test.iterrows():
        prediction = predict(train, row, k, class_attr)
        confusion[row[class_attr]][prediction] += 1
        if prediction == row[class_attr]:
            correct += 1
    acc = round((correct/len(test))*100, 3)
    if output:
        print(f'KNN examined {len(test)} samples')
        print('---')
        print("Confusion Matrix")
        for (actual, guess) in confusion.items():
            print(guess)
        print()
        print(f'Accuracy: {acc}%')
    return acc

def predict(train, point, k, class_attr):
    # Majority vote over the k nearest training rows
    # (the original took max() of the label tuple, a lexicographic max, not a vote)
    attributes = [i for i in train.columns.values if i != class_attr]
    neighbors = sorted([(distance(point[attributes], row[attributes]), row[class_attr])
                        for index, row in train.iterrows()], key=lambda x: x[0])[:k]
    labels = [label for _, label in neighbors]
    return max(set(labels), key=labels.count)

def distance(test, train):
    # Euclidean distance between two attribute vectors
    return sum([(test[i] - train[i])**2 for i in range(len(test))])**0.5

def load_data(split, filename='iris.csv'):
    df = pd.read_csv(filename)
    train, test = train_test_split(df, test_size=split)
    return train, test

if __name__ == '__main__':
    main()
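# Quick check of the helpers (added sketch, assuming iris.csv is present):
#   distance([1, 2], [4, 6]) == ((1-4)**2 + (2-6)**2)**0.5 == 5.0
#   knn(train, test, k=10, class_attr='class', output=False) just returns the accuracy in percent.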

@@ -0,0 +1,31 @@
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier

def main():
    filename = 'iris.csv'
    class_attr = 'class'
    k = 10
    split = 0.2
    train, test = load_data(split, filename)
    sklearn_knn(train, test, k, class_attr)

def sklearn_knn(train, test, k, class_attr):
    # Parameter order fixed: fit on the training split, score on the test split
    attributes = [i for i in train.columns.values if i != class_attr]
    knn = KNeighborsClassifier(n_neighbors=k)
    knn.fit(train[attributes], train[class_attr])
    acc = round(knn.score(test[attributes], test[class_attr])*100, 3)
    print(f'sklearn Accuracy: {acc}%')
    return acc

def load_data(split, filename='iris.csv'):
    df = pd.read_csv(filename)
    train, test = train_test_split(df, test_size=split)
    return train, test

if __name__ == '__main__':
    main()

@@ -0,0 +1,54 @@
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier

def main():
    filename = 'iris.csv'
    class_attr = 'class'
    k = 10
    split = 0.2
    train, test = load_data(split)
    knn(train, test, k, class_attr)
    sklearn_knn(train, test, k, class_attr)

def knn(train, test, k, class_attr):
    confusion, correct = {v: {c: 0 for c in test[class_attr].unique()} for v in test[class_attr].unique()}, 0
    for index, row in test.iterrows():
        prediction = predict(train, row, k, class_attr)
        confusion[row[class_attr]][prediction] += 1
        if prediction == row[class_attr]:
            correct += 1
    print(f'KNN examined {len(test)} samples')
    print('---')
    print("Confusion Matrix")
    for (actual, guess) in confusion.items():
        print(guess)
    print()
    print(f'Accuracy: {round((correct/len(test))*100, 3)}%')

def predict(train, point, k, class_attr):
    # Majority vote over the k nearest training rows
    # (the original took max() of the label tuple, a lexicographic max, not a vote)
    attributes = [i for i in train.columns.values if i != class_attr]
    neighbors = sorted([(distance(point[attributes], row[attributes]), row[class_attr])
                        for index, row in train.iterrows()], key=lambda x: x[0])[:k]
    labels = [label for _, label in neighbors]
    return max(set(labels), key=labels.count)

def distance(test, train):
    # Euclidean distance between two attribute vectors
    return sum([(test[i] - train[i])**2 for i in range(len(test))])**0.5

def sklearn_knn(train, test, k, class_attr):
    # Parameter order fixed: fit on the training split, score on the test split
    attributes = [i for i in train.columns.values if i != class_attr]
    knn = KNeighborsClassifier(n_neighbors=k)
    knn.fit(train[attributes], train[class_attr])
    print(f'sklearn Accuracy: {round(knn.score(test[attributes], test[class_attr])*100, 3)}%')

def load_data(split):
    df = pd.read_csv('iris.csv')
    train, test = train_test_split(df, test_size=split)
    return train, test

if __name__ == '__main__':
    main()

ML/lab7/best_k.py Normal file
@@ -0,0 +1,19 @@
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from Rushil_Umaretiya_Lab7P1 import knn, load_data
from math import sqrt

def main():
    train, test = load_data(0.2, 'iris.csv')
    class_attr = 'class'
    # Accuracy for each k from 1 up to sqrt(|train|), a common upper bound for k
    x = [knn(train, test, k, class_attr, output=False) for k in range(1, round(sqrt(len(train))))]
    y = list(range(1, len(x) + 1))
    print(len(x) == len(y))

if __name__ == '__main__':
    main()
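# Presumably the intended next step (added sketch): plot accuracy against k.
# plt.plot(y, x)
# plt.xlabel('k')
# plt.ylabel('accuracy (%)')
# plt.show()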

ML/nn/iris_perceptron.py Normal file
@@ -0,0 +1,78 @@
import random
from matplotlib import pyplot as plt
from sklearn import datasets
from sklearn.linear_model import Perceptron

def load_data():
    # Keep only the first two features and drop the last 50 rows (virginica)
    # so the problem is binary and linearly separable
    iris = datasets.load_iris()
    X = iris.data[:, :2].tolist()[:-50]
    y = iris.target.tolist()[:-50]
    temp = list(zip(X, y))
    random.shuffle(temp)
    return zip(*temp)

def activation(n):
    # return 1 / (1 + math.exp(-n)) # log-sigmoid
    return 1 if n > 0 else 0 # hard limit

def perceptron(X, t, epochs=100, learning_rate=.1):
    b = random.uniform(-10, 10)
    w = [random.uniform(-10, 10), random.uniform(-10, 10)]
    print("initial: ", w, b)
    for epoch in range(epochs):
        for i in range(len(X)):
            # weighted sum, then the perceptron update rule: w += lr * x * error
            n = X[i][0]*w[0] + X[i][1]*w[1] + b
            e = t[i] - activation(n)
            w[0] = w[0] + learning_rate*X[i][0]*e
            w[1] = w[1] + learning_rate*X[i][1]*e
            b = b + learning_rate*e
    return w, b

def test(w, b, X, y):
    correct = 0
    for i in range(len(X)):
        n = X[i][0]*w[0] + X[i][1]*w[1] + b
        e = y[i] - activation(n)
        if e == 0: correct += 1
    return correct/len(X)

def main():
    X, y = load_data()
    X_0 = [x[0] for x in X]
    X_1 = [x[1] for x in X]
    test_X, test_y, train_X, train_y = X[:20], y[:20], X[20:], y[20:]
    plt.title("iris")
    plt.scatter(X_0, X_1, c=y, cmap='brg')
    plt.show()
    w, b = perceptron(train_X, train_y)
    print("final: ", w, b)
    plt.title("prediction")
    plt.xlim(min(X_0), max(X_0))
    plt.ylim(min(X_1), max(X_1))
    # decision boundary: w[0]*x + w[1]*y + b = 0
    plt.axline((0, -b/w[1]), slope=(-w[0]/w[1]))
    plt.scatter(X_0, X_1, c=y, cmap='brg')
    plt.show()
    acc = test(w, b, test_X, test_y)
    print("testing acc(%):", acc*100)
    print('---')
    clf = Perceptron(tol=1e-3, random_state=0)
    clf.fit(train_X, train_y)
    print("scikit acc(%):", clf.score(test_X, test_y)*100)

if __name__ == "__main__":
    main()
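Note on the axline call above: the learned boundary is the set of points where w[0]*x + w[1]*y + b = 0. Solving for y gives y = -(w[0]/w[1])*x - b/w[1], so the line passes through (0, -b/w[1]) with slope -w[0]/w[1] — exactly the two arguments handed to plt.axline.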

63
ML/nn/lab4/lab4.py Normal file
View File

@ -0,0 +1,63 @@
import matplotlib.pyplot as plt
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
from sklearn.neural_network import MLPClassifier
from sklearn.metrics import accuracy_score

def nn(data, hidden_layer_sizes=(100,), max_iter=200, learning_rate=0.1, return_model=False):
    clf = MLPClassifier(hidden_layer_sizes=hidden_layer_sizes, max_iter=max_iter, learning_rate_init=learning_rate, verbose=False, random_state=1)
    clf.fit(data[0], data[1])
    y_pred = clf.predict(data[2])
    accuracy = accuracy_score(data[3], y_pred)
    if return_model:
        return clf, accuracy
    else:
        return accuracy

def load_data():
    iris = load_iris()
    X, y = iris.data, iris.target
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)
    return (X_train, y_train, X_test, y_test)

def main():
    data = load_data()
    # print("acc(%):", nn(data, hidden_layer_sizes=(3,), max_iter=10))
    # a)
    # X = list(range(10))
    # y = [nn(data, hidden_layer_sizes=tuple([10]*x), max_iter=200)*100 for x in X]
    # plt.plot(X,y)
    # plt.xlabel("# of hidden layers")
    # plt.ylabel("accuracy")
    # plt.show()
    # b)
    X = list(range(1, 201, 10))
    y = [nn(data, hidden_layer_sizes=(100,), max_iter=x)*100 for x in X]
    plt.plot(X, y)
    plt.xlabel("MAX_EPOCH")
    plt.ylabel("accuracy")
    plt.show()
    # c)
    # clf, accuracy = nn(data, hidden_layer_sizes=(100,), max_iter=10, learning_rate=.01, return_model=True)
    # print(clf.coefs_[0][0][0])
    # y = [-0.029861371725764158,-0.02264642442306576,-0.026370648234539128,-0.03010906327805069,-0.0369403344661864,-0.04425032195146008,-0.04701612022155189,-0.0480744552027113,-0.04924013779012516,-0.04613149635077437]
    # X = list(range(10))
    # plt.plot(X,y)
    # plt.xlabel("epoch")
    # plt.ylabel("weight")
    # plt.show()

if __name__ == "__main__":
    main()

125
ML/nn/lab6/bp.py Normal file
View File

@ -0,0 +1,125 @@
from matplotlib import pyplot as plt
import time
import pandas as pd
import numpy as np

class NeuralNetwork:
    def __init__(self, layers=(784, 128, 64, 10,), learning_rate=0.1):
        self.W = []
        self.b = []
        self.layers = layers
        self.learning_rate = learning_rate
        # Scale each weight matrix by 1/sqrt(fan_in) to keep activations stable
        for i in range(len(layers) - 2):
            w = np.random.randn(layers[i], layers[i + 1])
            self.W.append(w / np.sqrt(layers[i]))
        w = np.random.randn(layers[-2], layers[-1])
        self.W.append(w / np.sqrt(layers[-2]))
        self.b = [np.random.uniform(-1, 1, (1, layers[i+1])) for i in range(len(layers)-1)]

    def sigmoid(self, x_raw, derivative=False):
        if derivative:
            x = self.sigmoid(x_raw)
            return x * (1 - x)
        else:
            x = np.clip(x_raw, -500, 500)  # avoid overflow in exp
            return 1.0 / (1 + np.exp(-x))

    def forward(self, x):
        a = [np.atleast_2d(x)]
        for layer in range(len(self.W)):
            a.append(self.sigmoid(np.dot(a[layer], self.W[layer]) + self.b[layer]))
        return a

    def backward(self, x, target, out):
        # One-hot encode the target digit
        y = np.zeros((1, 10))
        y[0][int(target)] = 1
        error = y - out[-1]
        # Output-layer delta, then backpropagate deltas through the hidden layers
        e = [error * self.sigmoid(out[-1], derivative=True)]
        for layer in range(len(self.W) - 1, 0, -1):
            e.append(np.dot(e[-1], self.W[layer].T) * self.sigmoid(out[layer], derivative=True))
        e.reverse()
        for layer in range(len(self.W)):
            self.W[layer] += self.learning_rate * np.dot(out[layer].T, e[layer])
            self.b[layer] += self.learning_rate * e[layer]
        return np.sum(np.square(error))

    def partial_fit(self, x, target):
        out = self.forward(x)
        loss = self.backward(x, target, out)
        return loss

    def accuracy(self, X, y):
        predictions = []
        for k in range(X.shape[0]):
            out = self.forward(X[k])
            pred = np.argmax(out[-1])
            predictions.append(pred == int(y[k]))
        return np.mean(predictions)

    def fit(self, X, y, X_test, y_test, epochs=1000):
        accuracy = []
        losses = []
        for epoch in range(epochs):
            start = time.time()
            loss_sum = 0
            for k in range(len(X)):
                if k % 10000 == 0:
                    print(f'{k} elements seen...')
                loss = self.partial_fit(X[k], y[k])
                loss_sum += loss
            losses.append(loss_sum / len(X))
            acc = self.accuracy(X_test, y_test)
            accuracy.append(acc)
            end = time.time()
            print("Epoch: {}, Accuracy: {}%".format(epoch, acc*100))
            print("Time: {}".format(end - start))
            print()
        return accuracy, losses

def main():
    epochs = 10
    print("loading data...")
    test = pd.read_csv('mnist_test.csv')
    train = pd.read_csv('mnist_train.csv')
    X_train = train.iloc[:, 1:].to_numpy()
    y_train = train.iloc[:, 0].to_numpy()
    X_test = test.iloc[:, 1:].to_numpy()
    y_test = test.iloc[:, 0].to_numpy()
    print("data loaded!")
    print()
    nn = NeuralNetwork()
    accuracy, loss = nn.fit(X_train, y_train, X_test, y_test, epochs=epochs)
    plt.plot(list(range(1, epochs+1)), accuracy)
    plt.title("accuracy vs epochs")
    plt.show()
    plt.plot(list(range(1, epochs+1)), loss)
    plt.title("loss vs epochs")
    plt.show()

if __name__ == '__main__':
    main()
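A quick behavioral sanity check for the backprop above — a minimal sketch, assuming this file is importable as bp; the layer sizes, seed, and sample are arbitrary:

import numpy as np
from bp import NeuralNetwork

np.random.seed(0)
nn = NeuralNetwork(layers=(4, 3, 10), learning_rate=0.1)
x, target = np.random.rand(4), 7
before = nn.partial_fit(x, target)  # loss at the initial weights (this call also updates them)
after = nn.partial_fit(x, target)   # loss after one gradient step
print(f'loss before: {before:.4f}, after one step: {after:.4f}')  # should usually drop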

56
ML/nn/lab7/lab7/lab7.py Normal file
View File

@ -0,0 +1,56 @@
from sklearn import datasets
from sklearn.model_selection import train_test_split
from sklearn.neural_network import MLPClassifier
import numpy as np
import matplotlib.pyplot as plt

def main():
    seed = 42
    layer_sizes = (5,)
    # layer_sizes = tuple([5]*i)
    activation = "relu"
    X, y = datasets.make_circles(n_samples=1000, random_state=seed)
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.5, random_state=seed)
    clf = MLPClassifier(hidden_layer_sizes=layer_sizes, activation=activation, max_iter=500, random_state=seed, early_stopping=False)
    # Initialize statistic lists
    X = list(range(1, 500+1))
    y_loss, y_acc, train_acc = [], [], []
    for k in range(500):
        print(f"Epoch {k}")
        # Partial fit dataset by doing forward pass and then backwards pass
        clf = clf.partial_fit(X_train, y_train, classes=np.unique(y_train))
        # Add loss and accuracy values to statistics
        y_loss.append(clf.loss_)
        train_acc.append(clf.score(X_train, y_train))
        y_acc.append(clf.score(X_test, y_test))
        print(f"accuracy: {clf.score(X_test, y_test)*100}%")
    # Plot epochs vs loss
    plt.plot(X, y_loss)
    plt.xlabel("epochs")
    plt.ylabel("loss")
    plt.show()
    # Plot epochs vs accuracy
    plt.plot(X, train_acc, label="train")
    plt.plot(X, y_acc, label="test")
    plt.legend(loc='upper left')
    plt.xlabel("epochs")
    plt.ylabel("acc")
    plt.title("accuracy vs epochs")
    plt.show()

if __name__ == "__main__":
    main()

View File

@ -0,0 +1,69 @@
from sklearn import datasets
from sklearn.model_selection import train_test_split
from sklearn.neural_network import MLPClassifier
import numpy as np
import matplotlib.pyplot as plt

def main():
    seed = 42
    activation = "relu"
    X, y = datasets.make_circles(n_samples=1000, random_state=seed)
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.5, random_state=seed)
    # Initialize statistic lists
    X = list(range(1, 10))
    y_loss, y_acc, train_acc = [], [], []
    for k in range(1, 10):
        print(f"Layer Size (n={k})")
        # One classifier per depth: k hidden layers of 5 neurons each
        layer_sizes = tuple([5]*k)
        clf = MLPClassifier(hidden_layer_sizes=layer_sizes, activation=activation, max_iter=500, random_state=seed, early_stopping=False)
        curr_loss = []
        for epoch in range(500):
            print(f"Epoch {epoch}")
            # Partial fit dataset by doing forward pass and then backwards pass
            clf = clf.partial_fit(X_train, y_train, classes=np.unique(y_train))
            curr_loss.append(clf.loss_)
        # Record mean loss and final accuracies for this depth
        y_loss.append(sum(curr_loss)/len(curr_loss))
        train_acc.append(clf.score(X_train, y_train))
        y_acc.append(clf.score(X_test, y_test))
    # Plot layers vs loss
    plt.plot(X, y_loss)
    plt.xlabel("layer number")
    plt.ylabel("loss")
    plt.title("loss vs layer number")
    plt.show()
    # Plot layers vs accuracy
    plt.plot(X, train_acc, label="train")
    plt.plot(X, y_acc, label="test")
    plt.legend(loc='upper left')
    plt.xlabel("layer number")
    plt.ylabel("acc")
    plt.title("accuracy vs layer number")
    plt.show()

if __name__ == "__main__":
    main()

View File

@ -0,0 +1,56 @@
from sklearn import datasets
from sklearn.model_selection import train_test_split
from sklearn.neural_network import MLPClassifier
import numpy as np
import matplotlib.pyplot as plt

def main():
    seed = 42
    layer_sizes = (5,)
    # layer_sizes = tuple([5]*i)
    activation = "relu"
    X, y = datasets.make_circles(n_samples=1000, random_state=seed)
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.5, random_state=seed)
    clf = MLPClassifier(hidden_layer_sizes=layer_sizes, activation=activation, max_iter=500, random_state=seed, early_stopping=False)
    # Initialize statistic lists
    X = list(range(1, 500+1))
    y_loss, y_acc, train_acc = [], [], []
    for k in range(500):
        print(f"Epoch {k}")
        # Partial fit dataset by doing forward pass and then backwards pass
        clf = clf.partial_fit(X_train, y_train, classes=np.unique(y_train))
        # Add loss and accuracy values to statistics
        y_loss.append(clf.loss_)
        train_acc.append(clf.score(X_train, y_train))
        y_acc.append(clf.score(X_test, y_test))
        print(f"accuracy: {clf.score(X_test, y_test)*100}%")
    # Plot epochs vs loss
    plt.plot(X, y_loss)
    plt.xlabel("epochs")
    plt.ylabel("loss")
    plt.show()
    # Plot epochs vs accuracy
    plt.plot(X, train_acc, label="train")
    plt.plot(X, y_acc, label="test")
    plt.legend(loc='upper left')
    plt.xlabel("epochs")
    plt.ylabel("acc")
    plt.title("accuracy vs epochs")
    plt.show()

if __name__ == "__main__":
    main()

131
ML/nn/lab8/lab8.py Normal file
View File

@ -0,0 +1,131 @@
import time
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd

class NeuralNetwork:
    def __init__(self, layers=(784, 128, 64, 10,), learning_rate=0.001, clipping=False, norm=False):
        self.W = []
        self.b = []
        self.layers = layers
        self.learning_rate = learning_rate
        self.clipping = clipping
        self.norm = norm
        for i in range(len(layers) - 2):
            w = np.random.randn(layers[i], layers[i + 1])
            self.W.append(w / np.sqrt(layers[i]))
        w = np.random.randn(layers[-2], layers[-1])
        self.W.append(w / np.sqrt(layers[-2]))
        self.b = [np.random.uniform(-1, 1, (1, layers[i+1])) for i in range(len(layers)-1)]

    def sigmoid(self, x_raw, derivative=False):
        if derivative:
            x = self.sigmoid(x_raw)
            return x * (1 - x)
        else:
            x = np.clip(x_raw, -500, 500)
            return 1.0 / (1 + np.exp(-x))

    def forward(self, x):
        a = [np.atleast_2d(x)]
        for layer in range(len(self.W)):
            a.append(self.sigmoid(np.dot(a[layer], self.W[layer]) + self.b[layer]))
        return a

    def backward(self, x, target, out):
        y = np.zeros((1, 10))
        y[0][int(target)] = 1
        error = y - out[-1]
        e = [error * self.sigmoid(out[-1], derivative=True)]
        for layer in range(len(self.W) - 1, 0, -1):
            a = np.dot(e[-1], self.W[layer].T) * self.sigmoid(out[layer], derivative=True)
            # Optionally tame exploding gradients: rescale to unit norm, or clip elementwise
            if self.norm: e.append(a / np.linalg.norm(a))
            elif self.clipping: e.append(np.clip(a, -1, 1))
            else: e.append(a)
        e.reverse()
        for layer in range(len(self.W)):
            self.W[layer] += self.learning_rate * np.dot(out[layer].T, e[layer])
            self.b[layer] += self.learning_rate * e[layer]
        return np.sum(np.square(error))

    def partial_fit(self, x, target):
        out = self.forward(x)
        loss = self.backward(x, target, out)
        return loss

    def accuracy(self, X, y):
        predictions = []
        for k in range(X.shape[0]):
            out = self.forward(X[k])
            pred = np.argmax(out[-1])
            predictions.append(pred == int(y[k]))
        return np.mean(predictions)

    def fit(self, X, y, X_test, y_test, epochs=1000):
        accuracy = []
        losses = []
        for epoch in range(epochs):
            start = time.time()
            loss_sum = 0
            for k in range(len(X)):
                if k % 10000 == 0:
                    print(f'{k} elements seen...')
                loss = self.partial_fit(X[k], y[k])
                loss_sum += loss
            losses.append(loss_sum / len(X))
            acc = self.accuracy(X_test, y_test)
            accuracy.append(acc)
            end = time.time()
            print("Epoch: {}, Accuracy: {}%".format(epoch, acc*100))
            print("Time: {}".format(end - start))
            print()
        return accuracy, losses

def main():
    epochs = 5
    print("loading data...")
    test = pd.read_csv('mnist_test.csv')
    train = pd.read_csv('mnist_train.csv')
    X_train = train.iloc[:, 1:].to_numpy()
    y_train = train.iloc[:, 0].to_numpy()
    X_test = test.iloc[:, 1:].to_numpy()
    y_test = test.iloc[:, 0].to_numpy()
    print("data loaded!")
    print()
    nn = NeuralNetwork()
    # nn = NeuralNetwork(clipping=True) # Use clipping
    # nn = NeuralNetwork(norm=True) # Use norm clipping
    # for w in nn.W: w *= 100000 # Create exploding gradient problem
    accuracy, loss = nn.fit(X_train, y_train, X_test, y_test, epochs=epochs)
    plt.plot(list(range(1, epochs+1)), accuracy)
    plt.title("accuracy vs epochs")
    plt.show()
    plt.plot(list(range(1, epochs+1)), loss)
    plt.title("loss vs epochs")
    plt.show()

if __name__ == '__main__':
    main()
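To see the difference between the two gradient controls in backward(), here is a tiny numpy demo on a toy vector: np.clip bounds each component independently, while dividing by the norm rescales the whole vector to unit length and preserves its direction.

import numpy as np

g = np.array([3.0, -40.0, 0.5])
clipped = np.clip(g, -1, 1)       # -> [ 1., -1., 0.5]: each component bounded on its own
normed = g / np.linalg.norm(g)    # unit vector, same direction as g
print(clipped, normed, np.linalg.norm(normed))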

57
ML/nn/mnist/mnist.py Normal file
View File

@ -0,0 +1,57 @@
from matplotlib import pyplot as plt
import pandas as pd
import numpy as np
from sklearn.neural_network import MLPClassifier

def main():
    # NUMBER OF MAX EPOCHS
    epochs = 80
    # Load data from csv
    test = pd.read_csv('mnist_test.csv')
    train = pd.read_csv('mnist_train.csv')
    # Split data to test and train, X and y
    X_train = train.iloc[:, 1:].to_numpy()
    y_train = train.iloc[:, 0].to_numpy()
    X_test = test.iloc[:, 1:].to_numpy()
    y_test = test.iloc[:, 0].to_numpy()
    # Initialize Multi-Layer Perceptron Classifier for partial fitting
    clf = MLPClassifier(hidden_layer_sizes=(128, 64,), max_iter=epochs, learning_rate_init=0.001, verbose=True, random_state=1, early_stopping=False)
    # Initialize statistic lists
    X = list(range(1, epochs+1))
    y_loss, y_acc = [], []
    for k in range(epochs):
        print(f"Epoch {k}")
        # Partial fit dataset by doing forward pass and then backwards pass
        clf = clf.partial_fit(X_train, y_train, classes=np.unique(y_train))
        # Add loss and accuracy values to statistics
        y_loss.append(clf.loss_)
        y_acc.append(clf.score(X_test, y_test))
        print(f"accuracy: {clf.score(X_test, y_test)*100}%")
    # Plot epochs vs loss
    plt.plot(X, y_loss)
    plt.xlabel("epochs")
    plt.ylabel("loss")
    plt.show()
    # Plot epochs vs accuracy
    plt.plot(X, y_acc)
    plt.xlabel("epochs")
    plt.ylabel("acc")
    plt.show()

if __name__ == '__main__':
    main()
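The partial_fit loop above runs one epoch per call (classes=np.unique(y_train) is required on the first call) so that per-epoch test accuracy can be recorded. When only the training-loss curve is needed, a single fit() is simpler — a minimal sketch assuming the same CSV layout as above:

import pandas as pd
import matplotlib.pyplot as plt
from sklearn.neural_network import MLPClassifier

train = pd.read_csv('mnist_train.csv')   # label in column 0, pixels after
X_train = train.iloc[:, 1:].to_numpy()
y_train = train.iloc[:, 0].to_numpy()
clf = MLPClassifier(hidden_layer_sizes=(128, 64,), max_iter=80,
                    learning_rate_init=0.001, random_state=1, early_stopping=False)
clf.fit(X_train, y_train)   # sklearn records training loss internally
plt.plot(clf.loss_curve_)   # one entry per training epoch
plt.xlabel("epochs")
plt.ylabel("loss")
plt.show()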

86
ML/nn/nn.py Normal file
View File

@ -0,0 +1,86 @@
import random
import math
from matplotlib import pyplot as plt

points = 20
test_points = int(points/4)

def main():
    # generate test slope
    slope = random.uniform(0.5, 2)
    # generate data
    X, y, t = gen_data(slope)
    plt.title("dataset")
    plt.xlim(0, 1)
    plt.ylim(0, 1)
    plt.scatter(X, y, c=t, cmap='brg')
    plt.axline((0, 0), slope=slope)
    plt.show()
    # train perceptron
    w, b = perceptron(X, y, t)
    print("final: ", w, b)
    plt.title("prediction")
    plt.xlim(0, 1)
    plt.ylim(0, 1)
    plt.scatter(X, y, c=t, cmap='brg')
    plt.axline((0, -b/w[1]), slope=(-w[0]/w[1]))
    plt.show()
    print("acc(%):", test(w, b, slope)*100)

def gen_data(slope):
    # Random points in the unit square, labeled by which side of the line they fall on
    X = [random.uniform(0, 1) for x in range(points)]
    y = [random.uniform(0, 1) for x in range(points)]
    t = [0 if (y[i]/X[i] < slope) else 1 for i in range(points)]
    return X, y, t

def activation(n):
    # return 1 / (1 + math.exp(-n)) # log-sigmoid
    return 1 if n > 0 else 0 # hard limit

def perceptron(X, y, t):
    b = random.uniform(0, 1)
    w = [random.uniform(-1, 1), random.uniform(-1, 1)]
    print("initial: ", w, b)
    learning_rate = 1
    epochs = 10
    for i in range(epochs):
        for j in range(points):
            # calculate the sum
            n = X[j]*w[0] + y[j]*w[1] + b
            e = t[j] - activation(n)
            w[0] = w[0] + learning_rate*X[j]*e
            w[1] = w[1] + learning_rate*y[j]*e
            b = b + learning_rate*e
    return w, b

def test(w, b, slope):
    X = [random.uniform(0, 1) for x in range(test_points)]
    y = [random.uniform(0, 1) for x in range(test_points)]
    t = [0 if (y[i]/X[i] < slope) else 1 for i in range(test_points)]
    correct = 0
    for j in range(test_points):
        n = X[j]*w[0] + y[j]*w[1] + b
        e = t[j] - activation(n)
        if e == 0: correct += 1
    return correct/test_points

if __name__ == "__main__":
    main()

View File

@ -1,151 +0,0 @@
sepallength,sepalwidth,petallength,petalwidth,class
5.1,3.5,1.4,0.2,Iris-setosa
4.9,3,1.4,0.2,Iris-setosa
4.7,3.2,1.3,0.2,Iris-setosa
4.6,3.1,1.5,0.2,Iris-setosa
5,3.6,1.4,0.2,Iris-setosa
5.4,3.9,1.7,0.4,Iris-setosa
4.6,3.4,1.4,0.3,Iris-setosa
5,3.4,1.5,0.2,Iris-setosa
4.4,2.9,1.4,0.2,Iris-setosa
4.9,3.1,1.5,0.1,Iris-setosa
5.4,3.7,1.5,0.2,Iris-setosa
4.8,3.4,1.6,0.2,Iris-setosa
4.8,3,1.4,0.1,Iris-setosa
4.3,3,1.1,0.1,Iris-setosa
5.8,4,1.2,0.2,Iris-setosa
5.7,4.4,1.5,0.4,Iris-setosa
5.4,3.9,1.3,0.4,Iris-setosa
5.1,3.5,1.4,0.3,Iris-setosa
5.7,3.8,1.7,0.3,Iris-setosa
5.1,3.8,1.5,0.3,Iris-setosa
5.4,3.4,1.7,0.2,Iris-setosa
5.1,3.7,1.5,0.4,Iris-setosa
4.6,3.6,1,0.2,Iris-setosa
5.1,3.3,1.7,0.5,Iris-setosa
4.8,3.4,1.9,0.2,Iris-setosa
5,3,1.6,0.2,Iris-setosa
5,3.4,1.6,0.4,Iris-setosa
5.2,3.5,1.5,0.2,Iris-setosa
5.2,3.4,1.4,0.2,Iris-setosa
4.7,3.2,1.6,0.2,Iris-setosa
4.8,3.1,1.6,0.2,Iris-setosa
5.4,3.4,1.5,0.4,Iris-setosa
5.2,4.1,1.5,0.1,Iris-setosa
5.5,4.2,1.4,0.2,Iris-setosa
4.9,3.1,1.5,0.1,Iris-setosa
5,3.2,1.2,0.2,Iris-setosa
5.5,3.5,1.3,0.2,Iris-setosa
4.9,3.1,1.5,0.1,Iris-setosa
4.4,3,1.3,0.2,Iris-setosa
5.1,3.4,1.5,0.2,Iris-setosa
5,3.5,1.3,0.3,Iris-setosa
4.5,2.3,1.3,0.3,Iris-setosa
4.4,3.2,1.3,0.2,Iris-setosa
5,3.5,1.6,0.6,Iris-setosa
5.1,3.8,1.9,0.4,Iris-setosa
4.8,3,1.4,0.3,Iris-setosa
5.1,3.8,1.6,0.2,Iris-setosa
4.6,3.2,1.4,0.2,Iris-setosa
5.3,3.7,1.5,0.2,Iris-setosa
5,3.3,1.4,0.2,Iris-setosa
7,3.2,4.7,1.4,Iris-versicolor
6.4,3.2,4.5,1.5,Iris-versicolor
6.9,3.1,4.9,1.5,Iris-versicolor
5.5,2.3,4,1.3,Iris-versicolor
6.5,2.8,4.6,1.5,Iris-versicolor
5.7,2.8,4.5,1.3,Iris-versicolor
6.3,3.3,4.7,1.6,Iris-versicolor
4.9,2.4,3.3,1,Iris-versicolor
6.6,2.9,4.6,1.3,Iris-versicolor
5.2,2.7,3.9,1.4,Iris-versicolor
5,2,3.5,1,Iris-versicolor
5.9,3,4.2,1.5,Iris-versicolor
6,2.2,4,1,Iris-versicolor
6.1,2.9,4.7,1.4,Iris-versicolor
5.6,2.9,3.6,1.3,Iris-versicolor
6.7,3.1,4.4,1.4,Iris-versicolor
5.6,3,4.5,1.5,Iris-versicolor
5.8,2.7,4.1,1,Iris-versicolor
6.2,2.2,4.5,1.5,Iris-versicolor
5.6,2.5,3.9,1.1,Iris-versicolor
5.9,3.2,4.8,1.8,Iris-versicolor
6.1,2.8,4,1.3,Iris-versicolor
6.3,2.5,4.9,1.5,Iris-versicolor
6.1,2.8,4.7,1.2,Iris-versicolor
6.4,2.9,4.3,1.3,Iris-versicolor
6.6,3,4.4,1.4,Iris-versicolor
6.8,2.8,4.8,1.4,Iris-versicolor
6.7,3,5,1.7,Iris-versicolor
6,2.9,4.5,1.5,Iris-versicolor
5.7,2.6,3.5,1,Iris-versicolor
5.5,2.4,3.8,1.1,Iris-versicolor
5.5,2.4,3.7,1,Iris-versicolor
5.8,2.7,3.9,1.2,Iris-versicolor
6,2.7,5.1,1.6,Iris-versicolor
5.4,3,4.5,1.5,Iris-versicolor
6,3.4,4.5,1.6,Iris-versicolor
6.7,3.1,4.7,1.5,Iris-versicolor
6.3,2.3,4.4,1.3,Iris-versicolor
5.6,3,4.1,1.3,Iris-versicolor
5.5,2.5,4,1.3,Iris-versicolor
5.5,2.6,4.4,1.2,Iris-versicolor
6.1,3,4.6,1.4,Iris-versicolor
5.8,2.6,4,1.2,Iris-versicolor
5,2.3,3.3,1,Iris-versicolor
5.6,2.7,4.2,1.3,Iris-versicolor
5.7,3,4.2,1.2,Iris-versicolor
5.7,2.9,4.2,1.3,Iris-versicolor
6.2,2.9,4.3,1.3,Iris-versicolor
5.1,2.5,3,1.1,Iris-versicolor
5.7,2.8,4.1,1.3,Iris-versicolor
6.3,3.3,6,2.5,Iris-virginica
5.8,2.7,5.1,1.9,Iris-virginica
7.1,3,5.9,2.1,Iris-virginica
6.3,2.9,5.6,1.8,Iris-virginica
6.5,3,5.8,2.2,Iris-virginica
7.6,3,6.6,2.1,Iris-virginica
4.9,2.5,4.5,1.7,Iris-virginica
7.3,2.9,6.3,1.8,Iris-virginica
6.7,2.5,5.8,1.8,Iris-virginica
7.2,3.6,6.1,2.5,Iris-virginica
6.5,3.2,5.1,2,Iris-virginica
6.4,2.7,5.3,1.9,Iris-virginica
6.8,3,5.5,2.1,Iris-virginica
5.7,2.5,5,2,Iris-virginica
5.8,2.8,5.1,2.4,Iris-virginica
6.4,3.2,5.3,2.3,Iris-virginica
6.5,3,5.5,1.8,Iris-virginica
7.7,3.8,6.7,2.2,Iris-virginica
7.7,2.6,6.9,2.3,Iris-virginica
6,2.2,5,1.5,Iris-virginica
6.9,3.2,5.7,2.3,Iris-virginica
5.6,2.8,4.9,2,Iris-virginica
7.7,2.8,6.7,2,Iris-virginica
6.3,2.7,4.9,1.8,Iris-virginica
6.7,3.3,5.7,2.1,Iris-virginica
7.2,3.2,6,1.8,Iris-virginica
6.2,2.8,4.8,1.8,Iris-virginica
6.1,3,4.9,1.8,Iris-virginica
6.4,2.8,5.6,2.1,Iris-virginica
7.2,3,5.8,1.6,Iris-virginica
7.4,2.8,6.1,1.9,Iris-virginica
7.9,3.8,6.4,2,Iris-virginica
6.4,2.8,5.6,2.2,Iris-virginica
6.3,2.8,5.1,1.5,Iris-virginica
6.1,2.6,5.6,1.4,Iris-virginica
7.7,3,6.1,2.3,Iris-virginica
6.3,3.4,5.6,2.4,Iris-virginica
6.4,3.1,5.5,1.8,Iris-virginica
6,3,4.8,1.8,Iris-virginica
6.9,3.1,5.4,2.1,Iris-virginica
6.7,3.1,5.6,2.4,Iris-virginica
6.9,3.1,5.1,2.3,Iris-virginica
5.8,2.7,5.1,1.9,Iris-virginica
6.8,3.2,5.9,2.3,Iris-virginica
6.7,3.3,5.7,2.5,Iris-virginica
6.7,3,5.2,2.3,Iris-virginica
6.3,2.5,5,1.9,Iris-virginica
6.5,3,5.2,2,Iris-virginica
6.2,3.4,5.4,2.3,Iris-virginica
5.9,3,5.1,1.8,Iris-virginica

View File

@ -1,151 +0,0 @@
sepallength,sepalwidth,petallength,petalwidth,class
5.1,3.5,1.4,0.2,Iris-setosa
4.9,3,1.4,0.2,Iris-setosa
4.7,3.2,1.3,0.2,Iris-setosa
4.6,3.1,1.5,0.2,Iris-setosa
5,3.6,1.4,0.2,Iris-setosa
5.4,3.9,1.7,0.4,Iris-setosa
4.6,3.4,1.4,0.3,Iris-setosa
5,3.4,1.5,0.2,Iris-setosa
4.4,2.9,1.4,0.2,Iris-setosa
4.9,3.1,1.5,0.1,Iris-setosa
5.4,3.7,1.5,0.2,Iris-setosa
4.8,3.4,1.6,0.2,Iris-setosa
4.8,3,1.4,0.1,Iris-setosa
4.3,3,1.1,0.1,Iris-setosa
5.8,4,1.2,0.2,Iris-setosa
5.7,4.4,1.5,0.4,Iris-setosa
5.4,3.9,1.3,0.4,Iris-setosa
5.1,3.5,1.4,0.3,Iris-setosa
5.7,3.8,1.7,0.3,Iris-setosa
5.1,3.8,1.5,0.3,Iris-setosa
5.4,3.4,1.7,0.2,Iris-setosa
5.1,3.7,1.5,0.4,Iris-setosa
4.6,3.6,1,0.2,Iris-setosa
5.1,3.3,1.7,0.5,Iris-setosa
4.8,3.4,1.9,0.2,Iris-setosa
5,3,1.6,0.2,Iris-setosa
5,3.4,1.6,0.4,Iris-setosa
5.2,3.5,1.5,0.2,Iris-setosa
5.2,3.4,1.4,0.2,Iris-setosa
4.7,3.2,1.6,0.2,Iris-setosa
4.8,3.1,1.6,0.2,Iris-setosa
5.4,3.4,1.5,0.4,Iris-setosa
5.2,4.1,1.5,0.1,Iris-setosa
5.5,4.2,1.4,0.2,Iris-setosa
4.9,3.1,1.5,0.1,Iris-setosa
5,3.2,1.2,0.2,Iris-setosa
5.5,3.5,1.3,0.2,Iris-setosa
4.9,3.1,1.5,0.1,Iris-setosa
4.4,3,1.3,0.2,Iris-setosa
5.1,3.4,1.5,0.2,Iris-setosa
5,3.5,1.3,0.3,Iris-setosa
4.5,2.3,1.3,0.3,Iris-setosa
4.4,3.2,1.3,0.2,Iris-setosa
5,3.5,1.6,0.6,Iris-setosa
5.1,3.8,1.9,0.4,Iris-setosa
4.8,3,1.4,0.3,Iris-setosa
5.1,3.8,1.6,0.2,Iris-setosa
4.6,3.2,1.4,0.2,Iris-setosa
5.3,3.7,1.5,0.2,Iris-setosa
5,3.3,1.4,0.2,Iris-setosa
7,3.2,4.7,1.4,Iris-versicolor
6.4,3.2,4.5,1.5,Iris-versicolor
6.9,3.1,4.9,1.5,Iris-versicolor
5.5,2.3,4,1.3,Iris-versicolor
6.5,2.8,4.6,1.5,Iris-versicolor
5.7,2.8,4.5,1.3,Iris-versicolor
6.3,3.3,4.7,1.6,Iris-versicolor
4.9,2.4,3.3,1,Iris-versicolor
6.6,2.9,4.6,1.3,Iris-versicolor
5.2,2.7,3.9,1.4,Iris-versicolor
5,2,3.5,1,Iris-versicolor
5.9,3,4.2,1.5,Iris-versicolor
6,2.2,4,1,Iris-versicolor
6.1,2.9,4.7,1.4,Iris-versicolor
5.6,2.9,3.6,1.3,Iris-versicolor
6.7,3.1,4.4,1.4,Iris-versicolor
5.6,3,4.5,1.5,Iris-versicolor
5.8,2.7,4.1,1,Iris-versicolor
6.2,2.2,4.5,1.5,Iris-versicolor
5.6,2.5,3.9,1.1,Iris-versicolor
5.9,3.2,4.8,1.8,Iris-versicolor
6.1,2.8,4,1.3,Iris-versicolor
6.3,2.5,4.9,1.5,Iris-versicolor
6.1,2.8,4.7,1.2,Iris-versicolor
6.4,2.9,4.3,1.3,Iris-versicolor
6.6,3,4.4,1.4,Iris-versicolor
6.8,2.8,4.8,1.4,Iris-versicolor
6.7,3,5,1.7,Iris-versicolor
6,2.9,4.5,1.5,Iris-versicolor
5.7,2.6,3.5,1,Iris-versicolor
5.5,2.4,3.8,1.1,Iris-versicolor
5.5,2.4,3.7,1,Iris-versicolor
5.8,2.7,3.9,1.2,Iris-versicolor
6,2.7,5.1,1.6,Iris-versicolor
5.4,3,4.5,1.5,Iris-versicolor
6,3.4,4.5,1.6,Iris-versicolor
6.7,3.1,4.7,1.5,Iris-versicolor
6.3,2.3,4.4,1.3,Iris-versicolor
5.6,3,4.1,1.3,Iris-versicolor
5.5,2.5,4,1.3,Iris-versicolor
5.5,2.6,4.4,1.2,Iris-versicolor
6.1,3,4.6,1.4,Iris-versicolor
5.8,2.6,4,1.2,Iris-versicolor
5,2.3,3.3,1,Iris-versicolor
5.6,2.7,4.2,1.3,Iris-versicolor
5.7,3,4.2,1.2,Iris-versicolor
5.7,2.9,4.2,1.3,Iris-versicolor
6.2,2.9,4.3,1.3,Iris-versicolor
5.1,2.5,3,1.1,Iris-versicolor
5.7,2.8,4.1,1.3,Iris-versicolor
6.3,3.3,6,2.5,Iris-virginica
5.8,2.7,5.1,1.9,Iris-virginica
7.1,3,5.9,2.1,Iris-virginica
6.3,2.9,5.6,1.8,Iris-virginica
6.5,3,5.8,2.2,Iris-virginica
7.6,3,6.6,2.1,Iris-virginica
4.9,2.5,4.5,1.7,Iris-virginica
7.3,2.9,6.3,1.8,Iris-virginica
6.7,2.5,5.8,1.8,Iris-virginica
7.2,3.6,6.1,2.5,Iris-virginica
6.5,3.2,5.1,2,Iris-virginica
6.4,2.7,5.3,1.9,Iris-virginica
6.8,3,5.5,2.1,Iris-virginica
5.7,2.5,5,2,Iris-virginica
5.8,2.8,5.1,2.4,Iris-virginica
6.4,3.2,5.3,2.3,Iris-virginica
6.5,3,5.5,1.8,Iris-virginica
7.7,3.8,6.7,2.2,Iris-virginica
7.7,2.6,6.9,2.3,Iris-virginica
6,2.2,5,1.5,Iris-virginica
6.9,3.2,5.7,2.3,Iris-virginica
5.6,2.8,4.9,2,Iris-virginica
7.7,2.8,6.7,2,Iris-virginica
6.3,2.7,4.9,1.8,Iris-virginica
6.7,3.3,5.7,2.1,Iris-virginica
7.2,3.2,6,1.8,Iris-virginica
6.2,2.8,4.8,1.8,Iris-virginica
6.1,3,4.9,1.8,Iris-virginica
6.4,2.8,5.6,2.1,Iris-virginica
7.2,3,5.8,1.6,Iris-virginica
7.4,2.8,6.1,1.9,Iris-virginica
7.9,3.8,6.4,2,Iris-virginica
6.4,2.8,5.6,2.2,Iris-virginica
6.3,2.8,5.1,1.5,Iris-virginica
6.1,2.6,5.6,1.4,Iris-virginica
7.7,3,6.1,2.3,Iris-virginica
6.3,3.4,5.6,2.4,Iris-virginica
6.4,3.1,5.5,1.8,Iris-virginica
6,3,4.8,1.8,Iris-virginica
6.9,3.1,5.4,2.1,Iris-virginica
6.7,3.1,5.6,2.4,Iris-virginica
6.9,3.1,5.1,2.3,Iris-virginica
5.8,2.7,5.1,1.9,Iris-virginica
6.8,3.2,5.9,2.3,Iris-virginica
6.7,3.3,5.7,2.5,Iris-virginica
6.7,3,5.2,2.3,Iris-virginica
6.3,2.5,5,1.9,Iris-virginica
6.5,3,5.2,2,Iris-virginica
6.2,3.4,5.4,2.3,Iris-virginica
5.9,3,5.1,1.8,Iris-virginica

File diff suppressed because it is too large

View File

@ -1,15 +0,0 @@
Outlook,Temp,Humidity,Wind,class
Sunny,Hot,High,Weak,No
Sunny,Hot,High,Strong,No
Overcast,Hot,High,Weak,Yes
Rain,Mild,High,Weak,Yes
Rain,Cool,Normal,Weak,Yes
Rain,Cool,Normal,Strong,No
Overcast,Cool,Normal,Strong,Yes
Sunny,Mild,High,Weak,No
Sunny,Cool,Normal,Weak,Yes
Rain,Mild,Normal,Weak,Yes
Sunny,Mild,Normal,Strong,Yes
Overcast,Mild,High,Strong,Yes
Overcast,Hot,Normal,Weak,Yes
Rain,Mild,High,Strong,No

View File

@ -1,13 +0,0 @@
Alternative Restaurant Nearby,Bar Area To Wait,Friday or Saturday,Hungry,Patrons,Price Range,Raining,Reservation,Type,Estimated Wait,WillWait
Yes,No,No,Yes,Some,$$$,No,Yes,French,0-10,Yes
Yes,No,No,Yes,Full,$,No,No,Thai,30-60,No
No,Yes,No,No,Some,$,No,No,Burger,0-10,Yes
Yes,No,Yes,Yes,Full,$,No,No,Thai,10-30,Yes
Yes,No,Yes,No,Full,$$$,No,Yes,French,>60,No
No,Yes,No,Yes,Some,$$,Yes,Yes,Italian,0-10,Yes
No,Yes,No,No,None,$,Yes,No,Burger,0-10,No
No,No,No,Yes,Some,$$,Yes,Yes,Thai,0-10,Yes
No,Yes,Yes,No,Full,$,Yes,No,Burger,>60,No
Yes,Yes,Yes,Yes,Full,$$$,No,Yes,Italian,10-30,No
No,No,No,No,None,$,No,No,Thai,0-10,No
Yes,Yes,Yes,Yes,Full,$,No,No,Burger,30-60,Yes