Amazing-Python-Scripts

Fork
0
/
Gold Price Prediction.ipynb 
511 lines · 19.7 KB
1
{
2
 "cells": [
3
  {
4
   "cell_type": "code",
5
   "execution_count": 1,
6
   "id": "484b0874",
7
   "metadata": {},
8
   "outputs": [],
9
   "source": [
10
    "import pandas as pd\n",
11
    "import numpy as np\n",
12
    "from sklearn.model_selection import train_test_split\n",
13
    "from sklearn.preprocessing import StandardScaler\n",
14
    "from sklearn.preprocessing import MinMaxScaler\n",
15
    "from sklearn.metrics import r2_score\n",
    "# NOTE(review): StandardScaler appears unused -- scaling below uses MinMaxScaler\n"
16
   ]
17
  },
18
  {
19
   "cell_type": "code",
20
   "execution_count": 2,
21
   "id": "9664573c",
22
   "metadata": {},
23
   "outputs": [],
24
   "source": [
25
    "import tensorflow as tf\n",
26
    "from tensorflow import keras\n",
27
    "from tensorflow.keras import Sequential\n",
28
    "from tensorflow.keras.layers import Dense, BatchNormalization, LeakyReLU\n",
29
    "from tensorflow.keras.optimizers import Adam\n",
30
    "from tensorflow.keras.regularizers import l2\n",
    "# NOTE(review): LeakyReLU and l2 are imported but never used in the model below\n"
31
   ]
32
  },
33
  {
34
   "cell_type": "code",
35
   "execution_count": 3,
36
   "id": "919b6d04",
37
   "metadata": {},
38
   "outputs": [],
39
   "source": [
40
    "df = pd.read_csv(\"gld_price_data.csv\")"
41
   ]
42
  },
43
  {
44
   "cell_type": "code",
45
   "execution_count": 4,
46
   "id": "569f2287",
47
   "metadata": {},
48
   "outputs": [
49
    {
50
     "data": {
51
      "text/html": [
52
       "<div>\n",
53
       "<style scoped>\n",
54
       "    .dataframe tbody tr th:only-of-type {\n",
55
       "        vertical-align: middle;\n",
56
       "    }\n",
57
       "\n",
58
       "    .dataframe tbody tr th {\n",
59
       "        vertical-align: top;\n",
60
       "    }\n",
61
       "\n",
62
       "    .dataframe thead th {\n",
63
       "        text-align: right;\n",
64
       "    }\n",
65
       "</style>\n",
66
       "<table border=\"1\" class=\"dataframe\">\n",
67
       "  <thead>\n",
68
       "    <tr style=\"text-align: right;\">\n",
69
       "      <th></th>\n",
70
       "      <th>Date</th>\n",
71
       "      <th>SPX</th>\n",
72
       "      <th>GLD</th>\n",
73
       "      <th>USO</th>\n",
74
       "      <th>SLV</th>\n",
75
       "      <th>EUR/USD</th>\n",
76
       "    </tr>\n",
77
       "  </thead>\n",
78
       "  <tbody>\n",
79
       "    <tr>\n",
80
       "      <th>0</th>\n",
81
       "      <td>01/02/08</td>\n",
82
       "      <td>1447.160034</td>\n",
83
       "      <td>84.860001</td>\n",
84
       "      <td>78.470001</td>\n",
85
       "      <td>15.180</td>\n",
86
       "      <td>1.471692</td>\n",
87
       "    </tr>\n",
88
       "    <tr>\n",
89
       "      <th>1</th>\n",
90
       "      <td>01/03/08</td>\n",
91
       "      <td>1447.160034</td>\n",
92
       "      <td>85.570000</td>\n",
93
       "      <td>78.370003</td>\n",
94
       "      <td>15.285</td>\n",
95
       "      <td>1.474491</td>\n",
96
       "    </tr>\n",
97
       "    <tr>\n",
98
       "      <th>2</th>\n",
99
       "      <td>01/04/08</td>\n",
100
       "      <td>1411.630005</td>\n",
101
       "      <td>85.129997</td>\n",
102
       "      <td>77.309998</td>\n",
103
       "      <td>15.167</td>\n",
104
       "      <td>1.475492</td>\n",
105
       "    </tr>\n",
106
       "    <tr>\n",
107
       "      <th>3</th>\n",
108
       "      <td>01/07/08</td>\n",
109
       "      <td>1416.180054</td>\n",
110
       "      <td>84.769997</td>\n",
111
       "      <td>75.500000</td>\n",
112
       "      <td>15.053</td>\n",
113
       "      <td>1.468299</td>\n",
114
       "    </tr>\n",
115
       "    <tr>\n",
116
       "      <th>4</th>\n",
117
       "      <td>01/08/08</td>\n",
118
       "      <td>1390.189941</td>\n",
119
       "      <td>86.779999</td>\n",
120
       "      <td>76.059998</td>\n",
121
       "      <td>15.590</td>\n",
122
       "      <td>1.557099</td>\n",
123
       "    </tr>\n",
124
       "  </tbody>\n",
125
       "</table>\n",
126
       "</div>"
127
      ],
128
      "text/plain": [
129
       "       Date          SPX        GLD        USO     SLV   EUR/USD\n",
130
       "0  01/02/08  1447.160034  84.860001  78.470001  15.180  1.471692\n",
131
       "1  01/03/08  1447.160034  85.570000  78.370003  15.285  1.474491\n",
132
       "2  01/04/08  1411.630005  85.129997  77.309998  15.167  1.475492\n",
133
       "3  01/07/08  1416.180054  84.769997  75.500000  15.053  1.468299\n",
134
       "4  01/08/08  1390.189941  86.779999  76.059998  15.590  1.557099"
135
      ]
136
     },
137
     "execution_count": 4,
138
     "metadata": {},
139
     "output_type": "execute_result"
140
    }
141
   ],
142
   "source": [
143
    "df.head()"
144
   ]
145
  },
146
  {
147
   "cell_type": "code",
148
   "execution_count": 5,
149
   "id": "966876c8",
150
   "metadata": {},
151
   "outputs": [],
152
   "source": [
153
    "X = df[['SPX','USO','SLV','EUR/USD']]\n",
154
    "y = df['GLD']"
155
   ]
156
  },
157
  {
158
   "cell_type": "code",
159
   "execution_count": 6,
160
   "id": "c07a4522",
161
   "metadata": {},
162
   "outputs": [],
163
   "source": [
164
    "# Split the data into training and testing sets\n",
165
    "X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)"
166
   ]
167
  },
168
  {
169
   "cell_type": "code",
170
   "execution_count": 7,
171
   "id": "7455e5f5",
172
   "metadata": {},
173
   "outputs": [],
174
   "source": [
175
    "scaler=MinMaxScaler()"
176
   ]
177
  },
178
  {
179
   "cell_type": "code",
180
   "execution_count": 8,
181
   "id": "6b7b2010",
182
   "metadata": {},
183
   "outputs": [],
184
   "source": [
185
    "X_train_scaled = scaler.fit_transform(X_train)\n",
186
    "X_test_scaled = scaler.transform(X_test)"
187
   ]
188
  },
189
  {
190
   "cell_type": "code",
191
   "execution_count": 9,
192
   "id": "48246523",
193
   "metadata": {},
194
   "outputs": [],
195
   "source": [
196
    "model = Sequential()\n",
197
    "\n",
198
    "model.add(Dense(32, activation='relu', input_dim=4))\n",
199
    "model.add(BatchNormalization())\n",
200
    "model.add(Dense(16, activation='relu'))\n",
201
    "model.add(BatchNormalization())\n",
202
    "model.add(Dense(8, activation='relu'))\n",
203
    "model.add(Dense(1, activation='linear'))"
204
   ]
205
  },
206
  {
207
   "cell_type": "code",
208
   "execution_count": 10,
209
   "id": "fe13061e",
210
   "metadata": {},
211
   "outputs": [],
212
   "source": [
213
    "# Compile the model with the Adam optimizer at a lower learning rate (0.001); no L2 regularization is actually applied\n",
214
    "optimizer = Adam(learning_rate=0.001)\n",
215
    "model.compile(loss=\"mean_squared_error\", optimizer=optimizer)\n"
216
   ]
217
  },
218
  {
219
   "cell_type": "code",
220
   "execution_count": 11,
221
   "id": "373c0c23",
222
   "metadata": {},
223
   "outputs": [
224
    {
225
     "name": "stdout",
226
     "output_type": "stream",
227
     "text": [
228
      "Epoch 1/100\n",
229
      "52/52 [==============================] - 1s 5ms/step - loss: 15304.8730 - val_loss: 15708.4053\n",
230
      "Epoch 2/100\n",
231
      "52/52 [==============================] - 0s 2ms/step - loss: 14771.0381 - val_loss: 15542.6865\n",
232
      "Epoch 3/100\n",
233
      "52/52 [==============================] - 0s 2ms/step - loss: 14129.7539 - val_loss: 15140.6328\n",
234
      "Epoch 4/100\n",
235
      "52/52 [==============================] - 0s 2ms/step - loss: 13294.3955 - val_loss: 14458.8604\n",
236
      "Epoch 5/100\n",
237
      "52/52 [==============================] - 0s 2ms/step - loss: 12270.9502 - val_loss: 13607.0762\n",
238
      "Epoch 6/100\n",
239
      "52/52 [==============================] - 0s 2ms/step - loss: 11040.0410 - val_loss: 12287.5254\n",
240
      "Epoch 7/100\n",
241
      "52/52 [==============================] - 0s 2ms/step - loss: 9585.3516 - val_loss: 10525.4854\n",
242
      "Epoch 8/100\n",
243
      "52/52 [==============================] - 0s 2ms/step - loss: 8010.3799 - val_loss: 8836.4229\n",
244
      "Epoch 9/100\n",
245
      "52/52 [==============================] - 0s 2ms/step - loss: 6288.5825 - val_loss: 6929.2671\n",
246
      "Epoch 10/100\n",
247
      "52/52 [==============================] - 0s 2ms/step - loss: 4717.1387 - val_loss: 4874.6914\n",
248
      "Epoch 11/100\n",
249
      "52/52 [==============================] - 0s 2ms/step - loss: 3305.2129 - val_loss: 3241.6379\n",
250
      "Epoch 12/100\n",
251
      "52/52 [==============================] - 0s 2ms/step - loss: 2272.3542 - val_loss: 1957.0859\n",
252
      "Epoch 13/100\n",
253
      "52/52 [==============================] - 0s 2ms/step - loss: 1402.4030 - val_loss: 1069.7037\n",
254
      "Epoch 14/100\n",
255
      "52/52 [==============================] - 0s 2ms/step - loss: 987.7748 - val_loss: 525.3793\n",
256
      "Epoch 15/100\n",
257
      "52/52 [==============================] - 0s 2ms/step - loss: 730.0342 - val_loss: 325.7230\n",
258
      "Epoch 16/100\n",
259
      "52/52 [==============================] - 0s 2ms/step - loss: 450.3367 - val_loss: 187.5773\n",
260
      "Epoch 17/100\n",
261
      "52/52 [==============================] - 0s 2ms/step - loss: 370.8854 - val_loss: 154.5080\n",
262
      "Epoch 18/100\n",
263
      "52/52 [==============================] - 0s 2ms/step - loss: 414.2657 - val_loss: 94.6718\n",
264
      "Epoch 19/100\n",
265
      "52/52 [==============================] - 0s 2ms/step - loss: 376.8248 - val_loss: 49.1882\n",
266
      "Epoch 20/100\n",
267
      "52/52 [==============================] - 0s 2ms/step - loss: 309.6599 - val_loss: 58.1425\n",
268
      "Epoch 21/100\n",
269
      "52/52 [==============================] - 0s 2ms/step - loss: 389.4264 - val_loss: 123.5352\n",
270
      "Epoch 22/100\n",
271
      "52/52 [==============================] - 0s 2ms/step - loss: 307.7685 - val_loss: 47.8060\n",
272
      "Epoch 23/100\n",
273
      "52/52 [==============================] - 0s 2ms/step - loss: 228.6202 - val_loss: 50.7492\n",
274
      "Epoch 24/100\n",
275
      "52/52 [==============================] - 0s 2ms/step - loss: 245.1460 - val_loss: 56.2384\n",
276
      "Epoch 25/100\n",
277
      "52/52 [==============================] - 0s 2ms/step - loss: 178.6969 - val_loss: 77.5807\n",
278
      "Epoch 26/100\n",
279
      "52/52 [==============================] - 0s 2ms/step - loss: 162.8485 - val_loss: 92.8994\n",
280
      "Epoch 27/100\n",
281
      "52/52 [==============================] - 0s 2ms/step - loss: 188.3134 - val_loss: 80.0995\n",
282
      "Epoch 28/100\n",
283
      "52/52 [==============================] - 0s 2ms/step - loss: 220.8654 - val_loss: 44.0109\n",
284
      "Epoch 29/100\n",
285
      "52/52 [==============================] - 0s 2ms/step - loss: 197.3504 - val_loss: 66.1544\n",
286
      "Epoch 30/100\n",
287
      "52/52 [==============================] - 0s 2ms/step - loss: 236.7831 - val_loss: 99.2187\n",
288
      "Epoch 31/100\n",
289
      "52/52 [==============================] - 0s 2ms/step - loss: 176.2085 - val_loss: 85.3900\n",
290
      "Epoch 32/100\n",
291
      "52/52 [==============================] - 0s 2ms/step - loss: 175.3212 - val_loss: 155.0774\n",
292
      "Epoch 33/100\n",
293
      "52/52 [==============================] - 0s 2ms/step - loss: 116.0559 - val_loss: 137.2745\n",
294
      "Epoch 34/100\n",
295
      "52/52 [==============================] - 0s 2ms/step - loss: 128.9993 - val_loss: 181.3791\n",
296
      "Epoch 35/100\n",
297
      "52/52 [==============================] - 0s 2ms/step - loss: 208.2269 - val_loss: 48.8132\n",
298
      "Epoch 36/100\n",
299
      "52/52 [==============================] - 0s 2ms/step - loss: 137.9986 - val_loss: 189.3881\n",
300
      "Epoch 37/100\n",
301
      "52/52 [==============================] - 0s 2ms/step - loss: 163.9003 - val_loss: 57.6553\n",
302
      "Epoch 38/100\n",
303
      "52/52 [==============================] - 0s 2ms/step - loss: 148.5900 - val_loss: 52.2488\n",
304
      "Epoch 39/100\n",
305
      "52/52 [==============================] - 0s 2ms/step - loss: 119.2904 - val_loss: 47.5964\n",
306
      "Epoch 40/100\n",
307
      "52/52 [==============================] - 0s 2ms/step - loss: 125.1296 - val_loss: 329.1179\n",
308
      "Epoch 41/100\n",
309
      "52/52 [==============================] - 0s 2ms/step - loss: 93.7821 - val_loss: 70.6979\n",
310
      "Epoch 42/100\n",
311
      "52/52 [==============================] - 0s 2ms/step - loss: 125.1611 - val_loss: 142.3922\n",
312
      "Epoch 43/100\n",
313
      "52/52 [==============================] - 0s 2ms/step - loss: 119.5163 - val_loss: 64.7298\n",
314
      "Epoch 44/100\n",
315
      "52/52 [==============================] - 0s 2ms/step - loss: 119.9083 - val_loss: 214.3814\n",
316
      "Epoch 45/100\n",
317
      "52/52 [==============================] - 0s 2ms/step - loss: 170.1375 - val_loss: 215.4284\n",
318
      "Epoch 46/100\n",
319
      "52/52 [==============================] - 0s 2ms/step - loss: 112.2194 - val_loss: 167.3709\n",
320
      "Epoch 47/100\n",
321
      "52/52 [==============================] - 0s 2ms/step - loss: 80.0805 - val_loss: 282.3225\n",
322
      "Epoch 48/100\n",
323
      "52/52 [==============================] - 0s 2ms/step - loss: 109.6539 - val_loss: 98.2927\n",
324
      "Epoch 49/100\n",
325
      "52/52 [==============================] - 0s 2ms/step - loss: 92.5042 - val_loss: 167.2021\n",
326
      "Epoch 50/100\n",
327
      "52/52 [==============================] - 0s 2ms/step - loss: 90.2667 - val_loss: 105.5259\n",
328
      "Epoch 51/100\n",
329
      "52/52 [==============================] - 0s 2ms/step - loss: 108.0885 - val_loss: 175.4377\n",
330
      "Epoch 52/100\n",
331
      "52/52 [==============================] - 0s 2ms/step - loss: 98.9612 - val_loss: 105.4652\n",
332
      "Epoch 53/100\n",
333
      "52/52 [==============================] - 0s 2ms/step - loss: 88.3914 - val_loss: 178.7829\n",
334
      "Epoch 54/100\n",
335
      "52/52 [==============================] - 0s 2ms/step - loss: 94.3760 - val_loss: 63.4042\n",
336
      "Epoch 55/100\n",
337
      "52/52 [==============================] - 0s 2ms/step - loss: 75.2459 - val_loss: 255.6528\n",
338
      "Epoch 56/100\n",
339
      "52/52 [==============================] - 0s 2ms/step - loss: 82.6317 - val_loss: 108.2983\n",
340
      "Epoch 57/100\n",
341
      "52/52 [==============================] - 0s 2ms/step - loss: 102.6565 - val_loss: 67.3034\n",
342
      "Epoch 58/100\n",
343
      "52/52 [==============================] - 0s 2ms/step - loss: 95.1825 - val_loss: 86.6359\n",
344
      "Epoch 59/100\n",
345
      "52/52 [==============================] - 0s 2ms/step - loss: 81.8627 - val_loss: 182.8544\n",
346
      "Epoch 60/100\n",
347
      "52/52 [==============================] - 0s 2ms/step - loss: 94.4717 - val_loss: 130.0062\n",
348
      "Epoch 61/100\n",
349
      "52/52 [==============================] - 0s 2ms/step - loss: 75.6448 - val_loss: 145.8914\n",
350
      "Epoch 62/100\n",
351
      "52/52 [==============================] - 0s 2ms/step - loss: 75.3606 - val_loss: 47.7060\n",
352
      "Epoch 63/100\n",
353
      "52/52 [==============================] - 0s 2ms/step - loss: 74.8475 - val_loss: 119.0299\n",
354
      "Epoch 64/100\n",
355
      "52/52 [==============================] - 0s 2ms/step - loss: 84.7068 - val_loss: 131.0818\n",
356
      "Epoch 65/100\n",
357
      "52/52 [==============================] - 0s 2ms/step - loss: 71.4457 - val_loss: 45.7404\n",
358
      "Epoch 66/100\n",
359
      "52/52 [==============================] - 0s 2ms/step - loss: 96.1561 - val_loss: 104.3360\n",
360
      "Epoch 67/100\n",
361
      "52/52 [==============================] - 0s 2ms/step - loss: 83.9160 - val_loss: 58.0162\n",
362
      "Epoch 68/100\n",
363
      "52/52 [==============================] - 0s 2ms/step - loss: 75.0167 - val_loss: 62.1173\n",
364
      "Epoch 69/100\n",
365
      "52/52 [==============================] - 0s 2ms/step - loss: 90.9415 - val_loss: 54.2195\n",
366
      "Epoch 70/100\n",
367
      "52/52 [==============================] - 0s 2ms/step - loss: 71.6245 - val_loss: 62.3442\n",
368
      "Epoch 71/100\n",
369
      "52/52 [==============================] - 0s 2ms/step - loss: 83.0387 - val_loss: 209.2570\n",
370
      "Epoch 72/100\n",
371
      "52/52 [==============================] - 0s 2ms/step - loss: 75.0388 - val_loss: 135.1255\n",
372
      "Epoch 73/100\n",
373
      "52/52 [==============================] - 0s 2ms/step - loss: 73.3539 - val_loss: 178.8017\n",
374
      "Epoch 74/100\n",
375
      "52/52 [==============================] - 0s 2ms/step - loss: 80.8245 - val_loss: 60.4232\n",
376
      "Epoch 75/100\n",
377
      "52/52 [==============================] - 0s 2ms/step - loss: 88.8882 - val_loss: 85.6680\n",
378
      "Epoch 76/100\n",
379
      "52/52 [==============================] - 0s 2ms/step - loss: 70.0898 - val_loss: 50.6072\n",
380
      "Epoch 77/100\n",
381
      "52/52 [==============================] - 0s 2ms/step - loss: 75.0763 - val_loss: 40.8105\n",
382
      "Epoch 78/100\n",
383
      "52/52 [==============================] - 0s 2ms/step - loss: 69.6293 - val_loss: 237.3010\n",
384
      "Epoch 79/100\n",
385
      "52/52 [==============================] - 0s 2ms/step - loss: 79.8115 - val_loss: 88.8350\n",
386
      "Epoch 80/100\n"
387
     ]
388
    },
389
    {
390
     "name": "stdout",
391
     "output_type": "stream",
392
     "text": [
393
      "52/52 [==============================] - 0s 2ms/step - loss: 80.7484 - val_loss: 57.9937\n",
394
      "Epoch 81/100\n",
395
      "52/52 [==============================] - 0s 2ms/step - loss: 70.4915 - val_loss: 31.3283\n",
396
      "Epoch 82/100\n",
397
      "52/52 [==============================] - 0s 2ms/step - loss: 63.0157 - val_loss: 50.0905\n",
398
      "Epoch 83/100\n",
399
      "52/52 [==============================] - 0s 2ms/step - loss: 64.6075 - val_loss: 37.3694\n",
400
      "Epoch 84/100\n",
401
      "52/52 [==============================] - 0s 2ms/step - loss: 64.7124 - val_loss: 36.9142\n",
402
      "Epoch 85/100\n",
403
      "52/52 [==============================] - 0s 2ms/step - loss: 58.0323 - val_loss: 37.0538\n",
404
      "Epoch 86/100\n",
405
      "52/52 [==============================] - 0s 2ms/step - loss: 73.4507 - val_loss: 45.0116\n",
406
      "Epoch 87/100\n",
407
      "52/52 [==============================] - 0s 2ms/step - loss: 57.4454 - val_loss: 65.2783\n",
408
      "Epoch 88/100\n",
409
      "52/52 [==============================] - 0s 2ms/step - loss: 65.5154 - val_loss: 37.8791\n",
410
      "Epoch 89/100\n",
411
      "52/52 [==============================] - 0s 2ms/step - loss: 65.9017 - val_loss: 32.1330\n",
412
      "Epoch 90/100\n",
413
      "52/52 [==============================] - 0s 2ms/step - loss: 57.5096 - val_loss: 41.4032\n",
414
      "Epoch 91/100\n",
415
      "52/52 [==============================] - 0s 2ms/step - loss: 58.3772 - val_loss: 64.2754\n",
416
      "Epoch 92/100\n",
417
      "52/52 [==============================] - 0s 2ms/step - loss: 60.9921 - val_loss: 49.7985\n",
418
      "Epoch 93/100\n",
419
      "52/52 [==============================] - 0s 2ms/step - loss: 55.2542 - val_loss: 39.2479\n",
420
      "Epoch 94/100\n",
421
      "52/52 [==============================] - 0s 2ms/step - loss: 54.2289 - val_loss: 86.8420\n",
422
      "Epoch 95/100\n",
423
      "52/52 [==============================] - 0s 2ms/step - loss: 60.0738 - val_loss: 63.5683\n",
424
      "Epoch 96/100\n",
425
      "52/52 [==============================] - 0s 2ms/step - loss: 69.5935 - val_loss: 32.8206\n",
426
      "Epoch 97/100\n",
427
      "52/52 [==============================] - 0s 2ms/step - loss: 52.2506 - val_loss: 67.2782\n",
428
      "Epoch 98/100\n",
429
      "52/52 [==============================] - 0s 2ms/step - loss: 53.4356 - val_loss: 43.5972\n",
430
      "Epoch 99/100\n",
431
      "52/52 [==============================] - 0s 2ms/step - loss: 51.5107 - val_loss: 57.3120\n",
432
      "Epoch 100/100\n",
433
      "52/52 [==============================] - 0s 2ms/step - loss: 57.2784 - val_loss: 36.8163\n"
434
     ]
435
    }
436
   ],
437
   "source": [
438
    "# Train the model\n",
439
    "history = model.fit(X_train_scaled, y_train, epochs=100, batch_size=32, validation_split=0.1)"
440
   ]
441
  },
442
  {
443
   "cell_type": "code",
444
   "execution_count": 12,
445
   "id": "c4b720ce",
446
   "metadata": {},
447
   "outputs": [
448
    {
449
     "name": "stdout",
450
     "output_type": "stream",
451
     "text": [
452
      "15/15 [==============================] - 0s 1ms/step\n"
453
     ]
454
    }
455
   ],
456
   "source": [
457
    "# Make predictions\n",
458
    "y_pred = model.predict(X_test_scaled)"
459
   ]
460
  },
461
  {
462
   "cell_type": "code",
463
   "execution_count": 13,
464
   "id": "eac5f7d8",
465
   "metadata": {},
466
   "outputs": [
467
    {
468
     "name": "stdout",
469
     "output_type": "stream",
470
     "text": [
471
      "R-squared score: 0.9471963282577486\n"
472
     ]
473
    }
474
   ],
475
   "source": [
476
    "# Calculate R-squared score\n",
477
    "r2 = r2_score(y_test, y_pred)\n",
478
    "print(\"R-squared score:\", r2)"
479
   ]
480
  },
481
  {
482
   "cell_type": "code",
483
   "execution_count": null,
484
   "id": "53250bf9",
485
   "metadata": {},
486
   "outputs": [],
487
   "source": []
488
  }
489
 ],
490
 "metadata": {
491
  "kernelspec": {
492
   "display_name": "Python 3 (ipykernel)",
493
   "language": "python",
494
   "name": "python3"
495
  },
496
  "language_info": {
497
   "codemirror_mode": {
498
    "name": "ipython",
499
    "version": 3
500
   },
501
   "file_extension": ".py",
502
   "mimetype": "text/x-python",
503
   "name": "python",
504
   "nbconvert_exporter": "python",
505
   "pygments_lexer": "ipython3",
506
   "version": "3.9.13"
507
  }
508
 },
509
 "nbformat": 4,
510
 "nbformat_minor": 5
511
}
512

Использование cookies

Мы используем файлы cookie в соответствии с Политикой конфиденциальности и Политикой использования cookies.

Нажимая кнопку «Принимаю», Вы даете АО «СберТех» согласие на обработку Ваших персональных данных в целях совершенствования нашего веб-сайта и Сервиса GitVerse, а также повышения удобства их использования.

Запретить использование cookies Вы можете самостоятельно в настройках Вашего браузера.