| 34 | 36 | UINT uType, // Transaction type. |
| 35 | 37 | UINT uFmt, // Clipboard data format. |
| 36 | 38 | HCONV hconv, // Handle to the conversation. |
| 37 | 39 | HSZ hsz1, // Handle to a string. |
| 38 | 40 | HSZ hsz2, // Handle to a string. |
| 39 | 41 | HDDEDATA hdata, // Handle to a global memory object. |
| 40 | 42 | DWORD dwData1, // Transaction-specific data. |
| 41 | 43 | DWORD dwData2) // Transaction-specific data. |
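The commented parameter list above documents the standard DDEML callback signature used to receive quotes from the MT4 server. For orientation, a minimal client-side callback that accepts `XTYP_ADVDATA` transactions and copies the advised text out of the global memory object could look like the sketch below; the 64-byte buffer and the `printf` sink are illustrative assumptions, not part of the original code (on 64-bit builds the last two parameters are `ULONG_PTR` rather than `DWORD`).

```cpp
#include <windows.h>
#include <ddeml.h>
#include <cstdio>

// Minimal DDEML callback: acknowledge advise data for a CF_TEXT item and ignore everything else.
HDDEDATA CALLBACK DdeCallback(UINT uType, UINT uFmt, HCONV hconv,
                              HSZ hsz1, HSZ hsz2, HDDEDATA hdata,
                              DWORD dwData1, DWORD dwData2)
{
    if (uType == XTYP_ADVDATA && uFmt == CF_TEXT)
    {
        char buffer[64] = {0};
        // Copy the advised item (e.g. the EURUSD bid) out of the DDE data handle.
        DdeGetData(hdata, (LPBYTE)buffer, sizeof(buffer) - 1, 0);
        printf("new quote: %s\n", buffer);   // a real client would push this into the averaging buffer
        return (HDDEDATA)DDE_FACK;           // acknowledge the advise transaction
    }
    return (HDDEDATA)NULL;                   // all other transaction types are not handled here
}
```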
| 139 | | vec data_vector; |
| 140 | | for(list<pair<int,int>>::iterator ar_iterator = ar_components.begin();ar_iterator!=ar_components.end();ar_iterator++) |
| 141 | | { |
| 142 | | data_vector.ins(data_vector.size(),(*data_matrix).get(ar_iterator->first,time-ar_iterator->second)); |
| 143 | | } |
| 144 | | |
| 145 | | if(my_rarx!=NULL) |
| 146 | | { |
| 147 | | data_vector.ins(0,(*data_matrix).get(predicted_channel,time)); |
| 148 | | my_rarx->bayes(data_vector); |
| | 182 | if(time > order) |
| | 183 | { |
| | 184 | vec data_vector; |
| | 185 | for(list<pair<int,int>>::iterator ar_iterator = ar_components.begin();ar_iterator!=ar_components.end();ar_iterator++) |
| | 186 | { |
| | 187 | data_vector.ins(data_vector.size(),(*data_matrix).get(ar_iterator->first,time-ar_iterator->second)); |
| | 188 | } |
| | 189 | |
| | 190 | // cout << "Update cond: " << data_vector << endl; |
| | 191 | |
| | 192 | double cur_lognc; |
| | 193 | if(my_rarx!=NULL) |
| | 194 | { |
| | 195 | data_vector.ins(0,(*data_matrix).get(predicted_channel,time)); |
| | 196 | my_rarx->bayes(data_vector); |
| | 197 | cur_lognc = my_rarx->posterior->log_nc; |
| | 198 | } |
| | 199 | else |
| | 200 | { |
| | 201 | vec pred_vec; |
| | 202 | pred_vec.ins(0,(*data_matrix).get(predicted_channel,time)); |
| | 203 | my_arx->bayes(pred_vec,data_vector); |
| | 204 | cur_lognc = my_arx->posterior().lognc(); |
| | 205 | } |
| | 206 | |
| | 207 | number_of_updates++; |
| | 208 | |
| | 209 | if(number_of_updates>window_size) |
| | 210 | { |
| | 211 | lognc_history.ins(lognc_history.size(),cur_lognc-prior_lognc); |
| | 212 | } |
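For orientation on the branch above: `posterior->log_nc` and `posterior().lognc()` are the log normalizing constants of the respective posteriors. For a conjugate update the log marginal likelihood of the data seen so far satisfies, schematically,

    ln p(y_1,...,y_N | M) = ln Z_post(M) - ln Z_prior(M) + c(N)

where c(N) collects likelihood constants that are identical for every model compared on the same N observations. That constant cancels in model comparison, which is presumably why `cur_lognc - prior_lognc` is accumulated into `lognc_history` once the window is full; whether the robust RARX posterior obeys the identity exactly is not visible in this diff, so the classical ARX branch should be read as the reference case.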
| 160 | | vec condition_vector; |
| 161 | | for(list<pair<int,int>>::iterator ar_iterator = ar_components.begin();ar_iterator!=ar_components.end();ar_iterator++) |
| 162 | | { |
| 163 | | condition_vector.ins(condition_vector.size(),(*data_matrix).get(ar_iterator->first,time-ar_iterator->second+1)); |
| 164 | | } |
| 165 | | |
| 166 | | if(my_rarx!=NULL) |
| 167 | | { |
| 168 | | pair<vec,mat> imp_samples = my_rarx->posterior->importance_sample(sample_size); |
| 169 | | |
| 170 | | //cout << imp_samples.first << endl; |
| | 222 | if(time > order) |
| | 223 | { |
| | 224 | vec condition_vector; |
| | 225 | for(list<pair<int,int>>::iterator ar_iterator = ar_components.begin();ar_iterator!=ar_components.end();ar_iterator++) |
| | 226 | { |
| | 227 | condition_vector.ins(condition_vector.size(),(*data_matrix).get(ar_iterator->first,time-ar_iterator->second+1)); |
| | 228 | } |
| | 229 | |
| | 230 | // cout << "Prediction cond: " << condition_vector << endl; |
| | 231 | |
| | 232 | if(my_rarx!=NULL) |
| | 233 | { |
| | 234 | pair<vec,mat> imp_samples = my_rarx->posterior->importance_sample(sample_size); |
| | 235 | |
| | 236 | //cout << "Point estimate: " << (imp_samples.second*imp_samples.first)/(imp_samples.first*ones(imp_samples.first.size())) << endl; |
| | 237 | |
| | 238 | vec sample_prediction; |
| | 239 | for(int t = 0;t<sample_size;t++) |
| | 240 | { |
| | 241 | vec lap_sample = condition_vector; |
| | 242 | |
| | 243 | if(has_constant) |
| | 244 | { |
| | 245 | lap_sample.ins(lap_sample.size(),1.0); |
| | 246 | } |
| | 247 | |
| | 248 | lap_sample.ins(0,(*LapRNG)()); |
| | 249 | |
| | 250 | sample_prediction.ins(0,lap_sample*imp_samples.second.get_col(t)); |
| | 251 | } |
| | 252 | |
| | 253 | return pair<vec,vec>(imp_samples.first,sample_prediction); |
| | 254 | } |
| | 255 | else |
| | 256 | { |
| | 257 | mat samples = my_arx->posterior().sample_mat(sample_size); |
| | 258 | |
| | 259 | //cout << "Point estimate: " << (samples*ones(samples.cols()))/samples.cols() << endl; |
| | 260 | |
| | 261 | // cout << samples.get_col(1) << endl; |
| | 262 | |
| | 263 | vec sample_prediction; |
| | 264 | for(int t = 0;t<sample_size;t++) |
| | 265 | { |
| | 266 | vec gau_sample = condition_vector; |
| | 267 | |
| | 268 | if(has_constant) |
| | 269 | { |
| | 270 | gau_sample.ins(gau_sample.size(),1.0); |
| | 271 | } |
| | 272 | |
| | 273 | gau_sample.ins(gau_sample.size(),randn()); |
| | 274 | |
| | 275 | sample_prediction.ins(0,gau_sample*samples.get_col(t)); |
| | 276 | } |
| | 277 | |
| | 278 | return pair<vec,vec>(ones(sample_prediction.size()),sample_prediction); |
| | 279 | } |
| | 280 | } |
| | 281 | else |
| | 282 | { |
| 172 | | vec sample_prediction; |
| 173 | | for(int t = 0;t<sample_size;t++) |
| 174 | | { |
| 175 | | vec lap_sample = condition_vector; |
| 176 | | |
| 177 | | if(has_constant) |
| 178 | | { |
| 179 | | lap_sample.ins(lap_sample.size(),1.0); |
| 180 | | } |
| 181 | | |
| 182 | | lap_sample.ins(0,(*LapRNG)()); |
| 183 | | |
| 184 | | sample_prediction.ins(0,lap_sample*imp_samples.second.get_col(t)); |
| 185 | | } |
| 186 | | |
| 187 | | return pair<vec,vec>(imp_samples.first,sample_prediction); |
| 188 | | } |
| 189 | | else |
| 190 | | { |
| 191 | | mat samples = my_arx->posterior().sample_mat(sample_size); |
| 192 | | |
| 193 | | vec sample_prediction; |
| 194 | | for(int t = 0;t<sample_size;t++) |
| 195 | | { |
| 196 | | vec gau_sample = condition_vector; |
| 197 | | |
| 198 | | if(has_constant) |
| 199 | | { |
| 200 | | gau_sample.ins(gau_sample.size(),1.0); |
| 201 | | } |
| 202 | | |
| 203 | | gau_sample.ins(0,randn()); |
| 204 | | |
| 205 | | sample_prediction.ins(0,gau_sample*samples.get_col(t)); |
| 206 | | } |
| 207 | | |
| 208 | | return pair<vec,vec>(ones(sample_prediction.size()),sample_prediction); |
| | 284 | return pair<vec,vec>(zeros(1),ones(1)); |
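Both return paths of `predict` hand back a pair of (importance weights, sampled one-step predictions); the caller later collapses this into a point prediction with `(predictions.first*predictions.second)/(predictions.first*ones(predictions.first.size()))`, i.e. a self-normalized importance-sampling mean. A small helper expressing the same estimate (hypothetical name, IT++ types) would be:

```cpp
#include <itpp/itbase.h>
#include <utility>
using namespace itpp;

// Self-normalized importance-sampling estimate of the predictive mean:
//   sum_i w_i * y_i / sum_i w_i
// In the classical ARX branch the weights are all ones, so this reduces to a plain average.
double weighted_prediction(const std::pair<vec, vec>& prediction)
{
    const vec& w = prediction.first;    // importance weights
    const vec& y = prediction.second;   // sampled one-step-ahead predictions
    return (w * y) / sum(w);            // operator* on two vecs is the dot product in IT++
}
```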
| | 344 | vec data_vec; |
| | 345 | mat data_matrix; |
| | 346 | DWORD Id; |
| | 347 | |
| | 348 | char szApp[] = "MT4"; |
| | 349 | char szTopic[] = "BID"; |
| | 350 | char szItem[] = "EURUSD"; |
| | 351 | char* itRef = &szItem[0]; |
| | 352 | |
| | 353 | char* file_string = "c:\\rtdata"; |
| | 354 | |
| | 355 | ofstream myfile; |
| | 356 | char fstring[80]; |
| | 357 | strcpy(fstring,file_string); |
| | 358 | strcat(fstring,itRef); |
| | 359 | strcat(fstring,"lognc.txt"); |
| | 360 | |
| | 361 | list<list<pair<int,int>>> model_types = model::possible_models_recurse(max_model_order,1); |
| | 362 | |
| | 363 | int max_window = 50; |
| | 364 | int min_window = 1; |
| | 365 | |
| | 366 | list<model*> models; |
| | 367 | for(list<list<pair<int,int>>>::iterator model_type = model_types.begin();model_type!=model_types.end();model_type++) |
| | 368 | { |
| | 369 | for(int window_size = min_window;window_size < max_window;window_size++) |
| | 370 | { |
| | 371 | //models.push_back(new model((*model_type),true,true,window_size,0,&data_matrix)); |
| | 372 | models.push_back(new model((*model_type),false,true,window_size,0,&data_matrix)); |
| | 373 | //models.push_back(new model((*model_type),true,false,window_size,0,&data_matrix)); |
| | 374 | //models.push_back(new model((*model_type),false,false,window_size,0,&data_matrix)); |
| | 375 | } |
| | 376 | } |
| | 377 | |
| | 378 | mat result_lognc; |
| | 379 | mat result_preds; |
| | 380 | |
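`Id`, `szApp`, `szTopic` and `szItem` above are evidently intended for a DDEML advise link to the MT4 "BID"/"EURUSD" feed, but the connection code itself is not part of this hunk. A hedged sketch of how such a link is usually established, reusing the declarations above and a callback like `DdeCallback` (error handling omitted):

```cpp
// Register the callback and obtain a DDEML instance handle in Id.
DdeInitialize(&Id, (PFNCALLBACK)DdeCallback, APPCLASS_STANDARD | APPCMD_CLIENTONLY, 0);

// String handles for service ("MT4"), topic ("BID") and item ("EURUSD").
HSZ hszApp   = DdeCreateStringHandle(Id, szApp,   CP_WINANSI);
HSZ hszTopic = DdeCreateStringHandle(Id, szTopic, CP_WINANSI);
HSZ hszItem  = DdeCreateStringHandle(Id, szItem,  CP_WINANSI);

// Connect and start an advise loop; XTYP_ADVDATA transactions then arrive in DdeCallback
// via the message pump further down.
HCONV hConv = DdeConnect(Id, hszApp, hszTopic, NULL);
DdeClientTransaction(NULL, 0, hConv, hszItem, CF_TEXT, XTYP_ADVSTART, 1000, NULL);
```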
| 314 | | MSG msg; |
| 315 | | BOOL MsgReturn = GetMessage ( &msg , NULL , 0 , 0 ); |
| 316 | | |
| 317 | | if(MsgReturn) |
| 318 | | { |
| 319 | | TranslateMessage(&msg); |
| 320 | | DispatchMessage(&msg); |
| 321 | | } |
| 322 | | } |
| 323 | | |
| | 430 | while(avg_vec.size() < data_count) |
| | 431 | { |
| | 432 | MSG msg; |
| | 433 | BOOL MsgReturn = GetMessage ( &msg , NULL , 0 , 0 ); |
| | 434 | |
| | 435 | if(MsgReturn) |
| | 436 | { |
| | 437 | TranslateMessage(&msg); |
| | 438 | DispatchMessage(&msg); |
| | 439 | } |
| | 440 | Sleep(500); |
| | 441 | } |
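One Win32 detail about the pump above: `GetMessage` returns 0 only for `WM_QUIT` and -1 on error, so `if(MsgReturn)` also dispatches in the error case. The conventional guard looks like this (a generic sketch, not a proposed change to the hunk):

```cpp
MSG msg;
BOOL ret;
while ((ret = GetMessage(&msg, NULL, 0, 0)) != 0)
{
    if (ret == -1)
        break;                 // GetMessage failed; stop pumping instead of dispatching garbage
    TranslateMessage(&msg);
    DispatchMessage(&msg);     // this is what delivers the queued DDE advise transactions
}
```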
| | 442 | |
| | 443 | |
| | 444 | data_vec.ins(data_vec.size(),avg_vec*ones(avg_vec.size())/avg_vec.size()); |
| | 445 | data_matrix = mat(data_vec).T(); |
| | 446 | avg_vec.del(0,avg_vec.size()-1); |
| | 447 | |
| | 448 | vector<double> test_lognc; |
| | 449 | vec preds; |
| | 450 | |
| | 451 | for(list<model*>::iterator model_ref = models.begin();model_ref!=models.end();model_ref++) |
| | 452 | { |
| | 453 | (*model_ref)->data_update(data_vec.size()-1); |
| | 454 | |
| | 455 | if((*model_ref)->lognc_history.size()==0) |
| | 456 | { |
| | 457 | test_lognc.push_back(0); |
| | 458 | } |
| | 459 | else |
| | 460 | { |
| | 461 | test_lognc.push_back((ones((*model_ref)->lognc_history.size())*(*model_ref)->lognc_history)/(*model_ref)->lognc_history.size()*(max_window/(*model_ref)->window_size)); |
| | 462 | } |
| | 463 | |
| | 464 | pair<vec,vec> predictions = (*model_ref)->predict(500,data_vec.size()-1,&LapRNG); |
| | 465 | |
| | 466 | preds.ins(preds.size(),(predictions.first*predictions.second)/(predictions.first*ones(predictions.first.size()))); |
| | 467 | |
| | 468 | } |
| | 469 | |
| | 470 | //preds.ins(0,data_matrix.get(0,data_vec.size())); |
| | 471 | |
| | 472 | result_preds.ins_col(result_preds.cols(),preds); |
| | 473 | |
| | 474 | myfile.open(fstring,ios::app); |
| | 475 | |
| | 476 | // myfile << my_rarx->posterior->minimal_vertex->get_coordinates()[0]; |
| | 477 | |
| | 478 | for(int i = 0;i<test_lognc.size();i++) |
| | 479 | { |
| | 480 | myfile << test_lognc[i] << ','; |
| | 481 | } |
| | 482 | |
| | 483 | myfile << endl; |
| | 484 | |
| | 485 | myfile.close(); |
| | 486 | |
| | 487 | } |
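A minor readability note on the averaging step inside this loop: `avg_vec*ones(avg_vec.size())/avg_vec.size()` is the arithmetic mean written as a dot product, and IT++ provides an equivalent helper, so the same update could be written as below (a stylistic alternative only):

```cpp
// itpp::mean(const vec&) returns the arithmetic mean, matching avg_vec*ones(n)/n.
data_vec.ins(data_vec.size(), mean(avg_vec));
```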
| | 488 | |
| 327 | | */ |
| 328 | | |
| 329 | | |
| 330 | | |
| 331 | | /* |
| 332 | | // EXPERIMENT: 100 time series of length 30 are generated from the AR model y_t=0.95*y_(t-1)+0.05*y_(t-2)+0.2*e_t, |
| 333 | | // where e_t is normally, Student(4) and Cauchy distributed. Each series is estimated with the robust AR model to obtain the |
| 334 | | // variance of the location-parameter estimators, which is compared to the classical setup. |
| 335 | | vector<vector<vector<string>>> string_lists; |
| 336 | | string_lists.push_back(vector<vector<string>>()); |
| 337 | | string_lists.push_back(vector<vector<string>>()); |
| 338 | | string_lists.push_back(vector<vector<string>>()); |
| 339 | | |
| 340 | | char* file_strings[3] = {"c:\\ar_normal.txt", "c:\\ar_student.txt", "c:\\ar_cauchy.txt"}; |
| 341 | | |
| 342 | | |
| 343 | | for(int i = 0;i<3;i++) |
| 344 | | { |
| 345 | | ifstream myfile(file_strings[i]); |
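The commented-out experiment above reads pre-generated series from c:\ar_normal.txt, c:\ar_student.txt and c:\ar_cauchy.txt. For reference, the generating process y_t=0.95*y_(t-1)+0.05*y_(t-2)+0.2*e_t with the three noise laws can be reproduced by a short standalone generator; the sketch below uses the C++ `<random>` distributions and is an illustration of the setup, not the script that produced the original files.

```cpp
#include <random>
#include <vector>

// Generate one AR(2) series y_t = 0.95*y_(t-1) + 0.05*y_(t-2) + 0.2*e_t of the given length.
// noise_type: 0 = normal, 1 = Student(4), 2 = Cauchy, matching the three experiment files.
std::vector<double> generate_ar2(int length, int noise_type, std::mt19937& rng)
{
    std::normal_distribution<double>    normal_noise(0.0, 1.0);
    std::student_t_distribution<double> student_noise(4.0);
    std::cauchy_distribution<double>    cauchy_noise(0.0, 1.0);

    std::vector<double> y(length, 0.0);   // y_0 = y_1 = 0 as initial conditions
    for (int t = 2; t < length; t++)
    {
        double e = (noise_type == 0) ? normal_noise(rng)
                 : (noise_type == 1) ? student_noise(rng)
                                     : cauchy_noise(rng);
        y[t] = 0.95 * y[t - 1] + 0.05 * y[t - 2] + 0.2 * e;
    }
    return y;
}
```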
| | 492 | |
| | 493 | return 0; |
| | 494 | } |
| | 495 | |
| | 496 | /* |
| | 497 | // EXPERIMENT: Moving-window estimation and prediction with RARX is tested on data generated from |
| | 498 | // y_t=0.95*y_(t-1)+0.05*y_(t-2)+0.2*e_t, where e_t is normally, Student(4) and Cauchy distributed, so that |
| | 499 | // it can be compared to the classical setup. |
| | 500 | |
| | 501 | vector<vector<string>> strings; |
| | 502 | |
| | 503 | char* file_string = "c:\\dataGCClosePercDiff"; |
| | 504 | |
| | 505 | char dfstring[80]; |
| | 506 | strcpy(dfstring,file_string); |
| | 507 | strcat(dfstring,".txt"); |
| | 508 | |
| | 509 | |
| | 510 | mat data_matrix; |
| | 511 | ifstream myfile(dfstring); |
| 391 | | if(conditions.size()>2) |
| | 526 | data_matrix.ins_row(data_matrix.rows(),data_vector); |
| | 527 | } |
| | 528 | |
| | 529 | myfile.close(); |
| | 530 | } |
| | 531 | else |
| | 532 | { |
| | 533 | cout << "Can't open data file!" << endl; |
| | 534 | } |
| | 535 | */ |
| | 536 | |
| | 537 | |
| | 538 | // cout << "Updated." << endl; |
| | 539 | |
| | 540 | /* |
| | 541 | // EXPERIMENT: 100 time series of length 30 are generated from the AR model y_t=0.95*y_(t-1)+0.05*y_(t-2)+0.2*e_t, |
| | 542 | // where e_t is normally, Student(4) and Cauchy distributed. Each series is estimated with the robust AR model to obtain the |
| | 543 | // variance of the location-parameter estimators, which is compared to the classical setup. |
| | 544 | vector<vector<vector<string>>> string_lists; |
| | 545 | string_lists.push_back(vector<vector<string>>()); |
| | 546 | string_lists.push_back(vector<vector<string>>()); |
| | 547 | string_lists.push_back(vector<vector<string>>()); |
| | 548 | |
| | 549 | char* file_strings[3] = {"c:\\ar_normal.txt", "c:\\ar_student.txt", "c:\\ar_cauchy.txt"}; |
| | 550 | |
| | 551 | |
| | 552 | for(int i = 0;i<3;i++) |
| | 553 | { |
| | 554 | ifstream myfile(file_strings[i]); |
| | 555 | if (myfile.is_open()) |
| | 556 | { |
| | 557 | while ( myfile.good() ) |
| 393 | | conditions[k-3].ins(0,atof(string_lists[j][i][k].c_str())); |
| 394 | | |
| 395 | | //cout << "modi:" << conditions[k-3] << endl; |
| 396 | | |
| 397 | | my_rarx->bayes(conditions[k-3]); |
| 398 | | |
| 399 | | |
| 400 | | //if(k>5) |
| 401 | | //{ |
| 402 | | // cout << "MaxLik coords:" << emliga->minimal_vertex->get_coordinates() << endl; |
| 403 | | //} |
| 404 | | |
| 405 | | } |
| | 559 | string line; |
| | 560 | getline(myfile,line); |
| | 561 | |
| | 562 | vector<string> parsed_line; |
| | 563 | while(line.find(',') != string::npos) |
| | 564 | { |
| | 565 | int loc = line.find(','); |
| | 566 | parsed_line.push_back(line.substr(0,loc)); |
| | 567 | line.erase(0,loc+1); |
| | 568 | } |
| | 569 | |
| | 570 | string_lists[i].push_back(parsed_line); |
| | 571 | } |
| | 572 | myfile.close(); |
| | 573 | } |
| | 574 | } |
| | 575 | |
| | 576 | for(int j = 0;j<string_lists.size();j++) |
| | 577 | { |
| | 578 | |
| | 579 | for(int i = 0;i<string_lists[j].size()-1;i++) |
| | 580 | { |
| | 581 | vector<vec> conditions; |
| | 582 | //emlig* emliga = new emlig(2); |
| | 583 | RARX* my_rarx = new RARX(2,30); |
| | 584 | |
| | 585 | for(int k = 1;k<string_lists[j][i].size();k++) |
| | 586 | { |
| | 587 | vec condition; |
| | 588 | //condition.ins(0,1); |
| | 589 | condition.ins(0,atof(string_lists[j][i][k].c_str())); |
| | 590 | conditions.push_back(condition); |
| | 591 | |
| | 592 | //cout << "orig:" << condition << endl; |
| | 593 | |
| | 594 | if(conditions.size()>1) |
| | 595 | { |
| | 596 | conditions[k-2].ins(0,atof(string_lists[j][i][k].c_str())); |
| | 597 | |
| | 598 | } |
| | 599 | |
| | 600 | if(conditions.size()>2) |
| | 601 | { |
| | 602 | conditions[k-3].ins(0,atof(string_lists[j][i][k].c_str())); |
| | 603 | |
| | 604 | //cout << "modi:" << conditions[k-3] << endl; |
| | 605 | |
| | 606 | my_rarx->bayes(conditions[k-3]); |
| | 607 | |
| | 608 | |
| | 609 | //if(k>5) |
| | 610 | //{ |
| | 611 | // cout << "MaxLik coords:" << emliga->minimal_vertex->get_coordinates() << endl; |
| | 612 | //} |
| | 613 | |
| | 614 | } |
| | 615 | |
| | 616 | } |
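The triple `ins(0, ...)` bookkeeping in this loop builds, for every step k with at least three observations, the vector conditions[k-3] = [y_k, y_(k-1), y_(k-2)] before handing it to `my_rarx->bayes`. Once the parsed strings are converted to doubles, the same regressor can be assembled directly, which may read more clearly (a hypothetical equivalent, assuming `bayes` expects the current value followed by the two lags):

```cpp
// Direct construction of the [y_k, y_(k-1), y_(k-2)] data vector for each AR(2) update.
for (int k = 3; k < (int)string_lists[j][i].size(); k++)
{
    vec d;
    d.ins(0, atof(string_lists[j][i][k - 2].c_str()));  // y_(k-2)
    d.ins(0, atof(string_lists[j][i][k - 1].c_str()));  // y_(k-1)
    d.ins(0, atof(string_lists[j][i][k].c_str()));      // y_k (prepended last, so it ends up first)
    my_rarx->bayes(d);
}
```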
| | 617 | |
| | 618 | //emliga->step_me(0); |
| | 619 | /* |
| | 620 | ofstream myfile; |
| | 621 | myfile.open("c:\\robust_ar1.txt",ios::app); |
| | 622 | myfile << my_rarx->minimal_vertex->get_coordinates()[0] << ";"; |
| | 623 | myfile.close(); |
| | 624 | |
| | 625 | myfile.open("c:\\robust_ar2.txt",ios::app); |
| | 626 | myfile << emliga->minimal_vertex->get_coordinates()[1] << ";"; |
| | 627 | myfile.close(); |
| 419 | | |
| 420 | | |
| 421 | | cout << "MaxLik coords:" << emliga->minimal_vertex->get_coordinates() << endl; |
| 422 | | cout << "Step: " << i << endl; |
| 423 | | } |
| 424 | | |
| 425 | | cout << "One experiment finished." << endl; |
| 426 | | |
| 427 | | ofstream myfile; |
| 428 | | myfile.open("c:\\robust_ar1.txt",ios::app); |
| 429 | | myfile << endl; |
| 430 | | myfile.close(); |
| 431 | | |
| 432 | | myfile.open("c:\\robust_ar2.txt",ios::app); |
| 433 | | myfile << endl; |
| 434 | | myfile.close(); |
| 435 | | }*/ |
| 436 | | |
| 437 | | // EXPERIMENT: Moving-window estimation and prediction with RARX is tested on data generated from |
| 438 | | // y_t=0.95*y_(t-1)+0.05*y_(t-2)+0.2*e_t, where e_t is normally, Student(4) and Cauchy distributed, so that |
| 439 | | // it can be compared to the classical setup. |
| 440 | | |
| 441 | | itpp::Laplace_RNG LapRNG = Laplace_RNG(); |
| 442 | | |
| 443 | | vector<vector<string>> strings; |
| 444 | | |
| 445 | | char* file_string = "c:\\dataADClosePercDiff"; |
| 446 | | |
| 447 | | char dfstring[80]; |
| 448 | | strcpy(dfstring,file_string); |
| 449 | | strcat(dfstring,".txt"); |
| 450 | | |
| 451 | | |
| 452 | | mat data_matrix; |
| 453 | | ifstream myfile(dfstring); |
| 454 | | if (myfile.is_open()) |
| 455 | | { |
| 456 | | string line; |
| 457 | | while(getline(myfile,line)) |
| 458 | | { |
| 459 | | vec data_vector; |
| 460 | | while(line.find(',') != string::npos) |
| 461 | | { |
| 462 | | int loc2 = line.find('\n'); |
| 463 | | int loc = line.find(','); |
| 464 | | data_vector.ins(data_vector.size(),atof(line.substr(0,loc).c_str())); |
| 465 | | line.erase(0,loc+1); |
| 466 | | } |
| 467 | | |
| 468 | | data_matrix.ins_row(data_matrix.rows(),data_vector); |
| 469 | | } |
| 470 | | |
| 471 | | myfile.close(); |
| 472 | | } |
| 473 | | else |
| 474 | | { |
| 475 | | cout << "Can't open data file!" << endl; |
| 476 | | } |
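A caveat about the line parser above: splitting on `line.find(',')` only consumes tokens that are followed by a comma, so when a line does not end with a trailing comma its last value is silently dropped (and `loc2` is never used). An equivalent, slightly safer parse with `std::getline` on a string stream would be (a sketch, assuming the same comma-separated layout):

```cpp
#include <sstream>
#include <string>
#include <cstdlib>
#include <itpp/itbase.h>
using namespace itpp;

// Parse one comma-separated line into an IT++ vector, keeping the final field as well.
vec parse_csv_line(const std::string& line)
{
    vec row;
    std::istringstream stream(line);
    std::string token;
    while (std::getline(stream, token, ','))
    {
        if (!token.empty())
            row.ins(row.size(), atof(token.c_str()));
    }
    return row;
}
```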
| 477 | | |
| 478 | | |
| 479 | | list<list<pair<int,int>>> model_types = model::possible_models_recurse(max_model_order,data_matrix.rows()); |
| 480 | | |
| 481 | | list<model*> models; |
| 482 | | for(list<list<pair<int,int>>>::iterator model_type = model_types.begin();model_type!=model_types.end();model_type++) |
| 483 | | { |
| 484 | | for(int window_size = 30;window_size < 31;window_size++) |
| 485 | | { |
| 486 | | models.push_back(new model((*model_type),true,true,window_size,0,&data_matrix)); |
| 487 | | models.push_back(new model((*model_type),false,true,window_size,0,&data_matrix)); |
| 488 | | models.push_back(new model((*model_type),true,false,window_size,0,&data_matrix)); |
| 489 | | models.push_back(new model((*model_type),false,false,window_size,0,&data_matrix)); |
| 490 | | } |
| 491 | | } |
| 492 | | |
| 493 | | mat result_lognc; |
| 494 | | mat result_preds; |
| 495 | | |
| 496 | | for(int time = max_model_order;time<data_matrix.cols();time++) //time<data_matrix.cols() |
| 497 | | { |
| 498 | | vec cur_res_lognc; |
| 499 | | vec preds; |
| 500 | | |
| 501 | | for(list<model*>::iterator model_ref = models.begin();model_ref!=models.end();model_ref++) |
| 502 | | { |
| 503 | | (*model_ref)->data_update(time); |
| 504 | | |
| 505 | | if((*model_ref)->my_rarx!=NULL) |
| 506 | | { |
| 507 | | cur_res_lognc.ins(cur_res_lognc.size(),(*model_ref)->my_rarx->posterior->log_nc); |
| 508 | | } |
| 509 | | else |
| 510 | | { |
| 511 | | cur_res_lognc.ins(cur_res_lognc.size(),(*model_ref)->my_arx->posterior().lognc()); |
| 512 | | } |
| 513 | | |
| 514 | | pair<vec,vec> predictions = (*model_ref)->predict(500,time,&LapRNG); |
| 515 | | |
| 516 | | preds.ins(preds.size(),(predictions.first*predictions.second)/(predictions.first*ones(predictions.first.size()))); |
| 517 | | } |
| 518 | | |
| 519 | | preds.ins(0,data_matrix.get(0,time+1)); |
| 520 | | |
| 521 | | result_lognc.ins_col(result_lognc.cols(),cur_res_lognc); |
| 522 | | result_preds.ins_col(result_preds.cols(),preds); |
| 523 | | |
| 524 | | // cout << "Updated." << endl; |
| 525 | | |
| | 644 | }*/ |
| | 645 | |