/*
 * Source provenance (scraper metadata, kept as a comment so the file compiles):
 *   PageRenderTime 57ms CodeModel.GetById 16ms RepoModel.GetById 0ms app.codeStats 0ms
 *   /lib/JNI_SVM-light-6.01/src/svmlight-6.01/svm_learn_main.c
 *   https://bitbucket.org/nickheiner/weather-prediction-machine-learning
 *   C | 476 lines | 412 code | 34 blank | 30 comment | 63 complexity | 280657a42ad83a1ac39df93e2868d016 MD5
 */
  1. /***********************************************************************/
  2. /* */
  3. /* svm_learn_main.c */
  4. /* */
  5. /* Command line interface to the learning module of the */
  6. /* Support Vector Machine. */
  7. /* */
  8. /* Author: Thorsten Joachims */
  9. /* Date: 02.07.02 */
  10. /* */
  11. /* Copyright (c) 2000 Thorsten Joachims - All rights reserved */
  12. /* */
  13. /* This software is available for non-commercial use only. It must */
  14. /* not be modified and distributed without prior permission of the */
  15. /* author. The author is not responsible for implications from the */
  16. /* use of this software. */
  17. /* */
  18. /***********************************************************************/
  19. /* uncomment, if you want to use svm-learn out of C++ */
  20. /* extern "C" { */
  21. # include "svm_common.h"
  22. # include "svm_learn.h"
  23. /*}*/
  24. char docfile[200]; /* file with training examples */
  25. char modelfile[200]; /* file for resulting classifier */
  26. char restartfile[200]; /* file with initial alphas */
  27. void read_input_parameters(int, char **, char *, char *, char *, long *,
  28. LEARN_PARM *, KERNEL_PARM *);
  29. void wait_any_key();
  30. void print_help();
  31. int main (int argc, char* argv[])
  32. {
  33. DOC **docs; /* training examples */
  34. long totwords,totdoc,i;
  35. double *target;
  36. double *alpha_in=NULL;
  37. KERNEL_CACHE *kernel_cache;
  38. LEARN_PARM learn_parm;
  39. KERNEL_PARM kernel_parm;
  40. MODEL *model=(MODEL *)my_malloc(sizeof(MODEL));
  41. read_input_parameters(argc,argv,docfile,modelfile,restartfile,&verbosity,
  42. &learn_parm,&kernel_parm);
  43. read_documents(docfile,&docs,&target,&totwords,&totdoc);
  44. if(restartfile[0]) alpha_in=read_alphas(restartfile,totdoc);
  45. FILE * dump = NULL;
  46. char* traindump = (char *) my_malloc(sizeof(char)*25);
  47. sprintf(traindump,"maintraindump%d.dat",1);
  48. int lengthcnt = 20;
  49. int namecnt=2;
  50. while((dump = fopen(traindump,"r+")) != NULL) {
  51. fclose(dump);
  52. printf("traindump is already there: %s\n",traindump);
  53. if (strlen(traindump) >= lengthcnt) {
  54. free(traindump);
  55. lengthcnt =+ 20;
  56. traindump = (char *) my_malloc(sizeof(char)*lengthcnt);
  57. }
  58. sprintf(traindump,"maintraindump%d.dat",namecnt++);
  59. }
  60. printf("------------------------------ Writing traindump to file %s",traindump);
  61. if ((dump = fopen(traindump,"w")) == NULL) {
  62. perror("Doesnt work!\n");
  63. exit(1);
  64. }
  65. printf("\n|||||||||||||||||||||||||||||||||| dumping ..\n");
  66. long int z = 0;
  67. long int y = 0;
  68. fprintf(dump,"totaldocuments: %ld \n",totdoc);
  69. while(z<(totdoc)) {
  70. fprintf(dump,"(%ld) (QID: %ld) (CF: %.16g) (SID: %ld) ",docs[z]->docnum,docs[z]->queryid,docs[z]->costfactor,docs[z]->slackid);
  71. SVECTOR *v = docs[z]->fvec;
  72. fprintf(dump,"(NORM:%.32g) (UD:%s) (KID:%ld) (VL:%p) (F:%.32g) %.32g ",v->twonorm_sq,(v->userdefined == NULL ? "" : v->userdefined),v->kernel_id,v->next,v->factor,target[z]);
  73. if (v != NULL && v->words != NULL) {
  74. while ((v->words[y]).wnum) {
  75. fprintf(dump,"%ld:%.32g ",(v->words[y]).wnum, (v->words[y]).weight);
  76. y++;
  77. }
  78. } else
  79. fprintf(dump, "NULL WORTE\n");
  80. fprintf(dump,"\n");
  81. y=0;
  82. z++;
  83. }
  84. fprintf(dump,"---------------------------------------------------\n");
  85. fprintf(dump,"kernel_type: %ld\n",kernel_parm.kernel_type);
  86. fprintf(dump,"poly_degree: %ld\n",kernel_parm.poly_degree);
  87. fprintf(dump,"rbf_gamma: %.32g\n",kernel_parm.rbf_gamma);
  88. fprintf(dump,"coef_lin: %.32g\n",kernel_parm.coef_lin);
  89. fprintf(dump,"coef_const: %.32g\n",kernel_parm.coef_const);
  90. fprintf(dump,"custom: %s\n",kernel_parm.custom);
  91. fprintf(dump,"type: %ld\n",learn_parm.type);
  92. fprintf(dump,"svm_c: %.32g\n",learn_parm.svm_c);
  93. fprintf(dump,"eps: %.32g\n",learn_parm.eps);
  94. fprintf(dump,"svm_costratio: %.32g\n",learn_parm.svm_costratio);
  95. fprintf(dump,"transduction_posratio: %.32g\n",learn_parm.transduction_posratio);
  96. fprintf(dump,"biased_hyperplane: %ld\n",learn_parm.biased_hyperplane);
  97. fprintf(dump,"svm_maxqpsize: %ld\n",learn_parm.svm_maxqpsize);
  98. fprintf(dump,"svm_newvarsinqp: %ld\n",learn_parm.svm_newvarsinqp);
  99. fprintf(dump,"epsilon_crit: %.32g\n",learn_parm.epsilon_crit);
  100. fprintf(dump,"epsilon_shrink: %.32g\n",learn_parm.epsilon_shrink);
  101. fprintf(dump,"svm_iter_to_shrink: %ld\n",learn_parm.svm_iter_to_shrink);
  102. fprintf(dump,"remove_inconsistent: %ld\n",learn_parm.remove_inconsistent);
  103. fprintf(dump,"skip_final_opt_check: %ld\n",learn_parm.skip_final_opt_check);
  104. fprintf(dump,"compute_loo: %ld\n",learn_parm.compute_loo);
  105. fprintf(dump,"rho: %.32g\n",learn_parm.rho);
  106. fprintf(dump,"xa_depth: %ld\n",learn_parm.xa_depth);
  107. fprintf(dump,"predfile: %s\n",learn_parm.predfile);
  108. fprintf(dump,"alphafile: %s\n",learn_parm.alphafile);
  109. fprintf(dump,"epsilon_const: %.32g\n",learn_parm.epsilon_const);
  110. fprintf(dump,"epsilon_a: %.32g\n",learn_parm.epsilon_a);
  111. fprintf(dump,"opt_precision: %.32g\n",learn_parm.opt_precision);
  112. fprintf(dump,"svm_c_steps: %ld\n",learn_parm.svm_c_steps);
  113. fprintf(dump,"svm_c_factor: %.32g\n",learn_parm.svm_c_factor);
  114. fprintf(dump,"svm_costratio_unlab: %.32g\n",learn_parm.svm_costratio_unlab);
  115. fprintf(dump,"svm_unlabbound: %.32g\n",learn_parm.svm_unlabbound);
  116. if(kernel_parm.kernel_type == LINEAR) { /* don't need the cache */
  117. kernel_cache=NULL;
  118. }
  119. else {
  120. /* Always get a new kernel cache. It is not possible to use the
  121. same cache for two different training runs */
  122. kernel_cache=kernel_cache_init(totdoc,learn_parm.kernel_cache_size);
  123. }
  124. if(learn_parm.type == CLASSIFICATION) {
  125. svm_learn_classification(docs,target,totdoc,totwords,&learn_parm,
  126. &kernel_parm,kernel_cache,model,alpha_in);
  127. }
  128. else if(learn_parm.type == REGRESSION) {
  129. svm_learn_regression(docs,target,totdoc,totwords,&learn_parm,
  130. &kernel_parm,&kernel_cache,model);
  131. }
  132. else if(learn_parm.type == RANKING) {
  133. svm_learn_ranking(docs,target,totdoc,totwords,&learn_parm,
  134. &kernel_parm,&kernel_cache,model);
  135. }
  136. else if(learn_parm.type == OPTIMIZATION) {
  137. svm_learn_optimization(docs,target,totdoc,totwords,&learn_parm,
  138. &kernel_parm,kernel_cache,model,alpha_in);
  139. }
  140. fprintf(dump,"totwords: %ld\n",learn_parm.totwords);
  141. printf("|||||||||||||||||||||||||||||||||| z: %ld, totdoc: %ld\n",z,totdoc);
  142. fclose(dump);
  143. if(kernel_cache) {
  144. /* Free the memory used for the cache. */
  145. kernel_cache_cleanup(kernel_cache);
  146. }
  147. /* Warning: The model contains references to the original data 'docs'.
  148. If you want to free the original data, and only keep the model, you
  149. have to make a deep copy of 'model'. */
  150. /* deep_copy_of_model=copy_model(model); */
  151. write_model(modelfile,model);
  152. free(alpha_in);
  153. free_model(model,0);
  154. for(i=0;i<totdoc;i++)
  155. free_example(docs[i],1);
  156. free(docs);
  157. free(target);
  158. return(0);
  159. }
  160. /*---------------------------------------------------------------------------*/
  161. void read_input_parameters(int argc,char *argv[],char *docfile,char *modelfile,
  162. char *restartfile,long *verbosity,
  163. LEARN_PARM *learn_parm,KERNEL_PARM *kernel_parm)
  164. {
  165. long i;
  166. char type[100];
  167. /* set default */
  168. strcpy (modelfile, "svm_model");
  169. strcpy (learn_parm->predfile, "trans_predictions");
  170. strcpy (learn_parm->alphafile, "");
  171. strcpy (restartfile, "");
  172. (*verbosity)=1;
  173. learn_parm->biased_hyperplane=1;
  174. learn_parm->sharedslack=0;
  175. learn_parm->remove_inconsistent=0;
  176. learn_parm->skip_final_opt_check=0;
  177. learn_parm->svm_maxqpsize=10;
  178. learn_parm->svm_newvarsinqp=0;
  179. learn_parm->svm_iter_to_shrink=-9999;
  180. learn_parm->maxiter=100000;
  181. learn_parm->kernel_cache_size=40;
  182. learn_parm->svm_c=0.0;
  183. learn_parm->eps=0.1;
  184. learn_parm->transduction_posratio=-1.0;
  185. learn_parm->svm_costratio=1.0;
  186. learn_parm->svm_costratio_unlab=1.0;
  187. learn_parm->svm_unlabbound=1E-5;
  188. learn_parm->epsilon_crit=0.001;
  189. learn_parm->epsilon_a=1E-15;
  190. learn_parm->compute_loo=0;
  191. learn_parm->rho=1.0;
  192. learn_parm->xa_depth=0;
  193. kernel_parm->kernel_type=0;
  194. kernel_parm->poly_degree=3;
  195. kernel_parm->rbf_gamma=1.0;
  196. kernel_parm->coef_lin=1;
  197. kernel_parm->coef_const=1;
  198. strcpy(kernel_parm->custom,"empty");
  199. strcpy(type,"c");
  200. for(i=1;(i<argc) && ((argv[i])[0] == '-');i++) {
  201. switch ((argv[i])[1])
  202. {
  203. case '?': print_help(); exit(0);
  204. case 'z': i++; strcpy(type,argv[i]); break;
  205. case 'v': i++; (*verbosity)=atol(argv[i]); break;
  206. case 'b': i++; learn_parm->biased_hyperplane=atol(argv[i]); break;
  207. case 'i': i++; learn_parm->remove_inconsistent=atol(argv[i]); break;
  208. case 'f': i++; learn_parm->skip_final_opt_check=!atol(argv[i]); break;
  209. case 'q': i++; learn_parm->svm_maxqpsize=atol(argv[i]); break;
  210. case 'n': i++; learn_parm->svm_newvarsinqp=atol(argv[i]); break;
  211. case '#': i++; learn_parm->maxiter=atol(argv[i]); break;
  212. case 'h': i++; learn_parm->svm_iter_to_shrink=atol(argv[i]); break;
  213. case 'm': i++; learn_parm->kernel_cache_size=atol(argv[i]); break;
  214. case 'c': i++; learn_parm->svm_c=atof(argv[i]); break;
  215. case 'w': i++; learn_parm->eps=atof(argv[i]); break;
  216. case 'p': i++; learn_parm->transduction_posratio=atof(argv[i]); break;
  217. case 'j': i++; learn_parm->svm_costratio=atof(argv[i]); break;
  218. case 'e': i++; learn_parm->epsilon_crit=atof(argv[i]); break;
  219. case 'o': i++; learn_parm->rho=atof(argv[i]); break;
  220. case 'k': i++; learn_parm->xa_depth=atol(argv[i]); break;
  221. case 'x': i++; learn_parm->compute_loo=atol(argv[i]); break;
  222. case 't': i++; kernel_parm->kernel_type=atol(argv[i]); break;
  223. case 'd': i++; kernel_parm->poly_degree=atol(argv[i]); break;
  224. case 'g': i++; kernel_parm->rbf_gamma=atof(argv[i]); break;
  225. case 's': i++; kernel_parm->coef_lin=atof(argv[i]); break;
  226. case 'r': i++; kernel_parm->coef_const=atof(argv[i]); break;
  227. case 'u': i++; strcpy(kernel_parm->custom,argv[i]); break;
  228. case 'l': i++; strcpy(learn_parm->predfile,argv[i]); break;
  229. case 'a': i++; strcpy(learn_parm->alphafile,argv[i]); break;
  230. case 'y': i++; strcpy(restartfile,argv[i]); break;
  231. default: printf("\nUnrecognized option %s!\n\n",argv[i]);
  232. print_help();
  233. exit(0);
  234. }
  235. }
  236. if(i>=argc) {
  237. printf("\nNot enough input parameters!\n\n");
  238. wait_any_key();
  239. print_help();
  240. exit(0);
  241. }
  242. strcpy (docfile, argv[i]);
  243. if((i+1)<argc) {
  244. strcpy (modelfile, argv[i+1]);
  245. }
  246. if(learn_parm->svm_iter_to_shrink == -9999) {
  247. if(kernel_parm->kernel_type == LINEAR)
  248. learn_parm->svm_iter_to_shrink=2;
  249. else
  250. learn_parm->svm_iter_to_shrink=100;
  251. }
  252. if(strcmp(type,"c")==0) {
  253. learn_parm->type=CLASSIFICATION;
  254. }
  255. else if(strcmp(type,"r")==0) {
  256. learn_parm->type=REGRESSION;
  257. }
  258. else if(strcmp(type,"p")==0) {
  259. learn_parm->type=RANKING;
  260. }
  261. else if(strcmp(type,"o")==0) {
  262. learn_parm->type=OPTIMIZATION;
  263. }
  264. else if(strcmp(type,"s")==0) {
  265. learn_parm->type=OPTIMIZATION;
  266. learn_parm->sharedslack=1;
  267. }
  268. else {
  269. printf("\nUnknown type '%s': Valid types are 'c' (classification), 'r' regession, and 'p' preference ranking.\n",type);
  270. wait_any_key();
  271. print_help();
  272. exit(0);
  273. }
  274. if((learn_parm->skip_final_opt_check)
  275. && (kernel_parm->kernel_type == LINEAR)) {
  276. printf("\nIt does not make sense to skip the final optimality check for linear kernels.\n\n");
  277. learn_parm->skip_final_opt_check=0;
  278. }
  279. if((learn_parm->skip_final_opt_check)
  280. && (learn_parm->remove_inconsistent)) {
  281. printf("\nIt is necessary to do the final optimality check when removing inconsistent \nexamples.\n");
  282. wait_any_key();
  283. print_help();
  284. exit(0);
  285. }
  286. if((learn_parm->svm_maxqpsize<2)) {
  287. printf("\nMaximum size of QP-subproblems not in valid range: %ld [2..]\n",learn_parm->svm_maxqpsize);
  288. wait_any_key();
  289. print_help();
  290. exit(0);
  291. }
  292. if((learn_parm->svm_maxqpsize<learn_parm->svm_newvarsinqp)) {
  293. printf("\nMaximum size of QP-subproblems [%ld] must be larger than the number of\n",learn_parm->svm_maxqpsize);
  294. printf("new variables [%ld] entering the working set in each iteration.\n",learn_parm->svm_newvarsinqp);
  295. wait_any_key();
  296. print_help();
  297. exit(0);
  298. }
  299. if(learn_parm->svm_iter_to_shrink<1) {
  300. printf("\nMaximum number of iterations for shrinking not in valid range: %ld [1,..]\n",learn_parm->svm_iter_to_shrink);
  301. wait_any_key();
  302. print_help();
  303. exit(0);
  304. }
  305. if(learn_parm->svm_c<0) {
  306. printf("\nThe C parameter must be greater than zero!\n\n");
  307. wait_any_key();
  308. print_help();
  309. exit(0);
  310. }
  311. if(learn_parm->transduction_posratio>1) {
  312. printf("\nThe fraction of unlabeled examples to classify as positives must\n");
  313. printf("be less than 1.0 !!!\n\n");
  314. wait_any_key();
  315. print_help();
  316. exit(0);
  317. }
  318. if(learn_parm->svm_costratio<=0) {
  319. printf("\nThe COSTRATIO parameter must be greater than zero!\n\n");
  320. wait_any_key();
  321. print_help();
  322. exit(0);
  323. }
  324. if(learn_parm->epsilon_crit<=0) {
  325. printf("\nThe epsilon parameter must be greater than zero!\n\n");
  326. wait_any_key();
  327. print_help();
  328. exit(0);
  329. }
  330. if(learn_parm->rho<0) {
  331. printf("\nThe parameter rho for xi/alpha-estimates and leave-one-out pruning must\n");
  332. printf("be greater than zero (typically 1.0 or 2.0, see T. Joachims, Estimating the\n");
  333. printf("Generalization Performance of an SVM Efficiently, ICML, 2000.)!\n\n");
  334. wait_any_key();
  335. print_help();
  336. exit(0);
  337. }
  338. if((learn_parm->xa_depth<0) || (learn_parm->xa_depth>100)) {
  339. printf("\nThe parameter depth for ext. xi/alpha-estimates must be in [0..100] (zero\n");
  340. printf("for switching to the conventional xa/estimates described in T. Joachims,\n");
  341. printf("Estimating the Generalization Performance of an SVM Efficiently, ICML, 2000.)\n");
  342. wait_any_key();
  343. print_help();
  344. exit(0);
  345. }
  346. }
  347. void wait_any_key()
  348. {
  349. printf("\n(more)\n");
  350. (void)getc(stdin);
  351. }
  352. void print_help()
  353. {
  354. printf("\nSVM-light %s: Support Vector Machine, learning module %s\n",VERSION,VERSION_DATE);
  355. copyright_notice();
  356. printf(" usage: svm_learn [options] example_file model_file\n\n");
  357. printf("Arguments:\n");
  358. printf(" example_file-> file with training data\n");
  359. printf(" model_file -> file to store learned decision rule in\n");
  360. printf("General options:\n");
  361. printf(" -? -> this help\n");
  362. printf(" -v [0..3] -> verbosity level (default 1)\n");
  363. printf("Learning options:\n");
  364. printf(" -z {c,r,p} -> select between classification (c), regression (r),\n");
  365. printf(" and preference ranking (p) (default classification)\n");
  366. printf(" -c float -> C: trade-off between training error\n");
  367. printf(" and margin (default [avg. x*x]^-1)\n");
  368. printf(" -w [0..] -> epsilon width of tube for regression\n");
  369. printf(" (default 0.1)\n");
  370. printf(" -j float -> Cost: cost-factor, by which training errors on\n");
  371. printf(" positive examples outweight errors on negative\n");
  372. printf(" examples (default 1) (see [4])\n");
  373. printf(" -b [0,1] -> use biased hyperplane (i.e. x*w+b>0) instead\n");
  374. printf(" of unbiased hyperplane (i.e. x*w>0) (default 1)\n");
  375. printf(" -i [0,1] -> remove inconsistent training examples\n");
  376. printf(" and retrain (default 0)\n");
  377. printf("Performance estimation options:\n");
  378. printf(" -x [0,1] -> compute leave-one-out estimates (default 0)\n");
  379. printf(" (see [5])\n");
  380. printf(" -o ]0..2] -> value of rho for XiAlpha-estimator and for pruning\n");
  381. printf(" leave-one-out computation (default 1.0) (see [2])\n");
  382. printf(" -k [0..100] -> search depth for extended XiAlpha-estimator \n");
  383. printf(" (default 0)\n");
  384. printf("Transduction options (see [3]):\n");
  385. printf(" -p [0..1] -> fraction of unlabeled examples to be classified\n");
  386. printf(" into the positive class (default is the ratio of\n");
  387. printf(" positive and negative examples in the training data)\n");
  388. printf("Kernel options:\n");
  389. printf(" -t int -> type of kernel function:\n");
  390. printf(" 0: linear (default)\n");
  391. printf(" 1: polynomial (s a*b+c)^d\n");
  392. printf(" 2: radial basis function exp(-gamma ||a-b||^2)\n");
  393. printf(" 3: sigmoid tanh(s a*b + c)\n");
  394. printf(" 4: user defined kernel from kernel.h\n");
  395. printf(" -d int -> parameter d in polynomial kernel\n");
  396. printf(" -g float -> parameter gamma in rbf kernel\n");
  397. printf(" -s float -> parameter s in sigmoid/poly kernel\n");
  398. printf(" -r float -> parameter c in sigmoid/poly kernel\n");
  399. printf(" -u string -> parameter of user defined kernel\n");
  400. printf("Optimization options (see [1]):\n");
  401. printf(" -q [2..] -> maximum size of QP-subproblems (default 10)\n");
  402. printf(" -n [2..q] -> number of new variables entering the working set\n");
  403. printf(" in each iteration (default n = q). Set n<q to prevent\n");
  404. printf(" zig-zagging.\n");
  405. printf(" -m [5..] -> size of cache for kernel evaluations in MB (default 40)\n");
  406. printf(" The larger the faster...\n");
  407. printf(" -e float -> eps: Allow that error for termination criterion\n");
  408. printf(" [y [w*x+b] - 1] >= eps (default 0.001)\n");
  409. printf(" -y [0,1] -> restart the optimization from alpha values in file\n");
  410. printf(" specified by -a option. (default 0)\n");
  411. printf(" -h [5..] -> number of iterations a variable needs to be\n");
  412. printf(" optimal before considered for shrinking (default 100)\n");
  413. printf(" -f [0,1] -> do final optimality check for variables removed\n");
  414. printf(" by shrinking. Although this test is usually \n");
  415. printf(" positive, there is no guarantee that the optimum\n");
  416. printf(" was found if the test is omitted. (default 1)\n");
  417. printf(" -y string -> if option is given, reads alphas from file with given\n");
  418. printf(" and uses them as starting point. (default 'disabled')\n");
  419. printf(" -# int -> terminate optimization, if no progress after this\n");
  420. printf(" number of iterations. (default 100000)\n");
  421. printf("Output options:\n");
  422. printf(" -l string -> file to write predicted labels of unlabeled\n");
  423. printf(" examples into after transductive learning\n");
  424. printf(" -a string -> write all alphas to this file after learning\n");
  425. printf(" (in the same order as in the training set)\n");
  426. wait_any_key();
  427. printf("\nMore details in:\n");
  428. printf("[1] T. Joachims, Making Large-Scale SVM Learning Practical. Advances in\n");
  429. printf(" Kernel Methods - Support Vector Learning, B. Schölkopf and C. Burges and\n");
  430. printf(" A. Smola (ed.), MIT Press, 1999.\n");
  431. printf("[2] T. Joachims, Estimating the Generalization performance of an SVM\n");
  432. printf(" Efficiently. International Conference on Machine Learning (ICML), 2000.\n");
  433. printf("[3] T. Joachims, Transductive Inference for Text Classification using Support\n");
  434. printf(" Vector Machines. International Conference on Machine Learning (ICML),\n");
  435. printf(" 1999.\n");
  436. printf("[4] K. Morik, P. Brockhausen, and T. Joachims, Combining statistical learning\n");
  437. printf(" with a knowledge-based approach - A case study in intensive care \n");
  438. printf(" monitoring. International Conference on Machine Learning (ICML), 1999.\n");
  439. printf("[5] T. Joachims, Learning to Classify Text Using Support Vector\n");
  440. printf(" Machines: Methods, Theory, and Algorithms. Dissertation, Kluwer,\n");
  441. printf(" 2002.\n\n");
  442. }