<workflow-app name="test-core_examples_javamapreduce_stats" xmlns="uri:oozie:workflow:0.4">
	<!-- Map-reduce job that exports hbase data and prepares it for import into the relational database used for statistics generation -->

	<global>
		<job-tracker>${jobTracker}</job-tracker>
		<name-node>${nameNode}</name-node>
		<configuration>
			<property>
				<name>mapred.job.queue.name</name>
				<value>${queueName}</value>
			</property>
			<property>
				<name>oozie.sqoop.log.level</name>
				<value>DEBUG</value>
			</property>
		</configuration>
	</global>
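	<!-- The job-tracker, name-node and queue settings in the global section above are
	     inherited by every action in this workflow, so individual actions only need to
	     declare what differs. -->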
	<start to='get-scanner' />
	<action name='get-scanner'>
		<java>
			<main-class>eu.dnetlib.iis.core.workflows.stats.HbaseScannerGenerator</main-class>
			<!-- column families: -->
			<arg>
				-f
				datasource,
				datasourceOrganization_provision_provides,
				organization,
				project,
				projectOrganization_participation_hasParticipant,
				result,
				resultProject_outcome_produces,
				<!-- personResult_authorship_hasAuthor, -->
				resultResult_publicationDataset_isRelatedTo
			</arg>
			<capture-output />
		</java>
		<ok to="mr_export" />
		<error to="fail" />
	</action>
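	<!-- <capture-output/> makes the scan definition emitted by HbaseScannerGenerator
	     available to later nodes via ${wf:actionData('get-scanner')['scan']}, which the
	     map-reduce action below reads. -->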
	<action name="mr_export">
		<map-reduce>
			<prepare>
				<delete path="${nameNode}${Stats_output_Path}" />
			</prepare>
			<configuration>
				<property>
					<name>hbase.mapreduce.scan</name>
					<value>${wf:actionData('get-scanner')['scan']}</value>
				</property>
				<property>
					<name>hbase.rootdir</name>
					<!-- <value>hdfs://nmis-hadoop-cluster/hbase</value> -->
					<!-- <value>hdfs://dm-cluster-nn</value> -->
					<value>${nameNode}/hbase</value>
				</property>
				<property>
					<name>hbase.security.authentication</name>
					<value>simple</value>
				</property>
				<!-- ZOOKEEPER -->
				<property>
					<name>hbase.zookeeper.quorum</name>
					<value>namenode1.hadoop.dm.openaire.eu,namenode2.hadoop.dm.openaire.eu,jobtracker1.hadoop.dm.openaire.eu,jobtracker2.hadoop.dm.openaire.eu,hbase-master1.hadoop.dm.openaire.eu</value>
					<!-- <value>quorum1.t.hadoop.research-infrastructures.eu,quorum2.t.hadoop.research-infrastructures.eu,quorum3.t.hadoop.research-infrastructures.eu,quorum4.t.hadoop.research-infrastructures.eu,jobtracker.t.hadoop.research-infrastructures.eu</value> -->
				</property>
				<property>
					<name>zookeeper.znode.rootserver</name>
					<value>root-region-server</value>
				</property>
				<property>
					<name>hbase.zookeeper.property.clientPort</name>
					<value>2181</value>
					<!-- <value>2182</value> -->
				</property>
				<!-- MR IO -->
				<property>
					<name>mapreduce.inputformat.class</name>
					<value>org.apache.hadoop.hbase.mapreduce.TableInputFormat</value>
				</property>
				<property>
					<name>mapred.mapoutput.key.class</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapred.mapoutput.value.class</name>
					<value>org.apache.hadoop.hbase.io.ImmutableBytesWritable</value>
				</property>
				<property>
					<name>mapred.output.key.class</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.outputformat.class</name>
					<value>org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat</value>
				</property>
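				<!-- TableInputFormat picks up the scan definition from hbase.mapreduce.scan
				     (set above) and the source table from hbase.mapreduce.inputtable
				     (set further below). -->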
				<!-- ## This is required for new MapReduce API usage -->
				<property>
					<name>mapred.mapper.new-api</name>
					<value>true</value>
				</property>
				<property>
					<name>mapred.reducer.new-api</name>
					<value>true</value>
				</property>

				<!-- # Job-specific options -->
				<property>
					<name>dfs.blocksize</name>
					<value>32M</value>
				</property>
				<property>
					<name>mapred.output.compress</name>
					<value>false</value>
				</property>
				<property>
					<name>mapred.reduce.tasks.speculative.execution</name>
					<value>false</value>
				</property>
				<property>
					<name>mapreduce.map.speculative</name>
					<value>false</value>
				</property>
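				<!-- Speculative execution is switched off above, presumably so that
				     speculative task attempts cannot write duplicate records to the
				     dump files. -->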
				<!-- I/O FORMAT -->
				<!-- IMPORTANT: sets the default delimiter used by the text output writer. Required to fix an issue with a trailing tab added between id and value in multiple outputs -->
				<property>
					<name>mapred.textoutputformat.separator</name>
					<value>${Stats_delim_Character}</value>
				</property>
				<!-- ## Names of all output ports -->
				<property>
					<name>mapreduce.multipleoutputs</name>
					<value>${out1} ${out2} ${out3} ${out4} ${out5} ${out6} ${out7} ${out8} ${out9} ${out10} ${out11} ${out12} ${out13} ${out14} ${out15} ${out16} ${out17} ${out18} ${out19} ${out20}</value>
				</property>
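				<!-- Each named output listed above is given a key class, a value class and
				     an output format below (the configuration-level equivalent of
				     MultipleOutputs.addNamedOutput); the comment preceding each triple
				     names the record type written to that port. -->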
				<!-- datasource -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out1}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out1}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out1}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>

				<!-- datasourceLanguage -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out2}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out2}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out2}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>

				<!-- datasourceOrganization -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out3}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out3}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out3}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>

				<!-- datasourceTopic -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out4}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out4}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out4}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>

				<!-- resultDatasource -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out5}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out5}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out5}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>

				<!-- organization -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out6}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out6}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out6}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>

				<!-- projectOrganization -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out7}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out7}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out7}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>

				<!-- resultProject -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out8}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out8}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out8}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>

				<!-- project -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out9}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out9}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out9}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>

				<!-- resultConcept -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out10}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out10}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out10}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>

				<!-- resultClaim -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out11}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out11}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out11}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>

				<!-- resultClassification -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out12}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out12}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out12}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>

				<!-- resultLanguage -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out13}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out13}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out13}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>

				<!-- resultProject -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out14}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out14}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out14}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>

				<!-- resultResult -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out15}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out15}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out15}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>

				<!-- resultTopic -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out16}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out16}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out16}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>

				<!-- resultDatasource -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out17}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out17}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out17}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>

				<!-- result -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out18}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out18}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out18}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>

				<!-- claim -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out19}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out19}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out19}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>
				<!-- concept -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out20}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out20}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out20}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>

				<!-- ## Classes of mapper and reducer -->
				<property>
					<name>mapreduce.map.class</name>
					<value>eu.dnetlib.data.mapreduce.hbase.statsExport.StatsMapper</value>
				</property>
				<property>
					<name>mapreduce.reduce.class</name>
					<value>eu.dnetlib.data.mapreduce.hbase.statsExport.StatsReducer</value>
				</property>
				<property>
					<name>io.serializations</name>
					<value>org.apache.hadoop.io.serializer.WritableSerialization</value>
				</property>
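				<!-- StatsMapper consumes the HBase rows delivered by TableInputFormat;
				     StatsReducer fans the resulting records out to the named outputs
				     declared above. -->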
				<!-- ## Custom config -->
				<!-- delimiter character used to separate fields in hdfs dump files -->
				<property>
					<name>stats.delim</name>
					<value>${Stats_delim_Character}</value>
				</property>
				<!-- default string for null string values -->
				<property>
					<name>stats.nullString</name>
					<value>${Stats_null_String_Field}</value>
				</property>
				<!-- default string for null numeric values -->
				<property>
					<name>stats.nullNum</name>
					<value>${Stats_null_Numeric_Field}</value>
				</property>
				<!-- character used to enclose field values -->
				<property>
					<name>stats.enclChar</name>
					<value>${Stats_enclosing_Character}</value>
				</property>
				<!-- source hbase table -->
				<property>
					<name>hbase.mapreduce.inputtable</name>
					<value>${Stats_Hbase_Source_Table}</value>
				</property>
				<property>
					<name>hbase.mapred.inputtable</name>
					<value>${Stats_Hbase_Source_Table}</value>
				</property>
				<property>
					<!-- mapping of protos entities to tables in the relDB -->
					<name>stats.dbTablesMap</name>
					<value>${Stats_db_table_map}</value>
				</property>
				<!-- This directory does not correspond to a data store. In fact, this directory only contains multiple data stores. It has to be set to the name of the workflow node. -->
				<property>
					<name>mapred.output.dir</name>
					<value>${Stats_output_Path}</value>
				</property>
				<property>
					<name>stats.indexConf</name>
					<value>${Stats_indexConf}</value>
				</property>
				<!-- ## Workflow node parameters -->
				<property>
					<name>mapred.reduce.tasks</name>
					<value>${numReducers}</value>
				</property>
			</configuration>
		</map-reduce>
		<ok to="exportContext" />
		<error to="fail" />
	</action>
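
	<!-- Exports context metadata, resolved through the IS lookup service at
	     ${isLookupEndpoint}, into the statistics output path. -->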
	<action name='exportContext'>
		<java>
			<configuration>
				<property>
					<name>mapred.job.queue.name</name>
					<value>${queueName}</value>
				</property>
			</configuration>
			<main-class>eu.dnetlib.iis.core.workflows.stats.ContextExportWrapper</main-class>
			<arg>${Stats_output_Path}</arg>
			<arg>${isLookupEndpoint}</arg>
		</java>
		<ok to="prepareDatabase" />
		<error to="fail" />
	</action>
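
	<!-- prepareDatabase runs DBInitWrapper against the target database, presumably
	     creating the schema that the Sqoop import below loads into. -->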
	<action name="prepareDatabase">
		<java>
			<configuration>
				<property>
					<name>mapred.job.queue.name</name>
					<value>${queueName}</value>
				</property>
			</configuration>
			<main-class>eu.dnetlib.iis.core.java.ProcessWrapper</main-class>
			<arg>-SworkingDir=${workingDir}</arg>
			<arg>eu.dnetlib.iis.core.workflows.stats.DBInitWrapper</arg>
			<arg>-PStats_db_Url=${Stats_db_Url}</arg>
			<arg>-PStats_db_User=${Stats_db_User}</arg>
			<arg>-PStats_db_Pass=${Stats_db_Pass}</arg>
			<arg>-PStats_db_Driver=${Stats_db_Driver}</arg>
		</java>
		<ok to="sqoopImport" />
		<error to="fail" />
	</action>
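
	<!-- sqoopImport bulk-loads the delimited dump files from ${Stats_output_Path}
	     into the relational database; the RecsPerStatement and StatementPerTrans
	     parameters tune Sqoop's batching, ReducersCount its parallelism. -->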
	<action name="sqoopImport">
		<java>
			<configuration>
				<property>
					<name>mapred.job.queue.name</name>
					<value>${queueName}</value>
				</property>
				<property>
					<name>oozie.sqoop.log.level</name>
					<value>DEBUG</value>
				</property>
			</configuration>
			<main-class>eu.dnetlib.iis.core.java.ProcessWrapper</main-class>
			<arg>-SworkingDir=${workingDir}</arg>
			<arg>eu.dnetlib.iis.core.workflows.stats.SqoopWrapper</arg>
			<arg>-PStats_db_Url=${Stats_db_Url}</arg>
			<arg>-PStats_db_User=${Stats_db_User}</arg>
			<arg>-PStats_db_Pass=${Stats_db_Pass}</arg>
			<arg>-PStats_output_Path=${Stats_output_Path}</arg>
			<arg>-PStats_sqoop_RecsPerStatement=${Stats_sqoop_RecsPerStatement}</arg>
			<arg>-PStats_sqoop_ReducersCount=${Stats_sqoop_ReducersCount}</arg>
			<arg>-PStats_sqoop_StatementPerTrans=${Stats_sqoop_StatementPerTrans}</arg>
			<arg>-PStats_delim_Character=${Stats_delim_Character}</arg>
			<arg>-PStats_db_table_map=${Stats_db_table_map}</arg>
			<arg>-PStats_enclosing_Character=${Stats_enclosing_Character}</arg>
		</java>
		<ok to="finalizeDatabase" />
		<error to="fail" />
	</action>
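
	<!-- finalizeDatabase runs DBFinalizeWrapper, presumably applying the final DDL
	     (indexes, views) once all data has been loaded. -->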
	<action name="finalizeDatabase">
		<java>
			<configuration>
				<property>
					<name>mapred.job.queue.name</name>
					<value>${queueName}</value>
				</property>
			</configuration>
			<main-class>eu.dnetlib.iis.core.java.ProcessWrapper</main-class>
			<arg>-SworkingDir=${workingDir}</arg>
			<arg>eu.dnetlib.iis.core.workflows.stats.DBFinalizeWrapper</arg>
			<arg>-PStats_db_Url=${Stats_db_Url}</arg>
			<arg>-PStats_db_User=${Stats_db_User}</arg>
			<arg>-PStats_db_Pass=${Stats_db_Pass}</arg>
			<arg>-PStats_db_Driver=${Stats_db_Driver}</arg>
		</java>
		<ok to="cleanUpHDFS" />
		<error to="fail" />
	</action>
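
	<!-- Once the database has been populated, the intermediate dump files under
	     ${Stats_output_Path} are no longer needed and are deleted from HDFS. -->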
	<action name="cleanUpHDFS">
		<fs>
			<delete path="${nameNode}${Stats_output_Path}" />
		</fs>
		<ok to="end" />
		<error to="fail" />
	</action>
	<kill name="fail">
		<message>
			Unfortunately, the process failed -- error message:
			[${wf:errorMessage(wf:lastErrorNode())}]
		</message>
	</kill>
	<end name="end" />
</workflow-app>