<workflow-app name="test-core_examples_javamapreduce_stats" xmlns="uri:oozie:workflow:0.4">
	<!-- Map-reduce job that exports HBase data and prepares it for import into the relational database used for statistics generation -->

	<global>
		<job-tracker>${jobTracker}</job-tracker>
		<name-node>${nameNode}</name-node>
		<configuration>
			<property>
				<name>mapred.job.queue.name</name>
				<value>${queueName}</value>
			</property>
			<property>
				<name>oozie.sqoop.log.level</name>
				<value>DEBUG</value>
			</property>
		</configuration>
	</global>
	<start to='get-scanner' />
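	<!-- Control flow as wired: get-scanner to mr_export, then end. The exportContext,
	     prepareDatabase, sqoopImport and finalizeDatabase actions defined further down are
	     not referenced by any transition reachable from this start node. -->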
	<action name='get-scanner'>
		<java>
			<main-class>eu.dnetlib.iis.core.workflows.stats.HbaseScannerGenerator</main-class>
			<!-- column families: -->
			<arg>	<!-- ${Stats_Column_Families} -->
				-f datasource
				<!-- datasource,datasourceOrganization_provision_provides,
				organization,
				project,projectOrganization_participation_hasParticipant,
				result,resultProject_outcome_produces,
				personResult_authorship_hasAuthor,resultResult_publicationDataset_isRelatedTo -->
			</arg>
			<capture-output />
		</java>
		<ok to="mr_export" />
		<error to="fail" />
	</action>
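	<!-- How <capture-output/> works: the main class of a java action must write a Java
	     properties file to the path given by the oozie.action.output.properties system
	     property; downstream nodes then read those values with wf:actionData(). A minimal
	     sketch of the idea (the property name "scan" is taken from the mr_export action
	     below; everything else is illustrative, not the actual HbaseScannerGenerator):

	     import java.io.File;
	     import java.io.FileOutputStream;
	     import java.io.OutputStream;
	     import java.util.Properties;

	     public class ScannerOutputSketch {
	         public static void main(String[] args) throws Exception {
	             Properties props = new Properties();
	             // the real generator would put the Base64-serialized HBase Scan here
	             props.setProperty("scan", "base64-encoded-scan-definition");
	             // Oozie points this system property at the file it collects after the action
	             File out = new File(System.getProperty("oozie.action.output.properties"));
	             try (OutputStream os = new FileOutputStream(out)) {
	                 props.store(os, "");
	             }
	         }
	     }
	-->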
	<action name="mr_export">
		<map-reduce>
			<prepare>
				<delete path="${nameNode}${Stats_output_Path}" />
			</prepare>
			<configuration>
				<property>
					<name>hbase.mapreduce.scan</name>
					<value>${wf:actionData('get-scanner')['scan']}</value>
				</property>
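				<!-- The value above is the Base64-serialized Scan emitted by the get-scanner
				     step via capture-output; TableInputFormat reads it from the
				     hbase.mapreduce.scan key to decide which rows and column families to export. -->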
				<property>
					<name>hbase.rootdir</name>
					<value>hdfs://nmis-hadoop-cluster/hbase</value>
				</property>
				<property>
					<name>hbase.security.authentication</name>
					<value>simple</value>
				</property>
				<!-- ZOOKEEPER -->
				<property>
					<name>hbase.zookeeper.quorum</name>
					<value>quorum1.t.hadoop.research-infrastructures.eu,quorum2.t.hadoop.research-infrastructures.eu,quorum3.t.hadoop.research-infrastructures.eu,quorum4.t.hadoop.research-infrastructures.eu,jobtracker.t.hadoop.research-infrastructures.eu</value>
				</property>
				<property>
					<name>zookeeper.znode.rootserver</name>
					<value>root-region-server</value>
				</property>
				<property>
					<name>hbase.zookeeper.property.clientPort</name>
					<value>2182</value>
				</property>
				<!-- MR IO -->
				<property>
					<name>mapreduce.inputformat.class</name>
					<value>org.apache.hadoop.hbase.mapreduce.TableInputFormat</value>
				</property>
				<property>
					<name>mapred.mapoutput.key.class</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapred.mapoutput.value.class</name>
					<value>org.apache.hadoop.hbase.io.ImmutableBytesWritable</value>
				</property>
				<property>
					<name>mapred.output.key.class</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapred.output.value.class</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<!-- ## This is required for new MapReduce API usage -->
				<property>
					<name>mapred.mapper.new-api</name>
					<value>true</value>
				</property>
				<property>
					<name>mapred.reducer.new-api</name>
					<value>true</value>
				</property>
				<!-- # Job-specific options -->
				<property>
					<name>dfs.blocksize</name>
					<value>32M</value>
				</property>
				<property>
					<name>mapred.output.compress</name>
					<value>false</value>
				</property>
				<property>
					<name>mapred.reduce.tasks.speculative.execution</name>
					<value>false</value>
				</property>
				<property>
					<name>mapreduce.map.speculative</name>
					<value>false</value>
				</property>
				<!-- I/O FORMAT -->
				<!-- IMPORTANT: sets the default delimiter used by the text output writer. Required to fix an issue with a trailing tab added between id and value in multiple outputs -->
				<property>
					<name>mapred.textoutputformat.separator</name>
					<value>${Stats_delim_Character}</value>
				</property>
				<!-- ## Names of all output ports -->
				<property>
					<name>mapreduce.multipleoutputs</name>
					<value>${out1} ${out2} ${out3} ${out4} ${out5} ${out6} ${out7} ${out8} ${out9} ${out10} ${out11} ${out12} ${out13} ${out14} ${out15} ${out16} ${out17} ${out18} ${out19} ${out20}</value>
				</property>
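				<!-- For each named output N listed above, three companion properties define its
				     key class, value class and output format:
				       mapreduce.multipleoutputs.namedOutput.N.key
				       mapreduce.multipleoutputs.namedOutput.N.value
				       mapreduce.multipleoutputs.namedOutput.N.format
				     For example, if ${out1} resolves to "datasource" (an assumption suggested
				     by the comment below), the first block expands to
				     mapreduce.multipleoutputs.namedOutput.datasource.key = org.apache.hadoop.io.Text,
				     and so on. All twenty outputs below use Text keys, Text values and
				     TextOutputFormat. -->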
				<!-- datasource -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out1}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out1}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out1}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>
				<!-- datasourceLanguage -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out2}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out2}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out2}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>
				<!-- datasourceOrganization -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out3}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out3}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out3}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>
				<!-- datasourceTopic -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out4}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out4}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out4}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>
				<!-- resultDatasource -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out5}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out5}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out5}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>
				<!-- organization -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out6}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out6}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out6}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>
				<!-- projectOrganization -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out7}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out7}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out7}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>
				<!-- resultProject -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out8}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out8}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out8}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>
				<!-- project -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out9}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out9}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out9}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>
				<!-- resultConcept -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out10}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out10}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out10}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>
				<!-- resultClaim -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out11}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out11}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out11}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>
				<!-- resultClassification -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out12}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out12}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out12}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>
				<!-- resultLanguage -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out13}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out13}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out13}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>
				<!-- resultProject -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out14}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out14}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out14}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>
				<!-- resultResult -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out15}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out15}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out15}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>
				<!-- resultTopic -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out16}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out16}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out16}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>
				<!-- resultDatasource -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out17}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out17}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out17}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>
				<!-- result -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out18}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out18}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out18}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>
				<!-- claim -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out19}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out19}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out19}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>
				<!-- concept -->
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out20}.key</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out20}.value</name>
					<value>org.apache.hadoop.io.Text</value>
				</property>
				<property>
					<name>mapreduce.multipleoutputs.namedOutput.${out20}.format</name>
					<value>org.apache.hadoop.mapreduce.lib.output.TextOutputFormat</value>
				</property>
				<!-- ## Classes of mapper and reducer -->
				<property>
					<name>mapreduce.map.class</name>
					<value>eu.dnetlib.data.mapreduce.hbase.statsExport.StatsMapper</value>
				</property>
				<property>
					<name>mapreduce.reduce.class</name>
					<value>eu.dnetlib.data.mapreduce.hbase.statsExport.StatsReducer</value>
				</property>
				<property>
					<name>io.serializations</name>
					<value>org.apache.hadoop.io.serializer.WritableSerialization</value>
				</property>
				<!-- ## Custom config -->
				<!-- delimiter character used to separate fields in hdfs dump files -->
				<property>
					<name>stats.delim</name>
					<value>${Stats_delim_Character}</value>
				</property>
				<!-- default string for null string values -->
				<property>
					<name>stats.nullString</name>
					<value>${Stats_null_String_Field}</value>
				</property>
				<!-- default string for null numeric values -->
				<property>
					<name>stats.nullNum</name>
					<value>${Stats_null_Numeric_Field}</value>
				</property>
				<!-- character used to enclose field values -->
				<property>
					<name>stats.enclChar</name>
					<value>${Stats_enclosing_Character}</value>
				</property>
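				<!-- Illustration with hypothetical values: if stats.delim is "," ,
				     stats.enclChar is '"' and stats.nullString is "NULL", an exported record
				     with a missing last field would be written as: "id1","some value","NULL" -->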
				<!-- source hbase table -->
				<property>
					<name>hbase.mapreduce.inputtable</name>
					<value>${Stats_Hbase_Source_Table}</value>
				</property>
				<property>
					<name>hbase.mapred.inputtable</name>
					<value>${Stats_Hbase_Source_Table}</value>
				</property>
				<property>
					<!-- mapping of proto entities to tables in the relational DB -->
					<name>stats.dbTablesMap</name>
					<value>${Stats_db_table_map}</value>
				</property>
				<!-- This directory does not correspond to a single data store; it holds the
				     multiple data stores produced by the named outputs. It has to be set to
				     the name of the workflow node. -->
				<property>
					<name>mapred.output.dir</name>
					<value>${Stats_output_Path}</value>
				</property>
				<property>
					<name>stats.indexConf</name>
					<value>${Stats_indexConf}</value>
				</property>
				<!-- ## Workflow node parameters -->
				<property>
					<name>mapred.reduce.tasks</name>
					<value>${numReducers}</value>
				</property>
			</configuration>
		</map-reduce>
		<ok to="end" />
		<error to="fail" />
	</action>
	<action name='exportContext'>
		<java>
			<prepare>
			</prepare>
			<configuration>
				<property>
					<name>mapred.job.queue.name</name>
					<value>${queueName}</value>
				</property>
			</configuration>
			<main-class>eu.dnetlib.iis.core.workflows.stats.ContextExportWrapper</main-class>
			<arg>${Stats_output_Path}</arg>
			<arg>${isLookupEndpoint}</arg>
		</java>
		<ok to="prepareDatabase" />
		<error to="fail" />
	</action>
	<action name="prepareDatabase">
		<java>
			<prepare>
			</prepare>
			<configuration>
				<property>
					<name>mapred.job.queue.name</name>
					<value>${queueName}</value>
				</property>
			</configuration>
			<main-class>eu.dnetlib.iis.core.java.ProcessWrapper</main-class>
			<arg>-SworkingDir=${workingDir}</arg>
			<arg>eu.dnetlib.iis.core.workflows.stats.DBInitWrapper</arg>
			<arg>-PStats_db_Url=${Stats_db_Url}</arg>
			<arg>-PStats_db_User=${Stats_db_User}</arg>
			<arg>-PStats_db_Pass=${Stats_db_Pass}</arg>
			<arg>-PStats_db_Driver=${Stats_db_Driver}</arg>
		</java>
		<ok to="sqoopImport" />
		<error to="fail" />
	</action>
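	<!-- In the ProcessWrapper invocations above and below, the first plain argument names the
	     process class to run; judging by usage, -S arguments appear to carry workflow-level
	     settings (e.g. workingDir) while -P arguments carry process parameters. This reading
	     is an assumption based on the argument pattern, not on ProcessWrapper documentation. -->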
	<action name="sqoopImport">
		<java>
			<prepare>
			</prepare>
			<configuration>
				<property>
					<name>mapred.job.queue.name</name>
					<value>${queueName}</value>
				</property>
				<property>
					<name>oozie.sqoop.log.level</name>
					<value>DEBUG</value>
				</property>
			</configuration>
			<main-class>eu.dnetlib.iis.core.java.ProcessWrapper</main-class>
			<arg>-SworkingDir=${workingDir}</arg>
			<arg>eu.dnetlib.iis.core.workflows.stats.SqoopWrapper</arg>
			<arg>-PStats_db_Url=${Stats_db_Url}</arg>
			<arg>-PStats_db_User=${Stats_db_User}</arg>
			<arg>-PStats_db_Pass=${Stats_db_Pass}</arg>
			<arg>-PStats_output_Path=${Stats_output_Path}</arg>
			<arg>-PStats_sqoop_RecsPerStatement=${Stats_sqoop_RecsPerStatement}</arg>
			<arg>-PStats_sqoop_ReducersCount=${Stats_sqoop_ReducersCount}</arg>
			<arg>-PStats_sqoop_StatementPerTrans=${Stats_sqoop_StatementPerTrans}</arg>
			<arg>-PStats_delim_Character=${Stats_delim_Character}</arg>
			<arg>-PStats_db_table_map=${Stats_db_table_map}</arg>
			<arg>-PStats_enclosing_Character=${Stats_enclosing_Character}</arg>
		</java>
		<ok to="finalizeDatabase"/>
		<error to="fail" />
	</action>
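	<!-- The Stats_sqoop_RecsPerStatement / Stats_sqoop_StatementPerTrans pair controls export
	     batching: records per INSERT statement and statements per transaction; together with
	     Stats_sqoop_ReducersCount they bound parallelism and transaction size. This reading
	     follows the parameter names; the exact semantics live in SqoopWrapper. -->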
	<action name="finalizeDatabase">
		<java>
			<prepare>
			</prepare>
			<configuration>
				<property>
					<name>mapred.job.queue.name</name>
					<value>${queueName}</value>
				</property>
			</configuration>
			<main-class>eu.dnetlib.iis.core.java.ProcessWrapper</main-class>
			<arg>-SworkingDir=${workingDir}</arg>
			<arg>eu.dnetlib.iis.core.workflows.stats.DBFinalizeWrapper</arg>
			<arg>-PStats_db_Url=${Stats_db_Url}</arg>
			<arg>-PStats_db_User=${Stats_db_User}</arg>
			<arg>-PStats_db_Pass=${Stats_db_Pass}</arg>
			<arg>-PStats_db_Driver=${Stats_db_Driver}</arg>
		</java>
		<ok to="end" />
		<error to="fail" />
	</action>
	<kill name="fail">
		<message>
			Unfortunately, the process failed -- error message:
			[${wf:errorMessage(wf:lastErrorNode())}]
		</message>
	</kill>
	<end name="end" />
</workflow-app>