Changeset 27126 for gs2-extensions/parallel-building/trunk
Timestamp: 2013-03-26T09:14:31+13:00 (11 years ago)
Files: 1 edited
Legend:
- Unmodified
- Added
- Removed
-
gs2-extensions/parallel-building/trunk/src/bin/script/hadoop_import.pl
--- hadoop_import.pl (r27058)
+++ hadoop_import.pl (r27126)
@@ lines 78-99 (old) / 78-107 (new) @@
 }
 # - clear out the archives regardless
+print " * Clearing existing archives directory for this collection... ";
 if (-e $gs_archives_dir)
 {
 ...
 if (&hdfsTest('d', 0, $hdfs_output_dir))
 {
-print " * Clearing existing archives directory for this collection... ";
 &hdfsCommand('rmr', $hdfs_output_dir);
-print "Done!\n";
 }
 # - clear out any old logs
 ...
 {
 &shellCommand('rm /tmp/greenstone/*.*');
+&shellCommand('rm /tmp/gsimport*');
 }
 if ($is_rocks_cluster)
 {
 &shellCommand('rocks run host "rm /tmp/greenstone/*.*"');
+&shellCommand('rocks run host "rm /tmp/gsimport*"');
 }
+print "Done!\n";
+
+# - flush DNS cache too, so we are playing on a level field
+&shellCommand('flush_caches.pl');
+&shellCommand('rocks run host "flush_caches.pl"');

 # 3. Special case for *Server type infodbs (namely TDBServer and GDBMServer)
 ...
@@ lines 187-193 (old) / 193-201 (new) @@
 print " * Cleaning up temporary files... ";
 &shellCommand('rm -rf /tmp/greenstone');
+&shellCommand('rm -rf /tmp/gsimport*');
 if ($is_rocks_cluster)
 {
 &shellCommand('rocks run host "rm -rf /tmp/greenstone"');
+&shellCommand('rocks run host "rm -rf /tmp/gsimport*"');
 }
 print "Done!\n";
Note:
See TracChangeset
for help on using the changeset viewer.