Bug fixes. Update timing intervals. Improve output.
parent 0afcbba267
commit 3f0bb8cbac
@@ -156,9 +156,8 @@ module ClassyFireAPI
     f = File.open(absolute_path, 'r')
     input = []


     lines = File.readlines(absolute_path)
-    puts lines.length, lines[0]
     i = 0
     lines.uniq.each do |line|
       i += 1
@@ -181,19 +180,19 @@ module ClassyFireAPI
     i = start

     if i < initial_nr_of_jobs
-      while i < initial_nr_of_jobs
+      while i <= initial_nr_of_jobs

-        title = File.basename(absolute_path).split(".")[0] + "_yannick" + "_part_#{i}"
+        title = File.basename(absolute_path).split(".")[0] + "_part_#{i}"

         if i <= subdivised_groups.length
-          puts "\n\n\n\n---------------------- -----------"
+          puts "\n---------------------------------"
           begin
             puts "submitting #{title}"
             # puts subdivised_groups[i-1].join("\n")
             q = submit_query(title,subdivised_groups[i-1].join("\n"),type)
-            puts JSON.parse(q)['id']
+            puts "Query ID: " + JSON.parse(q)['id'].to_s
             query_ids << JSON.parse(q)['id']
-            sleep(10)
+            sleep(20)
           rescue Exception => e
             puts e.message
             puts e.backtrace.inspect
@@ -202,38 +201,36 @@ module ClassyFireAPI
         else
           break
         end
-        query_ids
       end
       puts "Going to sleep at #{Time.now - @start_time} for #{sleeping_time} s."
       sleep(sleeping_time)
       puts "Waking up at #{Time.now - @start_time}"
     end



     while i >= initial_nr_of_jobs && i < subdivised_groups.length
       k = 0
       for k in (i...(i + initial_nr_of_jobs))
-        title = File.basename(absolute_path).split(".")[0] + "_yannick" + "_part_#{k}"
+        title = File.basename(absolute_path).split(".")[0] + "_part_#{k}"

         begin
           puts "submitting #{title}"
           q = submit_query(title,subdivised_groups[k-1].join("\n"),type)
-          puts JSON.parse(q)['id']
+          puts "Query ID: " + JSON.parse(q)['id'].to_s
           query_ids << JSON.parse(q)['id']
-          sleep(10)
+          sleep(20)
         rescue Exception => e
           puts e.message
           puts e.backtrace.inspect
         end
         i = i + 1
       end
-      i = k
+      if i >= initial_nr_of_jobs && i < subdivised_groups.length
         puts "Going to sleep at #{Time.now - @start_time} for #{sleeping_time} s."
         sleep(sleeping_time)
         puts "Waking up at #{Time.now - @start_time}"
       end
+    end

+    puts "Done at #{Time.now - @start_time}"
   end

   # Takes each file in a folder, and submits the contained structures in bulks of a given size.
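For context, the loop this commit tunes is a client-side throttle around ClassyFire query submission: each chunk of structures goes to submit_query, the JSON response is parsed for an 'id', and the script pauses between submissions (raised here from 10 s to 20 s) and between batches (sleeping_time). The sketch below reproduces that pattern in isolation; fake_submit_query, the pause constants, and the sample groups are illustrative stand-ins, not part of ClassyFireAPI.

require 'json'

PER_QUERY_PAUSE = 1 # seconds between submissions (the commit uses 20)
BATCH_PAUSE     = 2 # seconds between batches (plays the role of sleeping_time)
BATCH_SIZE      = 3 # submissions per batch before the long pause

# Hypothetical substitute for ClassyFireAPI.submit_query: echoes a JSON
# payload containing an 'id', which is all the loop below needs.
def fake_submit_query(label, input, type)
  { 'id' => rand(10_000), 'label' => label, 'query_input' => input,
    'query_type' => type }.to_json
end

start_time = Time.now
groups     = (1..7).map { |n| ["C#{n}CO", "C#{n}N"] } # stand-in structure groups
query_ids  = []

groups.each_slice(BATCH_SIZE) do |batch|
  batch.each_with_index do |group, idx|
    begin
      label = "example_part_#{idx + 1}"
      puts "submitting #{label}"
      q = fake_submit_query(label, group.join("\n"), 'STRUCTURE')
      # Same labelled output the commit introduces.
      puts 'Query ID: ' + JSON.parse(q)['id'].to_s
      query_ids << JSON.parse(q)['id']
      sleep(PER_QUERY_PAUSE)
    rescue StandardError => e # narrower than the original's rescue Exception
      puts e.message
      puts e.backtrace.inspect
    end
  end
  puts "Going to sleep at #{Time.now - start_time} for #{BATCH_PAUSE} s."
  sleep(BATCH_PAUSE)
  puts "Waking up at #{Time.now - start_time}"
end

puts "Done at #{Time.now - start_time}; submitted #{query_ids.length} queries."

The longer pauses presumably keep the script under the ClassyFire server's rate limits; the commit itself does not say, so treat the exact values as tuning knobs rather than documented requirements.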