def __init__(self):
self.stats = []
- self.duration = 7200.0 # Seconds for which to run
+ self.duration = 1800.0 # Seconds for which to run
self.write_fraction = 0.5 # Fraction of operations which are writes
self.wss_count = 2048 # Files in the working set
self.tot_count = 2048 # Total number of files created
- self.filesize = 256 * 1024 # Size of files to work with
- self.target_ops = 10 # Target operations/second/thread
+ self.filesize = 32 * 1024 # Size of files to work with
+ self.target_ops = 40 # Target operations/second/thread
def get_params(self):
params = {}
info = self._operation()
time2 = time.time()
self.stats.append((time1, time2 - time1, info))
- print self.stats[-1]
+ #print self.stats[-1]
delay = time1 + (1.0 / self.target_ops) - time2
if delay > 0: time.sleep(delay)
print "WRITES:"
print_distribution_stats([x[1] for x in stats if x[2][0] == 'write'])
-if __name__ == '__main__':
+fp = open('/tmp/results.json', 'a')
+
+def run(filecount, writefrac, filesize):
workers = []
threads = []
for i in range(THREADS):
w = WorkerThread()
- #if i == 0: w.setup()
+ w.write_fraction = writefrac
+ w.wss_count = w.tot_count = filecount
+ w.filesize = filesize
+ if i == 0: w.setup()
t = threading.Thread(target=w.run)
threads.append(t)
workers.append(w)
results += w.stats
results.sort()
- fp = open('/tmp/results.json', 'w')
fp.write(json.dumps(workers[0].get_params(), indent=2) + "\n\n")
fp.write(json.dumps(results, indent=2))
- fp.close()
+ fp.write("\n\n")
run_stats(results)
+
if __name__ == '__main__':
    # Parameter sweep: every combination of per-file size, total data-set
    # size and write mix is run in this fixed order; each run() appends its
    # parameters and results to the shared module-level results file.
    for filesize in [32, 256, 2048]:  # per-file size, KiB
        for totsize in [256, 512, 1024]:  # total data-set size, MiB
            # Enough files so filecount * filesize covers totsize exactly.
            # '//' keeps the result an integer under both Python 2 and 3;
            # every size above divides evenly, so no remainder is dropped.
            filecount = totsize * 1024 // filesize
            for writefrac in [0.0, 0.5]:  # read-only pass, then 50% writes
                run(filecount, writefrac, filesize * 1024)  # filesize in bytes
    # fp is the module-level results file opened in append mode; close it
    # only after the whole sweep has finished.
    fp.close()