diff --git a/dashboard/mergeresults.py b/dashboard/mergeresults.py
index b31ab3b..a4605b5 100644
--- a/dashboard/mergeresults.py
+++ b/dashboard/mergeresults.py
@@ -56,4 +56,31 @@ class ResultMergingProcess(Process):
         self.ctx.merge_result_file(input_path)
         os.remove(input_path)
         print " - Merged result, %i left" % len(self.path_set)
-        self.ctx.output(self.result_path)
\ No newline at end of file
+        self.ctx.output(self.result_path)
+
+def main():
+    p = ArgumentParser(
+        description = 'Merge analysis results to single file',
+        formatter_class = ArgumentDefaultsHelpFormatter
+    )
+    p.add_argument(
+        "-i", "--input-file",
+        help = "Input files to merge to data folder",
+        required = True,
+        nargs = '+'
+    )
+    p.add_argument(
+        "-o", "--output-file",
+        help = "Output file to write data to",
+        required = True
+    )
+
+    cfg = p.parse_args()
+    ctx = ResultContext()
+    for f in cfg.input_file:
+        ctx.merge_result_file(f)
+    ctx.output(cfg.output_file)
+
+
+if __name__ == "__main__":
+    sys.exit(main())
\ No newline at end of file
diff --git a/dashboard/results2disk.py b/dashboard/results2disk.py
index 235d486..a99c270 100644
--- a/dashboard/results2disk.py
+++ b/dashboard/results2disk.py
@@ -176,8 +176,6 @@ class ChannelVersionManager:
         # if we have a data-array length mismatch, then we purge existing data
         if length != len(dump['values']):
             purge_data = True
-            if metadata.get('length', None) != None:
-                print dump['values']
         revision = dump['revision']
         buildId = dump['buildId']
         length = len(dump['values'])
@@ -205,10 +203,6 @@ class ChannelVersionManager:
         oldlen = metadata.get('length', None)
         if oldlen != None:
             print >> sys.stderr, "Purging data for %s from length %i to %i" % (measure, oldlen, length)
-            print >> sys.stderr, "  old-build: " + metadata.get('buildId', '')
-            print >> sys.stderr, "  new-build: " + buildId
-            print >> sys.stderr, "  old-rev: " + metadata.get('revision', '')
-            print >> sys.stderr, "  new-rev: " + revision

        # Filename to merge blob into
        filename = measure + "-" + byDateType + ".json"
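
Note: the new main() in dashboard/mergeresults.py relies on argparse, sys, and ResultContext, none of which this hunk imports, so they are presumably already imported at the top of the module. As a rough, stand-alone sketch of the argparse interface the patch introduces (the file names below are placeholders, not part of this change):

    # Sketch only: reproduces the parser main() builds and shows what the
    # parsed namespace looks like for an invocation such as
    #   python dashboard/mergeresults.py -i part1.json part2.json -o merged.json
    from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter

    p = ArgumentParser(
        description = 'Merge analysis results to single file',
        formatter_class = ArgumentDefaultsHelpFormatter
    )
    p.add_argument("-i", "--input-file", required = True, nargs = '+')
    p.add_argument("-o", "--output-file", required = True)

    # nargs='+' collects every value after -i until the next option flag,
    # so input_file is a list and output_file a single string.
    cfg = p.parse_args(["-i", "part1.json", "part2.json", "-o", "merged.json"])
    assert cfg.input_file == ["part1.json", "part2.json"]
    assert cfg.output_file == "merged.json"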