def write_metrics_info(metrics_info, metrics_file):
    """Write snippet metrics to metrics_file, one line per snippet.

    metrics_info -- iterable of (snippet_index, metric) tuples; each is
                    written as "Snippet <index> <metric>\n".
    metrics_file -- path of the text file to (over)write.
    """
    # 'with' guarantees the file is closed even if a write raises.
    with open(metrics_file, 'w') as metrics:
        for metric in metrics_info:
            metrics.write("Snippet %s %f\n" % metric)
+
+
# Reads a .tex file and creates an identical file but only with
# pages whose index is in pages_to_keep
def filter_pages(source_path, destination_path, pages_to_keep):
    """Copy source_path to destination_path, keeping only selected pages.

    A "page" is the text between \\begin{preview} and \\end{preview};
    pages are numbered from 1.  Everything outside preview environments
    (e.g. macro definitions) is always copied.  The first definition of
    each macro done with \\renewcommandx is rewritten as \\newcommandx,
    since in the filtered file the original definition may be gone.

    source_path      -- path of the .tex file to read.
    destination_path -- path of the filtered .tex file to write.
    pages_to_keep    -- container of 1-based page indexes to retain.
    """
    # Groups: (1) the defining command, (2) the macro name (a backslash
    # followed by letters), (3) the rest of the definition.
    # NOTE: the macro-name group needs a doubly-escaped backslash
    # (\\\\ in source -> \\ in the regex -> a literal backslash);
    # a single \\ would wrongly match a literal '[a-zA-Z]' sequence.
    def_re = re.compile(b"(\\\\newcommandx|\\\\renewcommandx|\\\\global\\\\long\\\\def)(\\\\[a-zA-Z]+)(.+)")

    page_index = 0
    skip_page = False
    macros = []
    with open(source_path, "rb") as source_file, \
         open(destination_path, "wb") as destination_file:
        for line in source_file:
            # We found a new page
            if line.startswith(b"\\begin{preview}"):
                page_index += 1
                # If the page index isn't in pages_to_keep we don't copy it
                skip_page = page_index not in pages_to_keep

            if not skip_page:
                match = def_re.match(line)
                if match is not None:
                    definecmd = match.group(1)
                    macroname = match.group(2)
                    if macroname not in macros:
                        macros.append(macroname)
                        # A \renewcommandx on a macro's first definition
                        # would fail (nothing to redefine), so demote it.
                        if definecmd == b"\\renewcommandx":
                            line = line.replace(definecmd, b"\\newcommandx")
                destination_file.write(line)

            # End of a page, we reset the skip_page bool
            if line.startswith(b"\\end{preview}"):
                skip_page = False
+
# Joins two metrics list, that is a list of tuple (page_index, metric)
# new_page_indexes contains the original page number of the pages in new_metrics
# e.g. new_page_indexes[3] == 14 means that the 4th item in new_metrics is the 15th in the original counting
# original_bitmap and destination_bitmap are file name models used to rename the new files
# e.g. image_new%d.png and image_%d.png
def join_metrics_and_rename(original_metrics, new_metrics, new_page_indexes, original_bitmap, destination_bitmap):
    """Merge new_metrics into original_metrics (in place), renaming bitmaps.

    Both metrics lists are sorted by page index.  Each entry of
    new_metrics is remapped to its original page number via
    new_page_indexes and then replaces or is inserted into
    original_metrics at the right position.  The corresponding bitmap
    file, when present on disk, is renamed to the destination model.
    """
    cursor = 0
    for new_index, metric in new_metrics:
        # Translate the new (local) page number back to the original one.
        original_index = new_page_indexes[new_index - 1]

        # Rename the bitmap for this page if it was produced.
        old_name = original_bitmap % (new_index)
        if os.path.isfile(old_name):
            os.rename(old_name, destination_bitmap % original_index)

        # Walk forward to the first entry not before original_index.
        # The cursor persists across iterations since both lists are sorted.
        while cursor < len(original_metrics) and original_metrics[cursor][0] < original_index:
            cursor += 1

        entry = (original_index, metric)
        at_match = (cursor < len(original_metrics)
                    and original_metrics[cursor][0] == original_index)
        if at_match:
            # Same page already present: overwrite its metric.
            original_metrics[cursor] = entry
        else:
            # New page: splice it in, keeping the list sorted.
            original_metrics.insert(cursor, entry)
+
+
def run_latex(latex, latex_file, bibtex = None):
    """Compile latex_file, optionally running bibtex and re-running latex.

    latex      -- the latex command to invoke.
    latex_file -- the .tex file to compile.
    bibtex     -- optional bibtex command; when given, a bibtex pass and
                  latex reruns are performed as needed.
    Returns (status, stdout) of the last latex run.
    """
    status, stdout = run_tex(latex, latex_file)

    # Without bibtex a single latex pass is all there is to do.
    if bibtex is None:
        return status, stdout

    # Derive the .aux and .log file names from the .tex name.
    aux_file = latex_file_re.sub(".aux", latex_file)
    log_file = latex_file_re.sub(".log", latex_file)

    progress("Checking if a bibtex run is necessary")
    if string_in_file(r"\bibdata", aux_file):
        # Bibliography data present: run bibtex, then latex again.
        run_tex(bibtex, aux_file)
        status, stdout = run_tex(latex, latex_file)

    progress("Checking if a latex rerun is necessary")
    if string_in_file("Warning: Citation", log_file):
        # Unresolved citations remain: one more latex pass.
        status, stdout = run_tex(latex, latex_file)

    return status, stdout
+
+
def run_tex(tex, tex_file):
    """Run the tex processor on tex_file, warning on failure.

    Returns (status, stdout) from the command invocation; a non-zero
    status only triggers a progress warning, it is not raised.
    """
    command = '%s "%s"' % (tex, tex_file)
    status, stdout = run_command(command)
    if status:
        progress("Warning: %s had problems compiling %s"
                 % (os.path.basename(tex), tex_file))
    return status, stdout
+
+
def string_in_file(string, infile):
    """Return True if `string` occurs anywhere in the file `infile`.

    string -- the (str) text to search for; it is encoded and compared
              against the file's raw bytes.
    infile -- path of the file to scan; a missing file yields False.
    """
    if not os.path.isfile(infile):
        return False
    # Encode once, outside the loop.
    needle = string.encode()
    # Stream line by line instead of readlines() (no full-file buffer);
    # 'with' closes the handle even if iteration raises.
    with open(infile, 'rb') as f:
        for line in f:
            if needle in line:
                return True
    return False
+
+
+# Returns a list of indexes of pages giving errors extracted from the latex log
+def check_latex_log(log_file):
+
+ error_re = re.compile(b"^! ")
+ snippet_re = re.compile(b"^Preview: Snippet ")
+ data_re = re.compile(b"([0-9]+) ([0-9]+) ([0-9]+) ([0-9]+)")
+
+ found_error = False
+ error_pages = []