# -*- coding: utf-8 -*-
# This file is part of lyx2lyx
-# -*- coding: utf-8 -*-
# Copyright (C) 2016 The LyX team
#
# This program is free software; you can redistribute it and/or
return
if get_value(document.header, "\\inputencoding", i) == "pt254":
document.header[i] = "\\inputencoding pt154"
-
+
def convert_ibranches(document):
' Add "inverted 0" to branch insets'
else:
document.warning("Malformed LyX document: No selection indicator for branch " + branch)
selected = 1
-
+
# the value tells us whether the branch is selected
ourbranches[document.header[i][8:].strip()] = selected
i += 1
i += 1
# now we need to add the new branches to the header
- for old, new in ibranches.iteritems():
+ for old, new in ibranches.items():
i = find_token(document.header, "\\branch " + old, 0)
if i == -1:
document.warning("Can't find branch %s even though we found it before!" % (old))
replace = "'"
document.body[k:l+1] = [replace]
i = l
-
+
def revert_iopart(document):
" Input new styles via local layout "
def revert_crimson(document):
- " Revert native Cochineal/Crimson font definition to LaTeX "
+ " Revert native Cochineal/Crimson font definition to LaTeX "
if find_token(document.header, "\\use_non_tex_fonts false", 0) != -1:
preamble = ""
def revert_cochinealmath(document):
- " Revert cochineal newtxmath definitions to LaTeX "
+ " Revert cochineal newtxmath definitions to LaTeX "
- if find_token(document.header, "\\use_non_tex_fonts false", 0) != -1:
+ if find_token(document.header, "\\use_non_tex_fonts false", 0) != -1:
i = find_token(document.header, "\\font_math \"cochineal-ntxm\"", 0)
if i != -1:
add_to_preamble(document, "\\usepackage[cochineal]{newtxmath}")
"end{btUnit}"
"\\end_layout", "", "\\end_inset", "", "",
"\\end_layout", ""]
-
+
+
def revert_chapterbib(document):
    """Revert chapterbib support to raw LaTeX.

    If the document requests per-unit bibliographies (\\multibib child),
    emulate them for older LyX versions: with biblatex insert a
    \\newrefsection ERT before each include inset, with bibtopic wrap each
    include inset in a btUnit environment, otherwise load the chapterbib
    package in the preamble.
    """

    # 1. Get cite engine
    engine = "basic"
    i = find_token(document.header, "\\cite_engine", 0)
    if i == -1:
        document.warning("Malformed document! Missing \\cite_engine")
    else:
        engine = get_value(document.header, "\\cite_engine", i)

    # 2. Do we use biblatex?
    biblatex = engine in ["biblatex", "biblatex-natbib"]

    # 3. Store multibib document header value
    multibib = ""
    i = find_token(document.header, "\\multibib", 0)
    if i != -1:
        multibib = get_value(document.header, "\\multibib", i)

    # Only "child" granularity needs reverting (covers the empty/missing case too)
    if multibib != "child":
        # nothing to do
        return

    # 4. remove multibib header
    del document.header[i]

    # 5. Biblatex: start a new refsection before each child include
    if biblatex:
        # find include insets
        i = 0
        while True:
            i = find_token(document.body, "\\begin_inset CommandInset include", i)
            if i == -1:
                break
            j = find_end_of_inset(document.body, i)
            if j == -1:
                document.warning("Can't find end of bibtex inset at line %d!!" %(i))
                i += 1
                continue
            parent = get_containing_layout(document.body, i)
            parbeg = parent[1]

            # Insert ERT \\newrefsection before inset
            # NOTE: each list element must be its own line; a missing comma
            # here would silently merge two lines via implicit string
            # concatenation and emit a malformed document.
            beg = ["\\begin_layout Standard",
                   "\\begin_inset ERT", "status open", "",
                   "\\begin_layout Plain Layout", "", "",
                   "\\backslash",
                   "newrefsection",
                   "\\end_layout", "", "\\end_inset", "", "",
                   "\\end_layout", ""]
            document.body[parbeg-1:parbeg-1] = beg
            j += len(beg)
            i = j + 1
        return

    # 6. Bibtex/Bibtopic
    i = find_token(document.header, "\\use_bibtopic", 0)
    if i == -1:
        # this should not happen
        document.warning("Malformed LyX document! No \\use_bibtopic header found!")
        return
    if get_value(document.header, "\\use_bibtopic", i) == "true":
        # find include insets
        i = 0
        while True:
            i = find_token(document.body, "\\begin_inset CommandInset include", i)
            if i == -1:
                break
            j = find_end_of_inset(document.body, i)
            if j == -1:
                document.warning("Can't find end of bibtex inset at line %d!!" %(i))
                i += 1
                continue
            parent = get_containing_layout(document.body, i)
            parbeg = parent[1]
            parend = parent[2]

            # Wrap the include inset into \\begin{btUnit}...\\end{btUnit} ERTs
            beg = ["\\begin_layout Standard",
                   "\\begin_inset ERT", "status open", "",
                   "\\begin_layout Plain Layout", "", "",
                   "\\backslash",
                   "begin{btUnit}",
                   "\\end_layout", "", "\\end_inset", "", "",
                   "\\end_layout", ""]
            end = ["\\begin_layout Standard",
                   "\\begin_inset ERT", "status open", "",
                   "\\begin_layout Plain Layout", "", "",
                   "\\backslash",
                   "end{btUnit}",
                   "\\end_layout", "", "\\end_inset", "", "",
                   "\\end_layout", ""]
            # insert the trailing ERT first so parbeg stays valid
            document.body[parend+1:parend+1] = end
            document.body[parbeg-1:parbeg-1] = beg
            j += len(beg) + len(end)
            i = j + 1
        return

    # 7. Chapterbib proper
    add_to_preamble(document, ["\\usepackage{chapterbib}"])
+
+
def convert_dashligatures(document):
    """Remove a zero-length space (U+200B) after en- and em-dashes.

    Also records the appropriate \\use_dash_ligatures header setting based
    on which LyX version wrote the file.  Insets and paragraph options where
    dashes must not be touched are skipped.
    """

    i = find_token(document.header, "\\use_microtype", 0)
    if i != -1:
        if document.initial_format > 474 and document.initial_format < 509:
            # This was created by LyX 2.2
            document.header[i+1:i+1] = ["\\use_dash_ligatures false"]
        else:
            # This was created by LyX 2.1 or earlier
            document.header[i+1:i+1] = ["\\use_dash_ligatures true"]

    i = 0
    while i < len(document.body):
        words = document.body[i].split()
        # Skip some document parts where dashes are not converted
        if len(words) > 1 and words[0] == "\\begin_inset" and \
           words[1] in ["CommandInset", "ERT", "External", "Formula", \
                        "FormulaMacro", "Graphics", "IPA", "listings"]:
            j = find_end_of_inset(document.body, i)
            if j == -1:
                document.warning("Malformed LyX document: Can't find end of " \
                                 + words[1] + " inset at line " + str(i))
                i += 1
            else:
                i = j
            continue
        if len(words) > 0 and words[0] in ["\\leftindent", \
                "\\paragraph_spacing", "\\align", "\\labelwidthstring"]:
            i += 1
            continue

        start = 0
        while True:
            j = document.body[i].find(u"\u2013", start) # en-dash
            k = document.body[i].find(u"\u2014", start) # em-dash
            if j == -1 and k == -1:
                break
            if j == -1 or (k != -1 and k < j):
                # leftmost dash wins
                j = k
            after = document.body[i][j+1:]
            if after.startswith(u"\u200B"):
                # strip the zero-length space directly after the dash
                document.body[i] = document.body[i][:j+1] + after[1:]
            elif not after:
                # dash at end of line: the U+200B may have been wrapped onto
                # the next line.  Guard against running past the last line.
                if i + 1 < len(document.body) and \
                   document.body[i+1].startswith(u"\u200B"):
                    document.body[i+1] = document.body[i+1][1:]
                break
            # FIX: continue scanning the rest of the line; previously the
            # loop bailed out at the first dash without a trailing U+200B,
            # leaving later dash+U+200B pairs on the same line unconverted.
            start = j + 1
        i += 1
+
+
def revert_dashligatures(document):
    """Remove font ligature settings for en- and em-dashes.

    When the document requests dash ligatures with TeX fonts, re-insert a
    zero-length space (U+200B) after every en- and em-dash so that older
    LyX versions render them as literal dashes.  The \\use_dash_ligatures
    header line is removed in any case.
    """
    pos = find_token(document.header, "\\use_dash_ligatures", 0)
    if pos == -1:
        return
    ligatures_on = get_bool_value(document.header, "\\use_dash_ligatures", pos)
    del document.header[pos]

    non_tex_fonts = False
    pos = find_token(document.header, "\\use_non_tex_fonts", 0)
    if pos != -1:
        non_tex_fonts = get_bool_value(document.header, "\\use_non_tex_fonts", pos)
    if not ligatures_on or non_tex_fonts:
        return

    # Document parts where dashes must not be touched.
    skipped_insets = ["CommandInset", "ERT", "External", "Formula",
                      "FormulaMacro", "Graphics", "IPA", "listings"]
    skipped_options = ["\\leftindent", "\\paragraph_spacing", "\\align",
                       "\\labelwidthstring"]

    # Add a zero-length space (U+200B) after en- and em-dashes
    lineno = 0
    while lineno < len(document.body):
        words = document.body[lineno].split()
        if len(words) > 1 and words[0] == "\\begin_inset" \
                and words[1] in skipped_insets:
            endline = find_end_of_inset(document.body, lineno)
            if endline == -1:
                document.warning("Malformed LyX document: Can't find end of "
                                 + words[1] + " inset at line " + str(lineno))
                lineno += 1
            else:
                lineno = endline
            continue
        if words and words[0] in skipped_options:
            lineno += 1
            continue

        scan_from = 0
        while True:
            line = document.body[lineno]
            en_dash = line.find(u"\u2013", scan_from)
            em_dash = line.find(u"\u2014", scan_from)
            if en_dash == -1 and em_dash == -1:
                break
            # operate on whichever dash comes first
            if en_dash == -1 or (em_dash != -1 and em_dash < en_dash):
                dash = em_dash
            else:
                dash = en_dash
            document.body[lineno] = line[:dash+1] + u"\u200B" + line[dash+1:]
            scan_from = dash + 1
        lineno += 1
+
+
def revert_noto(document):
    """Revert native Noto font definitions to LaTeX preamble code.

    For each Noto family selected in the header, reset the header entry to
    "default" and redefine the corresponding LaTeX font-default macro in
    the preamble.  Only applies when non-TeX fonts are disabled.
    """

    if find_token(document.header, "\\use_non_tex_fonts false", 0) == -1:
        return
    # (header token, LaTeX default macro, family name) for each Noto family
    noto_fonts = [
        ("\\font_roman \"NotoSerif-TLF\"", "\\rmdefault", "NotoSerif-TLF"),
        ("\\font_sans \"NotoSans-TLF\"", "\\sfdefault", "NotoSans-TLF"),
        ("\\font_typewriter \"NotoMono-TLF\"", "\\ttdefault", "NotoMono-TLF"),
    ]
    for token, macro, family in noto_fonts:
        i = find_token(document.header, token, 0)
        if i != -1:
            add_to_preamble(document,
                            ["\\renewcommand{%s}{%s}" % (macro, family)])
            document.header[i] = document.header[i].replace(family, "default")
+
##
# Conversion hub
[531, []],
[532, [convert_literalparam]],
[533, []],
+ [534, []],
+ [535, [convert_dashligatures]],
+ [536, []]
]
revert = [
+ [535, [revert_noto]],
+ [534, [revert_dashligatures]],
+ [533, [revert_chapterbib]],
[532, [revert_multibib]],
[531, [revert_literalparam]],
[530, [revert_qualicites]],