return false;
}
-} // anon namespace
+} // namespace
class BufferParams::Impl
papersize = PAPER_DEFAULT;
orientation = ORIENTATION_PORTRAIT;
use_geometry = false;
- biblio_style = "plain";
+ biblio_style = string();
use_bibtopic = false;
multibib = string();
use_indices = false;
secnumdepth = 3;
tocdepth = 3;
language = default_language;
- fontenc = "global";
+ fontenc = "auto";
fonts_roman[0] = "default";
fonts_roman[1] = "default";
fonts_sans[0] = "default";
html_css_as_file = false;
display_pixel_ratio = 1.0;
+ shell_escape = false;
output_sync = false;
use_refstyle = true;
use_minted = false;
return "\"" + str + "\"";
return str;
}
-}
+} // namespace
void BufferParams::writeFile(ostream & os, Buffer const * buf) const
// http://www.mail-archive.com/lyx-devel@lists.lyx.org/msg144349.html
os << from_ascii(features.getColorOptions());
- // If we use hyperref, jurabib, japanese, varioref or vietnamese,
+ // If we use hyperref, jurabib, japanese or varioref,
// we have to call babel before
if (use_babel
&& (features.isRequired("jurabib")
|| features.isRequired("hyperref")
|| features.isRequired("varioref")
- || features.isRequired("vietnamese")
|| features.isRequired("japanese"))) {
os << features.getBabelPresettings();
// FIXME UNICODE
OutputParams tmp_params = features.runparams();
pdfoptions().writeLaTeX(tmp_params, os,
features.isProvided("hyperref"));
- // correctly break URLs with hyperref and dvi output
- if (features.runparams().flavor == OutputParams::LATEX
+ // correctly break URLs with hyperref and dvi/ps output
+ if (features.runparams().hyperref_driver == "dvips"
&& features.isAvailable("breakurl"))
os << "\\usepackage{breakurl}\n";
} else if (features.isRequired("nameref"))
// called after babel, though.
if (use_babel && !features.isRequired("jurabib")
&& !features.isRequired("hyperref")
- && !features.isRequired("varioref")
- && !features.isRequired("vietnamese")
+ && !features.isRequired("varioref")
&& !features.isRequired("japanese")) {
os << features.getBabelPresettings();
// FIXME UNICODE
// (the rest is obsoleted by the new TU encoding).
// It needs to be loaded at least after amsmath, amssymb,
// esint and the other packages that provide special glyphs
- if (features.mustProvide("tipa") && useNonTeXFonts) {
+ if (features.mustProvide("tipa") && useNonTeXFonts
+ && !features.isProvided("xunicode")) {
// The package officially only supports XeTeX, but also works
// with LuaTeX. Thus we work around its XeTeX test.
if (features.runparams().flavor != OutputParams::XETEX) {
}
// ... but before biblatex (see #7065)
- if (features.mustProvide("biblatex")) {
+ if (features.mustProvide("biblatex")
+ && !features.isProvided("biblatex-natbib")
+ && !features.isProvided("natbib-internal")
+ && !features.isProvided("natbib")
+ && !features.isProvided("jurabib")) {
string delim = "";
string opts;
os << "\\usepackage";
v.push_back("luatex");
v.push_back("dviluatex");
}
- } else
- v.push_back(buffmt);
+ } else {
+ string rbuffmt = buffmt;
+ // If we use an OutputFormat in Japanese docs,
+ // we need a special format in order to get the path
+ // via pLaTeX (#8823)
+ if (documentClass().hasOutputFormat()
+ && encoding().package() == Encoding::japanese)
+ rbuffmt += "-ja";
+ v.push_back(rbuffmt);
+ }
v.push_back("xhtml");
v.push_back("text");
if (find(backs.begin(), backs.end(), dformat) == backs.end()) {
// Get shortest path to format
Graph::EdgePath path;
- for (vector<string>::const_iterator it = backs.begin();
- it != backs.end(); ++it) {
- Graph::EdgePath p = theConverters().getPath(*it, dformat);
+ for (auto const & bvar : backs) {
+ Graph::EdgePath p = theConverters().getPath(bvar, dformat);
if (!p.empty() && (path.empty() || p.size() < path.size())) {
path = p;
}
if (!default_output_format.empty()
&& default_output_format != "default")
return default_output_format;
- if (isDocBook()
- || encoding().package() == Encoding::japanese) {
+ if (isDocBook()) {
FormatList const & formats = exportableFormats(true);
if (formats.empty())
return string();
// return the first we find
return formats.front()->name();
}
+ if (encoding().package() == Encoding::japanese)
+ return lyxrc.default_platex_view_format;
if (useNonTeXFonts)
return lyxrc.default_otf_view_format;
return lyxrc.default_view_format;
string const BufferParams::main_font_encoding() const
{
- return font_encodings().empty() ? "default" : font_encodings().back();
+ // Return the main (i.e. last) document font encoding. If no
+ // encodings are set, distinguish languages that explicitly request
+ // no font encoding ("none") from the generic "default" fallback.
+ if (font_encodings().empty()) {
+ if (ascii_lowercase(language->fontenc(*this)) == "none")
+ return "none";
+ return "default";
+ }
+ return font_encodings().back();
}
vector<string> const BufferParams::font_encodings() const
{
- string doc_fontenc = (fontenc == "global") ? lyxrc.fontenc : fontenc;
+ string doc_fontenc = (fontenc == "auto") ? string() : fontenc;
vector<string> fontencs;
// "default" means "no explicit font encoding"
if (doc_fontenc != "default") {
- fontencs = getVectorFromString(doc_fontenc);
- if (!language->fontenc().empty()
- && ascii_lowercase(language->fontenc()) != "none") {
- vector<string> fencs = getVectorFromString(language->fontenc());
- vector<string>::const_iterator fit = fencs.begin();
- for (; fit != fencs.end(); ++fit) {
- if (find(fontencs.begin(), fontencs.end(), *fit) == fontencs.end())
- fontencs.push_back(*fit);
+ if (!doc_fontenc.empty())
+ // If we have a custom setting, we use only that!
+ return getVectorFromString(doc_fontenc);
+ if (!language->fontenc(*this).empty()
+ && ascii_lowercase(language->fontenc(*this)) != "none") {
+ vector<string> fencs = getVectorFromString(language->fontenc(*this));
+ for (auto & fe : fencs) {
+ if (find(fontencs.begin(), fontencs.end(), fe) == fontencs.end())
+ fontencs.push_back(fe);
}
}
}
language->encoding()->package();
// Create list of inputenc options:
- set<string> encodings;
+ set<string> encoding_set;
// luainputenc fails with more than one encoding
if (!features.runparams().isFullUnicode()) // if we reach this point, this means LuaTeX with TeX fonts
// list all input encodings used in the document
- encodings = features.getEncodingSet(doc_encoding);
+ encoding_set = features.getEncodingSet(doc_encoding);
// If the "japanese" package (i.e. pLaTeX) is used,
// inputenc must be omitted.
// see http://www.mail-archive.com/lyx-devel@lists.lyx.org/msg129680.html
- if ((!encodings.empty() || package == Encoding::inputenc)
+ if ((!encoding_set.empty() || package == Encoding::inputenc)
&& !features.isRequired("japanese")
&& !features.isProvided("inputenc")) {
os << "\\usepackage[";
- set<string>::const_iterator it = encodings.begin();
- set<string>::const_iterator const end = encodings.end();
+ set<string>::const_iterator it = encoding_set.begin();
+ set<string>::const_iterator const end = encoding_set.end();
if (it != end) {
os << from_ascii(*it);
++it;
for (; it != end; ++it)
os << ',' << from_ascii(*it);
if (package == Encoding::inputenc) {
- if (!encodings.empty())
+ if (!encoding_set.empty())
os << ',';
os << from_ascii(doc_encoding);
}
string const & BufferParams::defaultBiblioStyle() const
{
+ if (!biblio_style.empty())
+ return biblio_style;
+
map<string, string> const & bs = documentClass().defaultBiblioStyle();
auto cit = bs.find(theCiteEnginesList.getTypeAsString(citeEngineType()));
if (cit != bs.end())