#include "Floating.h"
#include "FloatList.h"
#include "Language.h"
+#include "LaTeXFonts.h"
#include "LaTeXPackages.h"
#include "Layout.h"
#include "Lexer.h"
#include "support/filetools.h"
#include "support/gettext.h"
#include "support/lstrings.h"
+#include "support/regex.h"
#include <algorithm>
"\\dvipost{osend color pop}\n"
"\\dvipost{cbstart color push Blue}\n"
"\\dvipost{cbend color pop}\n"
- "\\newcommand{\\lyxadded}[3]{\\changestart#3\\changeend}\n"
- "\\newcommand{\\lyxdeleted}[3]{%\n"
+ "\\DeclareRobustCommand{\\lyxadded}[3]{\\changestart#3\\changeend}\n"
+ "\\DeclareRobustCommand{\\lyxdeleted}[3]{%\n"
"\\changestart\\overstrikeon#3\\overstrikeoff\\changeend}\n");
static docstring const changetracking_xcolor_ulem_def = from_ascii(
"%% Change tracking with ulem\n"
- "\\newcommand{\\lyxadded}[3]{{\\color{lyxadded}{}#3}}\n"
- "\\newcommand{\\lyxdeleted}[3]{{\\color{lyxdeleted}\\sout{#3}}}\n");
+ "\\DeclareRobustCommand{\\lyxadded}[3]{{\\color{lyxadded}{}#3}}\n"
+ "\\DeclareRobustCommand{\\lyxdeleted}[3]{{\\color{lyxdeleted}\\sout{#3}}}\n");
static docstring const changetracking_xcolor_ulem_hyperref_def = from_ascii(
"%% Change tracking with ulem\n"
- "\\newcommand{\\lyxadded}[3]{{\\texorpdfstring{\\color{lyxadded}{}}{}#3}}\n"
- "\\newcommand{\\lyxdeleted}[3]{{\\texorpdfstring{\\color{lyxdeleted}\\sout{#3}}{}}}\n");
+ "\\DeclareRobustCommand{\\lyxadded}[3]{{\\texorpdfstring{\\color{lyxadded}{}}{}#3}}\n"
+ "\\DeclareRobustCommand{\\lyxdeleted}[3]{{\\texorpdfstring{\\color{lyxdeleted}\\sout{#3}}{}}}\n");
static docstring const changetracking_none_def = from_ascii(
"\\newcommand{\\lyxadded}[3]{#3}\n"
" {\\def\\RSlemtxt{lemma~}\\newref{lem}{name = \\RSlemtxt}}\n"
" {}\n");
+// Make sure the columns are also output as RTL (right-to-left)
+static docstring const rtloutputdblcol_def = from_ascii(
+ "\\def\\@outputdblcol{%\n"
+ " \\if@firstcolumn\n"
+ " \\global \\@firstcolumnfalse\n"
+ " \\global \\setbox\\@leftcolumn \\box\\@outputbox\n"
+ " \\else\n"
+ " \\global \\@firstcolumntrue\n"
+ " \\setbox\\@outputbox \\vbox {%\n"
+ " \\hb@xt@\\textwidth {%\n"
+ " \\kern\\textwidth \\kern-\\columnwidth %**\n"
+ " \\hb@xt@\\columnwidth {%\n"
+ " \\box\\@leftcolumn \\hss}%\n"
+ " \\kern-\\textwidth %**\n"
+ " \\hfil\n"
+ " {\\normalcolor\\vrule \\@width\\columnseprule}%\n"
+ " \\hfil\n"
+ " \\kern-\\textwidth %**\n"
+ " \\hb@xt@\\columnwidth {%\n"
+ " \\box\\@outputbox \\hss}%\n"
+ " \\kern-\\columnwidth \\kern\\textwidth %**\n"
+ " }%\n"
+ " }%\n"
+ " \\@combinedblfloats\n"
+ " \\@outputpage\n"
+ " \\begingroup\n"
+ " \\@dblfloatplacement\n"
+ " \\@startdblcolumn\n"
+ " \\@whilesw\\if@fcolmade \\fi\n"
+ " {\\@outputpage\n"
+ " \\@startdblcolumn}%\n"
+ " \\endgroup\n"
+ " \\fi\n"
+ "}\n"
+ "\\@mparswitchtrue\n");
+
+
+/////////////////////////////////////////////////////////////////////
+//
+// LyXHTML strings
+//
+/////////////////////////////////////////////////////////////////////
+
+static docstring const lyxnoun_style = from_ascii(
+ "dfn.lyxnoun {\n"
+ " font-variant: small-caps;\n"
+ "}\n");
+
+
+// this is how it normally renders, but it might not always do so.
+static docstring const lyxstrikeout_style = from_ascii(
+ "del.strikeout {\n"
+ " text-decoration: line-through;\n"
+ "}\n");
+
/////////////////////////////////////////////////////////////////////
//
{}
-bool LaTeXFeatures::useBabel() const
+// Decide which language package this document needs — babel, polyglossia,
+// a custom package, or none. Replaces the former useBabel()/usePolyglossia()
+// pair with a single enum-returning query.
+LaTeXFeatures::LangPackage LaTeXFeatures::langPackage() const
{
-	if (usePolyglossia())
-		return false;
-	if (bufferParams().lang_package == "default")
-		return (lyxrc.language_package_selection != LyXRC::LP_NONE)
-			|| (bufferParams().language->lang() != lyxrc.default_language
-			    && !bufferParams().language->babel().empty())
-			|| this->hasLanguages();
-	return (bufferParams().lang_package != "none")
-		|| (bufferParams().language->lang() != lyxrc.default_language
-		    && !bufferParams().language->babel().empty())
-		|| this->hasLanguages();
-}
+	string const local_lp = bufferParams().lang_package;
+
+	// Locally, custom is just stored as a string
+	// in bufferParams().lang_package.
+	if (local_lp != "auto"
+	    && local_lp != "babel"
+	    && local_lp != "default"
+	    && local_lp != "none")
+		return LANG_PACK_CUSTOM;
+
+	if (local_lp == "none")
+		return LANG_PACK_NONE;
+
+	/* If "auto" is selected, we load polyglossia if required,
+	 * else we select babel.
+	 * If babel is selected (either directly or via the "auto"
+	 * mechanism), we really do only require it if we have
+	 * a language that needs it.
+	 */
+	// polyglossia is only chosen when it is required, installed, not
+	// already provided (e.g. by the class/fonts), and every used
+	// language is actually supported by polyglossia.
+	bool const polyglossia_required =
+		isRequired("polyglossia")
+		&& isAvailable("polyglossia")
+		&& !isProvided("babel")
+		&& this->hasOnlyPolyglossiaLanguages();
+	// babel is needed as soon as the main or any secondary language
+	// has a babel name.
+	bool const babel_required =
+		!bufferParams().language->babel().empty()
+		|| !this->getBabelLanguages().empty();
+
+	if (local_lp == "auto") {
+		// polyglossia requirement has priority over babel
+		if (polyglossia_required)
+			return LANG_PACK_POLYGLOSSIA;
+		else if (babel_required)
+			return LANG_PACK_BABEL;
+	}
+	if (local_lp == "babel") {
+		if (babel_required)
+			return LANG_PACK_BABEL;
+	}
-bool LaTeXFeatures::usePolyglossia() const
-{
-	if (bufferParams().lang_package == "default")
-		return (lyxrc.language_package_selection == LyXRC::LP_AUTO)
-			&& isRequired("polyglossia")
-			&& isAvailable("polyglossia")
-			&& !params_.documentClass().provides("babel")
-			&& this->hasPolyglossiaLanguages();
-	return (bufferParams().lang_package == "auto")
-		&& isRequired("polyglossia")
-		&& isAvailable("polyglossia")
-		&& !params_.documentClass().provides("babel")
-		&& this->hasPolyglossiaLanguages();
+	// "default" defers the decision to the global LyXRC preference.
+	if (local_lp == "default") {
+		switch (lyxrc.language_package_selection) {
+		case LyXRC::LP_AUTO:
+			// polyglossia requirement has priority over babel
+			if (polyglossia_required)
+				return LANG_PACK_POLYGLOSSIA;
+			else if (babel_required)
+				return LANG_PACK_BABEL;
+			break;
+		case LyXRC::LP_BABEL:
+			if (babel_required)
+				return LANG_PACK_BABEL;
+			break;
+		case LyXRC::LP_CUSTOM:
+			return LANG_PACK_CUSTOM;
+		case LyXRC::LP_NONE:
+			return LANG_PACK_NONE;
+		}
+	}
+
+	// Fallback: "auto"/"babel" selected but no language package is
+	// actually required by the used languages.
+	return LANG_PACK_NONE;
}
void LaTeXFeatures::useLayout(docstring const & layoutname)
{
// Some code to avoid loops in dependency definition
+ // FIXME THREAD
static int level = 0;
const int maxlevel = 30;
if (level > maxlevel) {
}
+// Returns true if the feature "name" is already provided — either by the
+// document class or (with TeX fonts) by one of the selected LaTeX font
+// packages (roman, sans, typewriter, math).
+bool LaTeXFeatures::isProvided(string const & name) const
+{
+	// With non-TeX fonts (fontspec), the TeX font packages queried
+	// below are not loaded, so only the class can provide features.
+	if (params_.useNonTeXFonts)
+		return params_.documentClass().provides(name);
+
+	// Font-package provision can depend on the font encoding (OT1),
+	// on whether the font covers the whole family ("complete", i.e.
+	// sans and typewriter are left at their defaults), and on whether
+	// a dedicated math font is selected.
+	bool const ot1 = (params_.font_encoding() == "default"
+		|| params_.font_encoding() == "OT1");
+	bool const complete = (params_.fonts_sans == "default")
+		&& (params_.fonts_typewriter == "default");
+	bool const nomath = (params_.fonts_math == "default");
+	return params_.documentClass().provides(name)
+		|| theLaTeXFonts().getLaTeXFont(
+			from_ascii(params_.fonts_roman)).provides(name, ot1,
+								  complete,
+								  nomath)
+		|| theLaTeXFonts().getLaTeXFont(
+			from_ascii(params_.fonts_sans)).provides(name, ot1,
+								 complete,
+								 nomath)
+		|| theLaTeXFonts().getLaTeXFont(
+			from_ascii(params_.fonts_typewriter)).provides(name, ot1,
+								       complete,
+								       nomath)
+		|| theLaTeXFonts().getLaTeXFont(
+			from_ascii(params_.fonts_math)).provides(name, ot1,
+								 complete,
+								 nomath);
+}
+
+
bool LaTeXFeatures::mustProvide(string const & name) const
{
-	return isRequired(name) && !params_.documentClass().provides(name);
+	// Use isProvided() rather than only the class' provides(): features
+	// supplied by the selected font packages must not be loaded twice.
+	return isRequired(name) && !isProvided(name);
}
void LaTeXFeatures::useLanguage(Language const * lang)
{
- if (!lang->babel().empty())
+ if (!lang->babel().empty() || !lang->polyglossia().empty())
UsedLanguages_.insert(lang);
- if (lang->lang() == "vietnamese")
- require("vietnamese");
+ if (!lang->requires().empty())
+ require(lang->requires());
// CJK languages do not have a babel name.
// They use the CJK package
if (lang->encoding()->package() == Encoding::CJK)
}
-bool LaTeXFeatures::hasPolyglossiaLanguages() const
+bool LaTeXFeatures::hasOnlyPolyglossiaLanguages() const
{
+ // first the main language
+ if (params_.language->polyglossia().empty())
+ return false;
+ // now the secondary languages
LanguageList::const_iterator const begin = UsedLanguages_.begin();
for (LanguageList::const_iterator cit = begin;
cit != UsedLanguages_.end();
}
-string LaTeXFeatures::getLanguages() const
+// Returns true if the main language or any secondary language can only be
+// handled by polyglossia (i.e., has no babel support at all).
+bool LaTeXFeatures::hasPolyglossiaExclusiveLanguages() const
+{
+	// first the main language
+	if (params_.language->isPolyglossiaExclusive())
+		return true;
+	// now the secondary languages
+	LanguageList::const_iterator const begin = UsedLanguages_.begin();
+	for (LanguageList::const_iterator cit = begin;
+	     cit != UsedLanguages_.end();
+	     ++cit) {
+		if ((*cit)->isPolyglossiaExclusive())
+			return true;
+	}
+	return false;
+}
+
+
+string LaTeXFeatures::getBabelLanguages() const
{
ostringstream languages;
+ bool first = true;
LanguageList::const_iterator const begin = UsedLanguages_.begin();
for (LanguageList::const_iterator cit = begin;
cit != UsedLanguages_.end();
++cit) {
- if (cit != begin)
+ if ((*cit)->babel().empty())
+ continue;
+ if (!first)
languages << ',';
+ else
+ first = false;
languages << (*cit)->babel();
}
return languages.str();
"fancybox",
"calc",
"units",
- "tipa",
- "tipx",
"framed",
"soul",
"textcomp",
"mathrsfs",
"mathabx",
"mathtools",
+ // "cancel",
"ascii",
"url",
"covington",
"pdfpages",
"amscd",
"slashed",
+ "multicol",
"multirow",
- "tfrupee"
+ "tfrupee",
+ "shapepar",
+ "rsphrase",
+ "algorithm2e"
+};
+
+char const * bibliofeatures[] = {
+ // Known bibliography packages (will be loaded before natbib)
+ "achicago",
+ "apacite",
+ "apalike",
+ "astron",
+ "authordate1-4",
+ "babelbib",
+ "bibgerm",
+ "chicago",
+ "chscite",
+ "harvard",
+ "mslapa",
+ "named"
};
+int const nb_bibliofeatures = sizeof(bibliofeatures) / sizeof(char const *);
+
int const nb_simplefeatures = sizeof(simplefeatures) / sizeof(char const *);
}
string const LaTeXFeatures::getPackages() const
{
ostringstream packages;
- DocumentClass const & tclass = params_.documentClass();
// FIXME: currently, we can only load packages and macros known
// to LyX.
// also unknown packages can be requested. They are silently
// swallowed now. We should change this eventually.
- //
+ // Output all the package option stuff we have been asked to do.
+ map<string, string>::const_iterator it =
+ params_.documentClass().packageOptions().begin();
+ map<string, string>::const_iterator en =
+ params_.documentClass().packageOptions().end();
+ for (; it != en; ++it)
+ if (mustProvide(it->first))
+ packages << "\\PassOptionsToPackage{" << it->second << "}"
+ << "{" << it->first << "}\n";
+
// These are all the 'simple' includes. i.e
// packages which we just \usepackage{package}
- //
for (int i = 0; i < nb_simplefeatures; ++i) {
if (mustProvide(simplefeatures[i]))
- packages << "\\usepackage{"
- << simplefeatures[i] << "}\n";
+ packages << "\\usepackage{" << simplefeatures[i] << "}\n";
}
- //
// The rest of these packages are somewhat more complicated
// than those above.
- //
- // if fontspec is used, AMS packages have to be loaded before
- // fontspec (in BufferParams)
+ // The tipa package and its extensions (tipx, tone) must not
+ // be loaded with non-TeX fonts, since fontspec includes the
+ // respective macros
+ if (mustProvide("tipa") && !params_.useNonTeXFonts)
+ packages << "\\usepackage{tipa}\n";
+ if (mustProvide("tipx") && !params_.useNonTeXFonts)
+ packages << "\\usepackage{tipx}\n";
+ if (mustProvide("tone") && !params_.useNonTeXFonts)
+ packages << "\\usepackage{tone}\n";
+
+ // if fontspec or newtxmath is used, AMS packages have to be loaded
+ // before fontspec (in BufferParams)
string const amsPackages = loadAMSPackages();
- if (!params_.useNonTeXFonts && !amsPackages.empty())
+ bool const ot1 = (params_.font_encoding() == "default" || params_.font_encoding() == "OT1");
+ bool const use_newtxmath =
+ theLaTeXFonts().getLaTeXFont(from_ascii(params_.fonts_math)).getUsedPackage(
+ ot1, false, false) == "newtxmath";
+
+ if (!params_.useNonTeXFonts && !use_newtxmath && !amsPackages.empty())
packages << amsPackages;
// fixltx2e must be loaded after amsthm, since amsthm produces an error with
if (mustProvide("fixltx2e"))
packages << "\\usepackage{fixltx2e}\n";
+ if (mustProvide("cancel") &&
+ params_.use_package("cancel") != BufferParams::package_off)
+ packages << "\\usepackage{cancel}\n";
// wasysym is a simple feature, but it must be after amsmath if both
// are used
// wasysym redefines some integrals (e.g. iint) from amsmath. That
params_.use_package("yhmath") != BufferParams::package_off)
packages << "\\usepackage{yhmath}\n";
+ // stmaryrd must be loaded after amsmath
+ if (mustProvide("stmaryrd") &&
+ params_.use_package("stmaryrd") != BufferParams::package_off)
+ packages << "\\usepackage{stmaryrd}\n";
+
+ if (mustProvide("stackrel") &&
+ params_.use_package("stackrel") != BufferParams::package_off)
+ packages << "\\usepackage{stackrel}\n";
+
if (mustProvide("undertilde") &&
params_.use_package("undertilde") != BufferParams::package_off)
packages << "\\usepackage{undertilde}\n";
// makeidx.sty
if (isRequired("makeidx") || isRequired("splitidx")) {
- if (!tclass.provides("makeidx") && !isRequired("splitidx"))
+ if (!isProvided("makeidx") && !isRequired("splitidx"))
packages << "\\usepackage{makeidx}\n";
- if (!tclass.provides("splitidx") && isRequired("splitidx"))
+ if (mustProvide("splitidx"))
packages << "\\usepackage{splitidx}\n";
packages << "\\makeindex\n";
}
packages << "\\usepackage[ps,mover]{lyxskak}\n";
// setspace.sty
- if (mustProvide("setspace") && !tclass.provides("SetSpace"))
+ if (mustProvide("setspace") && !isProvided("SetSpace"))
packages << "\\usepackage{setspace}\n";
- // esint must be after amsmath and wasysym, since it will redeclare
- // inconsistent integral symbols
+ // we need to assure that mhchem is loaded before esint
+ // because esint must be loaded AFTER amslatex and mhchem loads amlatex
+ // (this info is from the author of mhchem from June 2013)
+ if (mustProvide("mhchem") &&
+ params_.use_package("mhchem") != BufferParams::package_off)
+ packages << "\\PassOptionsToPackage{version=3}{mhchem}\n"
+ "\\usepackage{mhchem}\n";
+
+ // esint must be after amsmath (and packages requiring amsmath, like mhchem)
+ // and wasysym, since it will redeclare inconsistent integral symbols
if (mustProvide("esint") &&
params_.use_package("esint") != BufferParams::package_off)
packages << "\\usepackage{esint}\n";
+ // Known bibliography packages (simple \usepackage{package})
+ for (int i = 0; i < nb_bibliofeatures; ++i) {
+ if (mustProvide(bibliofeatures[i]))
+ packages << "\\usepackage{"
+ << bibliofeatures[i] << "}\n";
+ }
+
+ // Compatibility between achicago and natbib
+ if (mustProvide("achicago") && mustProvide("natbib"))
+ packages << "\\let\\achicagobib\\thebibliography\n";
+
// natbib.sty
// Some classes load natbib themselves, but still allow (or even require)
// plain numeric citations (ReVTeX is such a case, see bug 5182).
// This special case is indicated by the "natbib-internal" key.
- if (mustProvide("natbib") && !tclass.provides("natbib-internal")) {
+ if (mustProvide("natbib") && !isProvided("natbib-internal")) {
packages << "\\usepackage[";
if (params_.citeEngineType() == ENGINE_TYPE_NUMERICAL)
packages << "numbers";
packages << "]{natbib}\n";
}
+ // Compatibility between achicago and natbib
+ if (mustProvide("achicago") && mustProvide("natbib")) {
+ packages << "\\let\\thebibliography\\achicagobib\n";
+ packages << "\\let\\SCcite\\astroncite\n";
+ packages << "\\let\\UnexpandableProtect\\protect\n";
+ }
+
// jurabib -- we need version 0.6 at least.
if (mustProvide("jurabib"))
packages << "\\usepackage{jurabib}[2004/01/25]\n";
+ // opcit -- we pass custombst as we output \bibliographystyle ourselves
+ if (mustProvide("opcit")) {
+ if (isRequired("hyperref"))
+ packages << "\\usepackage[custombst,hyperref]{opcit}\n";
+ else
+ packages << "\\usepackage[custombst]{opcit}\n";
+ }
+
// xargs -- we need version 1.09 at least
if (mustProvide("xargs"))
packages << "\\usepackage{xargs}[2008/03/08]\n";
- // bibtopic -- the dot provides the aux file naming which
- // LyX can detect.
- if (mustProvide("bibtopic"))
- packages << "\\usepackage[dot]{bibtopic}\n";
-
if (mustProvide("xy"))
packages << "\\usepackage[all]{xy}\n";
packages << "\\PassOptionsToPackage{normalem}{ulem}\n"
"\\usepackage{ulem}\n";
- if (mustProvide("mhchem") &&
- params_.use_package("mhchem") != BufferParams::package_off)
- packages << "\\PassOptionsToPackage{version=3}{mhchem}\n"
- "\\usepackage{mhchem}\n";
-
if (mustProvide("nomencl")) {
// Make it work with the new and old version of the package,
// but don't use the compatibility option since it is
if (mustProvide("ct-none"))
macros << changetracking_none_def;
+ if (mustProvide("rtloutputdblcol"))
+ macros << rtloutputdblcol_def;
+
return macros.str();
}
if (!params_.language->babel_presettings().empty())
tmp << params_.language->babel_presettings() << '\n';
- return tmp.str();
+ if (!contains(tmp.str(), '@'))
+ return tmp.str();
+
+ return "\\makeatletter\n" + tmp.str() + "\\makeatother\n";
}
if (!params_.language->babel_postsettings().empty())
tmp << params_.language->babel_postsettings() << '\n';
- return tmp.str();
+ if (!contains(tmp.str(), '@'))
+ return tmp.str();
+
+ return "\\makeatletter\n" + tmp.str() + "\\makeatother\n";
}
}
if (mustProvide("amssymb")
- || params_.use_package("amsmath") == BufferParams::package_on)
+ && params_.use_package("amssymb") != BufferParams::package_off)
tmp << "\\usepackage{amssymb}\n";
return tmp.str();
DocumentClass const & tclass = params_.documentClass();
odocstringstream tcpreamble;
+ if (mustProvide("noun"))
+ tcpreamble << lyxnoun_style;
+ // this isn't exact, but it won't hurt that much if it
+ // wasn't for this.
+ if (mustProvide("ulem"))
+ tcpreamble << lyxstrikeout_style;
+
tcpreamble << tclass.htmlstyles();
list<docstring>::const_iterator cit = usedLayouts_.begin();
namespace {
+
docstring const getFloatI18nPreamble(docstring const & type,
docstring const & name, Language const * lang,
- Encoding const & enc, bool const polyglossia,
- bool const unicode)
+ Encoding const & enc, bool const polyglossia)
{
+ // Check whether name can be encoded in the buffer encoding
+ bool encodable = true;
+ for (size_t i = 0; i < name.size(); ++i) {
+ if (!enc.encodable(name[i])) {
+ encodable = false;
+ break;
+ }
+ }
+
docstring const language = polyglossia ? from_ascii(lang->polyglossia())
: from_ascii(lang->babel());
docstring const langenc = from_ascii(lang->encoding()->iconvName());
docstring const bufenc = from_ascii(enc.iconvName());
docstring const s1 = docstring(1, 0xF0000);
docstring const s2 = docstring(1, 0xF0001);
- docstring const translated = (unicode || langenc == bufenc) ? name
+ docstring const translated = encodable ? name
: from_ascii("\\inputencoding{") + texenc + from_ascii("}")
+ s1 + langenc + s2 + name + s1 + bufenc + s2;
<< "{\\renewcommand{\\" << type << "name}{" << translated << "}}\n";
return os.str();
}
+
+
+// Expand a language-dependent preamble template: substitute "$$lang" with
+// the babel or polyglossia name of "lang", and replace each "_(key)"
+// placeholder with the translated layout string for that key. Translations
+// that cannot be represented in the buffer encoding are wrapped in an
+// \inputencoding switch (delimited by Private Use Area markers).
+docstring const i18npreamble(docstring const & templ, Language const * lang,
+	Encoding const & enc, bool const polyglossia)
+{
+	if (templ.empty())
+		return templ;
+
+	string preamble = polyglossia ?
+		subst(to_utf8(templ), "$$lang", lang->polyglossia()) :
+		subst(to_utf8(templ), "$$lang", lang->babel());
+
+	string const langenc = lang->encoding()->iconvName();
+	string const texenc = lang->encoding()->latexName();
+	string const bufenc = enc.iconvName();
+	// First and second character of plane 15 (Private Use Area)
+	string const s1 = "\xf3\xb0\x80\x80"; // U+F0000
+	string const s2 = "\xf3\xb0\x80\x81"; // U+F0001
+	// FIXME UNICODE
+	// lyx::regex is not unicode-safe.
+	// Should use QRegExp or (boost::u32regex, but that requires ICU)
+	static regex const reg("_\\(([^\\)]+)\\)");
+	smatch sub;
+	// Replace every "_(key)" occurrence, one match at a time.
+	while (regex_search(preamble, sub, reg)) {
+		string const key = sub.str(1);
+		docstring const name = lang->translateLayout(key);
+		// Check whether name can be encoded in the buffer encoding
+		bool encodable = true;
+		for (size_t i = 0; i < name.size(); ++i) {
+			if (!enc.encodable(name[i])) {
+				encodable = false;
+				break;
+			}
+		}
+		string const translated = encodable ? to_utf8(name)
+			: "\\inputencoding{" + texenc + "}"
+			  + s1 + langenc + s2 + to_utf8(name)
+			  + s1 + bufenc + s2;
+		preamble = subst(preamble, sub.str(), translated);
+	}
+	return from_utf8(preamble);
+}
+
}
list<docstring>::const_iterator end = usedLayouts_.end();
for (; cit != end; ++cit) {
// language dependent commands (once per document)
- snippets.insert(tclass[*cit].langpreamble(buffer().language(),
+ snippets.insert(i18npreamble(tclass[*cit].langpreamble(),
+ buffer().language(),
buffer().params().encoding(),
- use_polyglossia,
- runparams().isFullUnicode()));
+ use_polyglossia));
// commands for language changing (for multilanguage documents)
if ((use_babel || use_polyglossia) && !UsedLanguages_.empty()) {
- snippets.insert(tclass[*cit].babelpreamble(
+ snippets.insert(i18npreamble(
+ tclass[*cit].babelpreamble(),
buffer().language(),
buffer().params().encoding(),
- use_polyglossia,
- runparams().isFullUnicode()));
+ use_polyglossia));
for (lang_it lit = lbeg; lit != lend; ++lit)
- snippets.insert(tclass[*cit].babelpreamble(
+ snippets.insert(i18npreamble(
+ tclass[*cit].babelpreamble(),
*lit,
buffer().params().encoding(),
- use_polyglossia,
- runparams().isFullUnicode()));
+ use_polyglossia));
}
}
if ((use_babel || use_polyglossia) && !UsedLanguages_.empty()) {
snippets.insert(getFloatI18nPreamble(
type, name, buffer().language(),
buffer().params().encoding(),
- use_polyglossia,
- runparams().isFullUnicode()));
+ use_polyglossia));
for (lang_it lit = lbeg; lit != lend; ++lit) {
string const code = (*lit)->code();
name = (*lit)->translateLayout(fl.name());
snippets.insert(getFloatI18nPreamble(
type, name, *lit,
buffer().params().encoding(),
- use_polyglossia,
- runparams().isFullUnicode()));
+ use_polyglossia));
}
}
}
+ cit = usedInsetLayouts_.begin();
+ end = usedInsetLayouts_.end();
+ TextClass::InsetLayouts const & ils = tclass.insetLayouts();
+ for (; cit != end; ++cit) {
+ TextClass::InsetLayouts::const_iterator it = ils.find(*cit);
+ if (it == ils.end())
+ continue;
+ // language dependent commands (once per document)
+ snippets.insert(i18npreamble(it->second.langpreamble(),
+ buffer().language(),
+ buffer().params().encoding(),
+ use_polyglossia));
+ // commands for language changing (for multilanguage documents)
+ if ((use_babel || use_polyglossia) && !UsedLanguages_.empty()) {
+ snippets.insert(i18npreamble(
+ it->second.babelpreamble(),
+ buffer().language(),
+ buffer().params().encoding(),
+ use_polyglossia));
+ for (lang_it lit = lbeg; lit != lend; ++lit)
+ snippets.insert(i18npreamble(
+ it->second.babelpreamble(),
+ *lit,
+ buffer().params().encoding(),
+ use_polyglossia));
+ }
+ }
+
odocstringstream tcpreamble;
set<docstring>::const_iterator const send = snippets.end();
set<docstring>::const_iterator it = snippets.begin();
// effect. (Lgb)
}
if (cit->second)
- os << "\n\\newsubfloat{" << from_ascii(fl.floattype()) << "}\n";
+ // The subfig package is loaded later
+ os << "\n\\AtBeginDocument{\\newsubfloat{" << from_ascii(fl.floattype()) << "}}\n";
}
}