#include "support/filetools.h"
#include "support/gettext.h"
#include "support/lstrings.h"
+#include "support/regex.h"
#include <algorithm>
"mathrsfs",
"mathabx",
"mathtools",
+ "cancel",
"ascii",
"url",
"covington",
// This special case is indicated by the "natbib-internal" key.
if (mustProvide("natbib") && !tclass.provides("natbib-internal")) {
packages << "\\usepackage[";
- if (params_.citeEngine() == ENGINE_NATBIB_NUMERICAL)
+ if (params_.citeEngineType() == ENGINE_TYPE_NUMERICAL)
packages << "numbers";
else
packages << "authoryear";
packages << "\\PassOptionsToPackage{normalem}{ulem}\n"
"\\usepackage{ulem}\n";
- if (params_.use_package("mhchem") == BufferParams::package_on ||
- (mustProvide("mhchem") &&
- params_.use_package("mhchem") != BufferParams::package_off))
+ if (mustProvide("mhchem") &&
+ params_.use_package("mhchem") != BufferParams::package_off)
packages << "\\PassOptionsToPackage{version=3}{mhchem}\n"
"\\usepackage{mhchem}\n";
docstring const & name, Language const * lang,
Encoding const & enc, bool const polyglossia)
{
+ // Check whether name can be encoded in the buffer encoding
+ bool encodable = true;
+ for (size_t i = 0; i < name.size(); ++i) {
+ if (!enc.encodable(name[i])) {
+ encodable = false;
+ break;
+ }
+ }
+
docstring const language = polyglossia ? from_ascii(lang->polyglossia())
: from_ascii(lang->babel());
docstring const langenc = from_ascii(lang->encoding()->iconvName());
docstring const bufenc = from_ascii(enc.iconvName());
docstring const s1 = docstring(1, 0xF0000);
docstring const s2 = docstring(1, 0xF0001);
- docstring const translated = (langenc == bufenc) ? name
+ docstring const translated = encodable ? name
: from_ascii("\\inputencoding{") + texenc + from_ascii("}")
+ s1 + langenc + s2 + name + s1 + bufenc + s2;
<< "{\\renewcommand{\\" << type << "name}{" << translated << "}}\n";
return os.str();
}
+
+
+// Expand an i18n preamble template for the given language:
+//  - substitutes the "$$lang" placeholder with the language's polyglossia
+//    or babel name (depending on \p polyglossia), and
+//  - replaces every "_(key)" marker with the translated layout string
+//    for "key" in \p lang.
+// Translations that cannot be represented in the buffer encoding \p enc
+// are wrapped in an \inputencoding switch bracketed by the U+F0000/U+F0001
+// Private Use Area sentinels — the same sentinel convention used by the
+// encodable check in the function above; presumably a later output pass
+// consumes these markers to restore the buffer encoding (verify at caller).
+// Returns the expanded preamble (empty input is returned unchanged).
+docstring const i18npreamble(docstring const & templ, Language const * lang,
+	Encoding const & enc, bool const polyglossia)
+{
+	if (templ.empty())
+		return templ;
+
+	string preamble = polyglossia ?
+		subst(to_utf8(templ), "$$lang", lang->polyglossia()) :
+		subst(to_utf8(templ), "$$lang", lang->babel());
+
+	string const langenc = lang->encoding()->iconvName();
+	string const texenc = lang->encoding()->latexName();
+	string const bufenc = enc.iconvName();
+	// First and second character of plane 15 (Private Use Area),
+	// spelled out as UTF-8 byte sequences.
+	string const s1 = "\xf3\xb0\x80\x80"; // U+F0000
+	string const s2 = "\xf3\xb0\x80\x81"; // U+F0001
+	// FIXME UNICODE
+	// lyx::regex is not unicode-safe.
+	// Should use QRegExp or boost::u32regex (but that requires ICU)
+	// Matches "_(key)" and captures "key" (any run of non-')' chars).
+	static regex const reg("_\\(([^\\)]+)\\)");
+	smatch sub;
+	// NOTE(review): if a translated string itself contains "_(...)" this
+	// loop re-matches it — could recurse or spin; confirm translations
+	// never contain the marker syntax.
+	while (regex_search(preamble, sub, reg)) {
+		string const key = sub.str(1);
+		docstring const name = lang->translateLayout(key);
+		// Check whether name can be encoded in the buffer encoding
+		bool encodable = true;
+		for (size_t i = 0; i < name.size(); ++i) {
+			if (!enc.encodable(name[i])) {
+				encodable = false;
+				break;
+			}
+		}
+		// Encodable: emit the translation verbatim; otherwise switch
+		// input encoding around it and bracket with the PUA sentinels.
+		string const translated = encodable ? to_utf8(name)
+			: "\\inputencoding{" + texenc + "}"
+			+ s1 + langenc + s2 + to_utf8(name)
+			+ s1 + bufenc + s2;
+		// Replace every occurrence of this exact "_(key)" match.
+		preamble = subst(preamble, sub.str(), translated);
+	}
+	return from_utf8(preamble);
+}
+
}
list<docstring>::const_iterator end = usedLayouts_.end();
for (; cit != end; ++cit) {
// language dependent commands (once per document)
- snippets.insert(tclass[*cit].langpreamble(buffer().language(),
+ snippets.insert(i18npreamble(tclass[*cit].langpreamble(),
+ buffer().language(),
buffer().params().encoding(),
use_polyglossia));
// commands for language changing (for multilanguage documents)
if ((use_babel || use_polyglossia) && !UsedLanguages_.empty()) {
- snippets.insert(tclass[*cit].babelpreamble(
+ snippets.insert(i18npreamble(
+ tclass[*cit].babelpreamble(),
buffer().language(),
buffer().params().encoding(),
use_polyglossia));
for (lang_it lit = lbeg; lit != lend; ++lit)
- snippets.insert(tclass[*cit].babelpreamble(
+ snippets.insert(i18npreamble(
+ tclass[*cit].babelpreamble(),
*lit,
buffer().params().encoding(),
use_polyglossia));
}
}
+ cit = usedInsetLayouts_.begin();
+ end = usedInsetLayouts_.end();
+ TextClass::InsetLayouts const & ils = tclass.insetLayouts();
+ for (; cit != end; ++cit) {
+ TextClass::InsetLayouts::const_iterator it = ils.find(*cit);
+ if (it == ils.end())
+ continue;
+ // language dependent commands (once per document)
+ snippets.insert(i18npreamble(it->second.langpreamble(),
+ buffer().language(),
+ buffer().params().encoding(),
+ use_polyglossia));
+ // commands for language changing (for multilanguage documents)
+ if ((use_babel || use_polyglossia) && !UsedLanguages_.empty()) {
+ snippets.insert(i18npreamble(
+ it->second.babelpreamble(),
+ buffer().language(),
+ buffer().params().encoding(),
+ use_polyglossia));
+ for (lang_it lit = lbeg; lit != lend; ++lit)
+ snippets.insert(i18npreamble(
+ it->second.babelpreamble(),
+ *lit,
+ buffer().params().encoding(),
+ use_polyglossia));
+ }
+ }
+
odocstringstream tcpreamble;
set<docstring>::const_iterator const send = snippets.end();
set<docstring>::const_iterator it = snippets.begin();