#include "Context.h"
#include "Encoding.h"
#include "FloatList.h"
+#include "LaTeXPackages.h"
#include "Layout.h"
#include "Length.h"
+#include "Preamble.h"
+
+#include "insets/ExternalTemplate.h"
#include "support/lassert.h"
#include "support/convert.h"
#include "support/FileName.h"
#include "support/filetools.h"
#include "support/lstrings.h"
+#include "support/lyxtime.h"
#include <algorithm>
#include <iostream>
char const * const known_coded_ref_commands[] = { "ref", "pageref", "vref",
"vpageref", "formatted", "eqref", 0 };
+/**
+ * Known polyglossia language names (including variants).
+ * Note: keep this in sync with coded_polyglossia_languages line by line!
+ */
+const char * const polyglossia_languages[] = {
+"albanian", "croatian", "hebrew", "norsk", "swedish", "amharic", "czech", "hindi",
+"nynorsk", "syriac", "arabic", "danish", "icelandic", "occitan", "tamil",
+"armenian", "divehi", "interlingua", "polish", "telugu", "asturian", "dutch",
+"irish", "portuges", "thai", "bahasai", "english", "italian", "romanian", "turkish",
+"bahasam", "esperanto", "lao", "russian", "turkmen", "basque", "estonian", "latin",
+"samin", "ukrainian", "bengali", "farsi", "latvian", "sanskrit", "urdu", "brazil",
+"brazilian", "finnish", "lithuanian", "scottish", "usorbian", "breton", "french",
+"lsorbian", "serbian", "vietnamese", "bulgarian", "galician", "magyar", "slovak",
+"welsh", "catalan", "german", "malayalam", "slovenian", "coptic", "greek",
+"marathi", "spanish",
+"american", "ancient", "australian", "british", "monotonic", "newzealand",
+"polytonic", 0};
+
+/**
+ * the same as polyglossia_languages with .lyx names
+ * please keep this in sync with polyglossia_languages line by line!
+ */
+const char * const coded_polyglossia_languages[] = {
+"albanian", "croatian", "hebrew", "norsk", "swedish", "amharic", "czech", "hindi",
+"nynorsk", "syriac", "arabic_arabi", "danish", "icelandic", "occitan", "tamil",
+"armenian", "divehi", "interlingua", "polish", "telugu", "asturian", "dutch",
+"irish", "portuges", "thai", "bahasa", "english", "italian", "romanian", "turkish",
+"bahasam", "esperanto", "lao", "russian", "turkmen", "basque", "estonian", "latin",
+"samin", "ukrainian", "bengali", "farsi", "latvian", "sanskrit", "urdu", "brazilian",
+"brazilian", "finnish", "lithuanian", "scottish", "uppersorbian", "breton", "french",
+"lowersorbian", "serbian", "vietnamese", "bulgarian", "galician", "magyar", "slovak",
+"welsh", "catalan", "ngerman", "malayalam", "slovene", "coptic", "greek",
+"marathi", "spanish",
+"american", "ancientgreek", "australian", "british", "greek", "newzealand",
+"polutonikogreek", 0};
+
+/**
+ * supported CJK encodings
+ */
+const char * const supported_CJK_encodings[] = {
+"EUC-JP", "KS", "GB", "UTF8", 0};
+
+/**
+ * the same as supported_CJK_encodings with their corresponding LyX language name
+ * please keep this in sync with supported_CJK_encodings line by line!
+ */
+const char * const coded_supported_CJK_encodings[] = {
+"japanese-cjk", "korean", "chinese-simplified", "chinese-traditional", 0};
+
+// Translate a LaTeX CJK encoding name to the corresponding LyX
+// language name. Encodings not listed in supported_CJK_encodings
+// are returned unchanged.
+string CJK2lyx(string const & encoding)
+{
+	char const * const * const pos =
+		is_known(encoding, supported_CJK_encodings);
+	if (!pos)
+		return encoding;
+	return coded_supported_CJK_encodings[pos - supported_CJK_encodings];
+}
+
/*!
* natbib commands.
* The starred forms are also known except for "citefullauthor",
char const * const known_font_families[] = { "rmfamily", "sffamily",
"ttfamily", 0};
-/// the same as known_old_font_families and known_font_families with .lyx names
+/// LaTeX names for font family changing commands
+char const * const known_text_font_families[] = { "textrm", "textsf",
+"texttt", 0};
+
+/// The same as known_old_font_families, known_font_families and
+/// known_text_font_families with .lyx names
char const * const known_coded_font_families[] = { "roman", "sans",
"typewriter", 0};
/// LaTeX names for font series
char const * const known_font_series[] = { "bfseries", "mdseries", 0};
-/// the same as known_old_font_series and known_font_series with .lyx names
+/// LaTeX names for font series changing commands
+char const * const known_text_font_series[] = { "textbf", "textmd", 0};
+
+/// The same as known_old_font_series, known_font_series and
+/// known_text_font_series with .lyx names
char const * const known_coded_font_series[] = { "bold", "medium", 0};
/// LaTeX 2.09 names for font shapes
char const * const known_font_shapes[] = { "itshape", "slshape", "scshape",
"upshape", 0};
-/// the same as known_old_font_shapes and known_font_shapes with .lyx names
+/// LaTeX names for font shape changing commands
+char const * const known_text_font_shapes[] = { "textit", "textsl", "textsc",
+"textup", 0};
+
+/// The same as known_old_font_shapes, known_font_shapes and
+/// known_text_font_shapes with .lyx names
char const * const known_coded_font_shapes[] = { "italic", "slanted",
"smallcaps", "up", 0};
+/// Known special characters which need skip_spaces_braces() afterwards
+char const * const known_special_chars[] = {"ldots", "lyxarrow",
+"textcompwordmark", "slash", 0};
+
+/// the same as known_special_chars with .lyx names
+char const * const known_coded_special_chars[] = {"ldots{}", "menuseparator",
+"textcompwordmark{}", "slash{}", 0};
+
/*!
* Graphics file extensions known by the dvips driver of the graphics package.
* These extensions are used to complete the filename of an included
char const * const known_coded_phrases[] = {"LyX", "TeX", "LaTeX2e", "LaTeX", 0};
int const known_phrase_lengths[] = {3, 5, 7, 0};
+// string to store the float type to be able to determine the type of subfloats
+string float_type = "";
+
/// splits "x=z, y=b" into a map and an ordered keyword vector
void split_map(string const & s, map<string, string> & res, vector<string> & keys)
continue;
}
s = s.substr(i);
+ bool termination;
docstring rem;
- docstring parsed = encodings.fromLaTeXCommand(s, rem,
- Encodings::TEXT_CMD);
+ set<string> req;
+ docstring parsed = encodings.fromLaTeXCommand(s,
+ Encodings::TEXT_CMD, termination, rem, &req);
+ set<string>::const_iterator it = req.begin();
+ set<string>::const_iterator en = req.end();
+ for (; it != en; ++it)
+ preamble.registerAutomaticallyLoadedPackage(*it);
os << parsed;
s = rem;
if (s.empty() || s[0] != '\\')
should be handled by this function:
- abc \j{} xyz
- abc \j {} xyz
- - abc \j
+ - abc \j
{} xyz
- abc \j %comment
{} xyz
while (optargs < context.layout->optargs) {
eat_whitespace(p, os, context, false);
if (p.next_token().cat() == catEscape ||
- p.next_token().character() != '[')
+ p.next_token().character() != '[')
break;
p.get_token(); // eat '['
begin_inset(os, "Argument\n");
for (size_t i = 0; i < no_arguments; ++i) {
switch (template_arguments[i]) {
case required:
+ case req_group:
// This argument contains regular LaTeX
handle_ert(os, ert + '{', context);
eat_whitespace(p, os, context, false);
- parse_text(p, os, FLAG_ITEM, outer, context);
+ if (template_arguments[i] == required)
+ parse_text(p, os, FLAG_ITEM, outer, context);
+ else
+ parse_text_snippet(p, os, FLAG_ITEM, outer, context);
ert = "}";
break;
case item:
else
ert += p.verbatim_item();
break;
+ case displaymath:
case verbatim:
// This argument may contain special characters
ert += '{' + p.verbatim_item() + '}';
break;
case optional:
+ case opt_group:
// true because we must not eat whitespace
// if an optional arg follows we must not strip the
// brackets from this one
string height_unit = "in";
string height_special = "totalheight";
string latex_height;
+ string width_value;
+ string width_unit;
+ string latex_width;
+ string width_special = "none";
if (!inner_type.empty() && p.hasOpt()) {
- position = p.getArg('[', ']');
+ if (inner_type != "makebox")
+ position = p.getArg('[', ']');
+ else {
+ latex_width = p.getArg('[', ']');
+ translate_box_len(latex_width, width_value, width_unit, width_special);
+ position = "t";
+ }
if (position != "t" && position != "c" && position != "b") {
cerr << "invalid position " << position << " for "
<< inner_type << endl;
position = "c";
}
if (p.hasOpt()) {
- latex_height = p.getArg('[', ']');
- translate_box_len(latex_height, height_value, height_unit, height_special);
+ if (inner_type != "makebox") {
+ latex_height = p.getArg('[', ']');
+ translate_box_len(latex_height, height_value, height_unit, height_special);
+ } else
+ hor_pos = p.getArg('[', ']');
if (p.hasOpt()) {
inner_pos = p.getArg('[', ']');
}
}
}
- string width_value;
- string width_unit;
- string latex_width;
if (inner_type.empty()) {
- if (special.empty())
- latex_width = "\\columnwidth";
+ if (special.empty() && outer_type != "framebox")
+ latex_width = "1\\columnwidth";
else {
Parser p2(special);
latex_width = p2.getArg('[', ']');
}
}
}
- } else
+ } else if (inner_type != "makebox")
latex_width = p.verbatim_item();
+ // if e.g. only \ovalbox{content} was used, set the width to 1\columnwidth
+ // as this is LyX's standard for such cases (except for makebox)
+ // \framebox is more special and handled below
+ if (latex_width.empty() && inner_type != "makebox"
+ && outer_type != "framebox")
+ latex_width = "1\\columnwidth";
+
translate_len(latex_width, width_value, width_unit);
+
+ bool shadedparbox = false;
+ if (inner_type == "shaded") {
+ eat_whitespace(p, os, parent_context, false);
+ if (outer_type == "parbox") {
+ // Eat '{'
+ if (p.next_token().cat() == catBegin)
+ p.get_token();
+ eat_whitespace(p, os, parent_context, false);
+ shadedparbox = true;
+ }
+ p.get_token();
+ p.getArg('{', '}');
+ }
+ // If we already read the inner box we have to push the inner env
+ if (!outer_type.empty() && !inner_type.empty() &&
+ (inner_flags & FLAG_END))
+ active_environments.push_back(inner_type);
// LyX can't handle length variables
bool use_ert = contains(width_unit, '\\') || contains(height_unit, '\\');
if (!use_ert && !outer_type.empty() && !inner_type.empty()) {
else
p.verbatim_item();
p.skip_spaces(true);
- if ((outer_type == "framed" && p.next_token().asInput() != "\\end") ||
- (outer_type != "framed" && p.next_token().cat() != catEnd)) {
+ bool const outer_env(outer_type == "framed" || outer_type == "minipage");
+ if ((outer_env && p.next_token().asInput() != "\\end") ||
+ (!outer_env && p.next_token().cat() != catEnd)) {
// something is between the end of the inner box and
// the end of the outer box, so we need to use ERT.
use_ert = true;
}
p.popPosition();
}
+ // if only \makebox{content} was used we can set its width to 1\width
+ // because this is identical, and also identical to \mbox
+ // this doesn't work for \framebox{content}, thus we have to use ERT for this
+ if (latex_width.empty() && inner_type == "makebox") {
+ width_value = "1";
+ width_unit = "in";
+ width_special = "width";
+ } else if (latex_width.empty() && outer_type == "framebox") {
+ use_ert = true;
+ }
if (use_ert) {
ostringstream ss;
if (!outer_type.empty()) {
}
}
if (!inner_type.empty()) {
- if (inner_flags & FLAG_END)
- ss << "\\begin{" << inner_type << '}';
- else
- ss << '\\' << inner_type;
+ if (inner_type != "shaded") {
+ if (inner_flags & FLAG_END)
+ ss << "\\begin{" << inner_type << '}';
+ else
+ ss << '\\' << inner_type;
+ }
if (!position.empty())
ss << '[' << position << ']';
if (!latex_height.empty())
if (!(inner_flags & FLAG_END))
ss << '{';
}
+ if (inner_type == "shaded")
+ ss << "\\begin{shaded}";
handle_ert(os, ss.str(), parent_context);
if (!inner_type.empty()) {
parse_text(p, os, inner_flags, outer, parent_context);
handle_ert(os, "}", parent_context);
}
if (!outer_type.empty()) {
+ // If we already read the inner box we have to pop
+ // the inner env
+ if (!inner_type.empty() && (inner_flags & FLAG_END))
+ active_environments.pop_back();
+
+ // Ensure that the end of the outer box is parsed correctly:
+ // The opening brace has been eaten by parse_outer_box()
+ if (!outer_type.empty() && (outer_flags & FLAG_ITEM)) {
+ outer_flags &= ~FLAG_ITEM;
+ outer_flags |= FLAG_BRACE_LAST;
+ }
parse_text(p, os, outer_flags, outer, parent_context);
if (outer_flags & FLAG_END)
handle_ert(os, "\\end{" + outer_type + '}',
parent_context);
+ else if (inner_type.empty() && outer_type == "framebox")
+ // in this case it is already closed later
+ ;
else
handle_ert(os, "}", parent_context);
}
os << "Boxed\n";
else if (outer_type == "shadowbox")
os << "Shadowbox\n";
- else if (outer_type == "shaded")
+ else if ((outer_type == "shaded" && inner_type.empty()) ||
+ (outer_type == "minipage" && inner_type == "shaded") ||
+ (outer_type == "parbox" && inner_type == "shaded")) {
os << "Shaded\n";
- else if (outer_type == "doublebox")
+ preamble.registerAutomaticallyLoadedPackage("color");
+ } else if (outer_type == "doublebox")
os << "Doublebox\n";
else if (outer_type.empty())
os << "Frameless\n";
os << "hor_pos \"" << hor_pos << "\"\n";
os << "has_inner_box " << !inner_type.empty() << "\n";
os << "inner_pos \"" << inner_pos << "\"\n";
- os << "use_parbox " << (inner_type == "parbox") << '\n';
- os << "use_makebox 0\n";
+ os << "use_parbox " << (inner_type == "parbox" || shadedparbox)
+ << '\n';
+ os << "use_makebox " << (inner_type == "makebox") << '\n';
os << "width \"" << width_value << width_unit << "\"\n";
- os << "special \"none\"\n";
+ os << "special \"" << width_special << "\"\n";
os << "height \"" << height_value << height_unit << "\"\n";
os << "height_special \"" << height_special << "\"\n";
os << "status open\n\n";
- Context context(true, parent_context.textclass);
- context.font = parent_context.font;
- // FIXME, the inset layout should be plain, not standard, see bug #7846
+ // Unfortunately we can't use parse_text_in_inset:
+ // InsetBox::forcePlainLayout() is hard coded and does not
+ // use the inset layout. Apart from that, we call parse_text
+ // up to two times, but need only one check_end_layout.
+ bool const forcePlainLayout =
+ (!inner_type.empty() || inner_type == "makebox") &&
+ outer_type != "shaded" && outer_type != "framed";
+ Context context(true, parent_context.textclass);
+ if (forcePlainLayout)
+ context.layout = &context.textclass.plainLayout();
+ else
+ context.font = parent_context.font;
- // If we have no inner box the contens will be read with the outer box
+ // If we have no inner box the contents will be read with the outer box
if (!inner_type.empty())
parse_text(p, os, inner_flags, outer, context);
// Find end of outer box, output contents if inner_type is
// empty and output possible comments
if (!outer_type.empty()) {
+ // If we already read the inner box we have to pop
+ // the inner env
+ if (!inner_type.empty() && (inner_flags & FLAG_END))
+ active_environments.pop_back();
// This does not output anything but comments if
// inner_type is not empty (see use_ert)
parse_text(p, os, outer_flags, outer, context);
}
string inner;
unsigned int inner_flags = 0;
+ p.pushPosition();
+ if (outer_type == "minipage" || outer_type == "parbox") {
+ p.skip_spaces(true);
+ while (p.hasOpt()) {
+ p.getArg('[', ']');
+ p.skip_spaces(true);
+ }
+ p.getArg('{', '}');
+ p.skip_spaces(true);
+ if (outer_type == "parbox") {
+ // Eat '{'
+ if (p.next_token().cat() == catBegin)
+ p.get_token();
+ p.skip_spaces(true);
+ }
+ }
if (outer_type == "shaded") {
// These boxes never have an inner box
;
inner = p.get_token().cs();
inner_flags = FLAG_ITEM;
} else if (p.next_token().asInput() == "\\begin") {
- // Is this a minipage?
+ // Is this a minipage or shaded box?
p.pushPosition();
p.get_token();
inner = p.getArg('{', '}');
p.popPosition();
- if (inner == "minipage") {
- p.get_token();
- p.getArg('{', '}');
- eat_whitespace(p, os, parent_context, false);
+ if (inner == "minipage" || inner == "shaded")
inner_flags = FLAG_END;
- } else
+ else
inner = "";
}
+ p.popPosition();
if (inner_flags == FLAG_END) {
- active_environments.push_back(inner);
+ if (inner != "shaded")
+ {
+ p.get_token();
+ p.getArg('{', '}');
+ eat_whitespace(p, os, parent_context, false);
+ }
parse_box(p, os, flags, FLAG_END, outer, parent_context,
outer_type, special, inner);
- active_environments.pop_back();
} else {
+ if (inner_flags == FLAG_ITEM) {
+ p.get_token();
+ eat_whitespace(p, os, parent_context, false);
+ }
parse_box(p, os, flags, inner_flags, outer, parent_context,
outer_type, special, inner);
}
}
-void parse_listings(Parser & p, ostream & os, Context & parent_context)
+void parse_listings(Parser & p, ostream & os, Context & parent_context, bool in_line)
{
parent_context.check_layout(os);
begin_inset(os, "listings\n");
- os << "inline false\n"
- << "status collapsed\n";
+ if (p.hasOpt()) {
+ string arg = p.verbatimOption();
+ os << "lstparams " << '"' << arg << '"' << '\n';
+ }
+ if (in_line)
+ os << "inline true\n";
+ else
+ os << "inline false\n";
+ os << "status collapsed\n";
Context context(true, parent_context.textclass);
context.layout = &parent_context.textclass.plainLayout();
- context.check_layout(os);
- string const s = p.verbatimEnvironment("lstlisting");
+ string s;
+ if (in_line) {
+ s = p.plainCommand('!', '!', "lstinline");
+ context.new_paragraph(os);
+ context.check_layout(os);
+ } else
+ s = p.plainEnvironment("lstlisting");
for (string::const_iterator it = s.begin(), et = s.end(); it != et; ++it) {
if (*it == '\\')
os << "\n\\backslash\n";
parse_math(p, os, FLAG_END, MATH_MODE);
os << "\\end{" << name << "}";
end_inset(os);
+ if (is_display_math_env(name)) {
+ // Prevent the conversion of a line break to a space
+ // (bug 7668). This does not change the output, but
+ // looks ugly in LyX.
+ eat_whitespace(p, os, parent_context, false);
+ }
}
- else if (name == "tabular" || name == "longtable") {
+ else if (is_known(name, polyglossia_languages)) {
+ // We must begin a new paragraph if not already done
+ if (! parent_context.atParagraphStart()) {
+ parent_context.check_end_layout(os);
+ parent_context.new_paragraph(os);
+ }
+ // save the language in the context so that it is
+ // handled by parse_text
+ parent_context.font.language = polyglossia2lyx(name);
+ parse_text(p, os, FLAG_END, outer, parent_context);
+ // Just in case the environment is empty
+ parent_context.extra_stuff.erase();
+ // We must begin a new paragraph to reset the language
+ parent_context.new_paragraph(os);
+ p.skip_spaces();
+ }
+
+ else if (unstarred_name == "tabular" || name == "longtable") {
eat_whitespace(p, os, parent_context, false);
+ string width = "0pt";
+ if (name == "tabular*") {
+ width = lyx::translate_len(p.getArg('{', '}'));
+ eat_whitespace(p, os, parent_context, false);
+ }
parent_context.check_layout(os);
begin_inset(os, "Tabular ");
- handle_tabular(p, os, name == "longtable", parent_context);
+ handle_tabular(p, os, name, width, parent_context);
end_inset(os);
p.skip_spaces();
}
else if (parent_context.textclass.floats().typeExist(unstarred_name)) {
+ eat_whitespace(p, os, parent_context, false);
+ string const opt = p.hasOpt() ? p.getArg('[', ']') : string();
eat_whitespace(p, os, parent_context, false);
parent_context.check_layout(os);
begin_inset(os, "Float " + unstarred_name + "\n");
- if (p.hasOpt())
- os << "placement " << p.getArg('[', ']') << '\n';
+ // store the float type for subfloats
+ // subfloats only work with figures and tables
+ if (unstarred_name == "figure")
+ float_type = unstarred_name;
+ else if (unstarred_name == "table")
+ float_type = unstarred_name;
+ else
+ float_type = "";
+ if (!opt.empty())
+ os << "placement " << opt << '\n';
+ if (contains(opt, "H"))
+ preamble.registerAutomaticallyLoadedPackage("float");
+ else {
+ Floating const & fl = parent_context.textclass.floats()
+ .getType(unstarred_name);
+ if (!fl.floattype().empty() && fl.usesFloatPkg())
+ preamble.registerAutomaticallyLoadedPackage("float");
+ }
+
os << "wide " << convert<string>(is_starred)
<< "\nsideways false"
<< "\nstatus open\n\n";
// we must make sure that the next item gets a \begin_layout.
parent_context.new_paragraph(os);
p.skip_spaces();
+ // the float is parsed thus delete the type
+ float_type = "";
}
else if (unstarred_name == "sidewaysfigure"
// we must make sure that the next item gets a \begin_layout.
parent_context.new_paragraph(os);
p.skip_spaces();
+ preamble.registerAutomaticallyLoadedPackage("rotfloat");
+ }
+
+ else if (name == "wrapfigure" || name == "wraptable") {
+ // syntax is \begin{wrapfigure}[lines]{placement}[overhang]{width}
+ eat_whitespace(p, os, parent_context, false);
+ parent_context.check_layout(os);
+ // default values
+ string lines = "0";
+ string overhang = "0col%";
+ // parse
+ if (p.hasOpt())
+ lines = p.getArg('[', ']');
+ string const placement = p.getArg('{', '}');
+ if (p.hasOpt())
+ overhang = p.getArg('[', ']');
+ string const width = p.getArg('{', '}');
+ // write
+ if (name == "wrapfigure")
+ begin_inset(os, "Wrap figure\n");
+ else
+ begin_inset(os, "Wrap table\n");
+ os << "lines " << lines
+ << "\nplacement " << placement
+ << "\noverhang " << lyx::translate_len(overhang)
+ << "\nwidth " << lyx::translate_len(width)
+ << "\nstatus open\n\n";
+ parse_text_in_inset(p, os, FLAG_END, outer, parent_context);
+ end_inset(os);
+ // We don't need really a new paragraph, but
+ // we must make sure that the next item gets a \begin_layout.
+ parent_context.new_paragraph(os);
+ p.skip_spaces();
+ preamble.registerAutomaticallyLoadedPackage("wrapfig");
}
else if (name == "minipage") {
eat_whitespace(p, os, parent_context, false);
- parse_box(p, os, 0, FLAG_END, outer, parent_context, "", "", name);
+ // Test whether this is an outer box of a shaded box
+ p.pushPosition();
+ // swallow arguments
+ while (p.hasOpt()) {
+ p.getArg('[', ']');
+ p.skip_spaces(true);
+ }
+ p.getArg('{', '}');
+ p.skip_spaces(true);
+ Token t = p.get_token();
+ bool shaded = false;
+ if (t.asInput() == "\\begin") {
+ p.skip_spaces(true);
+ if (p.getArg('{', '}') == "shaded")
+ shaded = true;
+ }
+ p.popPosition();
+ if (shaded)
+ parse_outer_box(p, os, FLAG_END, outer,
+ parent_context, name, "shaded");
+ else
+ parse_box(p, os, 0, FLAG_END, outer, parent_context,
+ "", "", name);
p.skip_spaces();
}
end_inset(os);
p.skip_spaces();
skip_braces(p); // eat {} that might by set by LyX behind comments
+ preamble.registerAutomaticallyLoadedPackage("verbatim");
+ }
+
+ else if (name == "verbatim") {
+ os << "\n\\end_layout\n\n\\begin_layout Verbatim\n";
+ string const s = p.plainEnvironment("verbatim");
+ string::const_iterator it2 = s.begin();
+ for (string::const_iterator it = s.begin(), et = s.end(); it != et; ++it) {
+ if (*it == '\\')
+ os << "\\backslash ";
+ else if (*it == '\n') {
+ it2 = it + 1;
+ // avoid adding an empty paragraph at the end
+ // FIXME: if there are 2 consecutive newlines at the end ignore it
+ // because LyX will re-add a \n
+ // This hack must be removed once bug 8049 is fixed!
+ if ((it + 1 != et) && (it + 2 != et || *it2 != '\n'))
+ os << "\n\\end_layout\n\\begin_layout Verbatim\n";
+ } else
+ os << *it;
+ }
+ os << "\n\\end_layout\n\n";
+ p.skip_spaces();
+ // reset to Standard layout
+ os << "\n\\begin_layout Standard\n";
+ }
+
+ else if (name == "CJK") {
+ // the scheme is \begin{CJK}{encoding}{mapping}{text}
+ // It is impossible to decide if a CJK environment was in its own paragraph or within
+ // a line. We therefore always assume a paragraph since the latter is a rare case.
+ eat_whitespace(p, os, parent_context, false);
+ parent_context.check_end_layout(os);
+ // store the encoding to be able to reset it
+ string const encoding_old = p.encoding_latex_;
+ string const encoding = p.getArg('{', '}');
+ // SJIS and BIG5 don't work with LaTeX according to the comment in unicode.cpp
+ // JIS does not work with LyX's encoding conversion
+ if (encoding != "SJIS" && encoding != "BIG5" && encoding != "JIS")
+ p.setEncoding(encoding);
+ else
+ p.setEncoding("utf8");
+ // LyX doesn't support the second argument so if
+ // this is used we need to output everything as ERT
+ string const mapping = p.getArg('{', '}');
+ if ( (!mapping.empty() && mapping != " ")
+ || (!is_known(encoding, supported_CJK_encodings))) {
+ parent_context.check_layout(os);
+ handle_ert(os, "\\begin{" + name + "}{" + encoding + "}{" + mapping + "}",
+ parent_context);
+ // we must parse the content as verbatim because e.g. SJIS can contain
+ // normally invalid characters
+ string const s = p.plainEnvironment("CJK");
+ string::const_iterator it2 = s.begin();
+ for (string::const_iterator it = s.begin(), et = s.end(); it != et; ++it) {
+ if (*it == '\\')
+ handle_ert(os, "\\", parent_context);
+ else if (*it == '$')
+ handle_ert(os, "$", parent_context);
+ else
+ os << *it;
+ }
+ p.skip_spaces();
+ handle_ert(os, "\\end{" + name + "}",
+ parent_context);
+ } else {
+ string const lang = CJK2lyx(encoding);
+ // store the language because we must reset it at the end
+ string const lang_old = parent_context.font.language;
+ parent_context.font.language = lang;
+ parse_text_in_inset(p, os, FLAG_END, outer, parent_context);
+ parent_context.font.language = lang_old;
+ parent_context.new_paragraph(os);
+ }
+ p.encoding_latex_ = encoding_old;
+ p.skip_spaces();
}
else if (name == "lyxgreyedout") {
parse_text_in_inset(p, os, FLAG_END, outer, parent_context);
end_inset(os);
p.skip_spaces();
+ if (!preamble.notefontcolor().empty())
+ preamble.registerAutomaticallyLoadedPackage("color");
}
else if (name == "framed" || name == "shaded") {
else if (name == "lstlisting") {
eat_whitespace(p, os, parent_context, false);
- // FIXME handle listings with parameters
- if (p.hasOpt())
- parse_unknown_environment(p, name, os, FLAG_END,
- outer, parent_context);
- else
- parse_listings(p, os, parent_context);
+ // FIXME handle the automatic color package loading
+ // uwestoehr asks: In what case color is loaded?
+ parse_listings(p, os, parent_context, false);
p.skip_spaces();
}
parent_context.add_extra_stuff("\\align center\n");
else if (name == "singlespace")
parent_context.add_extra_stuff("\\paragraph_spacing single\n");
- else if (name == "onehalfspace")
+ else if (name == "onehalfspace") {
parent_context.add_extra_stuff("\\paragraph_spacing onehalf\n");
- else if (name == "doublespace")
+ preamble.registerAutomaticallyLoadedPackage("setspace");
+ } else if (name == "doublespace") {
parent_context.add_extra_stuff("\\paragraph_spacing double\n");
- else if (name == "spacing")
+ preamble.registerAutomaticallyLoadedPackage("setspace");
+ } else if (name == "spacing") {
parent_context.add_extra_stuff("\\paragraph_spacing other " + p.verbatim_item() + "\n");
+ preamble.registerAutomaticallyLoadedPackage("setspace");
+ }
parse_text(p, os, FLAG_END, outer, parent_context);
// Just in case the environment is empty
parent_context.extra_stuff.erase();
while (optargs < context.layout->optargs) {
eat_whitespace(p, os, context, false);
if (p.next_token().cat() == catEscape ||
- p.next_token().character() != '[')
+ p.next_token().character() != '[')
break;
p.get_token(); // eat '['
if (need_layout) {
context.check_end_deeper(os);
parent_context.new_paragraph(os);
p.skip_spaces();
+ if (!preamble.titleLayoutFound())
+ preamble.titleLayoutFound(newlayout->intitle);
+ set<string> const & req = newlayout->requires();
+ set<string>::const_iterator it = req.begin();
+ set<string>::const_iterator en = req.end();
+ for (; it != en; ++it)
+ preamble.registerAutomaticallyLoadedPackage(*it);
}
// The single '=' is meant here.
handle_ert(os, command + ert, context);
}
+
+/*!
+ * Register the LaTeX packages required by the external template
+ * \p name as automatically loaded, so that they are not written
+ * to the user preamble.
+ * The template's "PDFLaTeX" format is preferred when the global
+ * pdflatex flag is set (presumably: output via pdflatex — confirm
+ * against the flag's definition); otherwise, or when the template
+ * specifies no PDFLaTeX format, the "LaTeX" format is used.
+ */
+void registerExternalTemplatePackages(string const & name)
+{
+ external::TemplateManager const & etm = external::TemplateManager::get();
+ external::Template const * const et = etm.getTemplateByName(name);
+ // unknown template name: nothing to register
+ if (!et)
+  return;
+ external::Template::Formats::const_iterator cit = et->formats.end();
+ if (pdflatex)
+  cit = et->formats.find("PDFLaTeX");
+ if (cit == et->formats.end())
+  // If the template has not specified a PDFLaTeX output,
+  // we try the LaTeX format.
+  cit = et->formats.find("LaTeX");
+ // the template provides neither format: nothing to register
+ if (cit == et->formats.end())
+  return;
+ vector<string>::const_iterator qit = cit->second.requirements.begin();
+ vector<string>::const_iterator qend = cit->second.requirements.end();
+ for (; qit != qend; ++qit)
+  preamble.registerAutomaticallyLoadedPackage(*qit);
+}
+
} // anonymous namespace
{
Layout const * newlayout = 0;
InsetLayout const * newinsetlayout = 0;
+ char const * const * where = 0;
// Store the latest bibliographystyle and nocite{*} option
// (needed for bibtex inset)
string btprint;
- string bibliographystyle;
- bool const use_natbib = used_packages.find("natbib") != used_packages.end();
- bool const use_jurabib = used_packages.find("jurabib") != used_packages.end();
+ string bibliographystyle = "default";
+ bool const use_natbib = preamble.isPackageUsed("natbib");
+ bool const use_jurabib = preamble.isPackageUsed("jurabib");
string last_env;
while (p.good()) {
Token const & t = p.get_token();
+ // it is impossible to determine the correct document language if CJK is used.
+ // Therefore write a note at the beginning of the document
+ if (have_CJK) {
+ context.check_layout(os);
+ begin_inset(os, "Note Note\n");
+ os << "status open\n\\begin_layout Plain Layout\n"
+ << "\\series bold\n"
+ << "Important information:\n"
+ << "\\end_layout\n\n"
+ << "\\begin_layout Plain Layout\n"
+ << "This document contains text in Chinese, Japanese or Korean.\n"
+ << " It was therefore impossible for tex2lyx to set the correct document langue for your document."
+ << " Please set in the document settings by yourself!\n"
+ << "\\end_layout\n";
+ end_inset(os);
+ have_CJK = false;
+ }
+
+ // it is impossible to determine the correct encoding for non-CJK Japanese.
+ // Therefore write a note at the beginning of the document
+ if (is_nonCJKJapanese) {
+ context.check_layout(os);
+ begin_inset(os, "Note Note\n");
+ os << "status open\n\\begin_layout Plain Layout\n"
+ << "\\series bold\n"
+ << "Important information:\n"
+ << "\\end_layout\n\n"
+ << "\\begin_layout Plain Layout\n"
+ << "This document is in Japanese (non-CJK).\n"
+ << " It was therefore impossible for tex2lyx to determine the correct encoding."
+ << " The encoding EUC-JP was assumed. If this is incorrect, please set the correct"
+ << " encoding in the document settings.\n"
+ << "\\end_layout\n";
+ end_inset(os);
+ is_nonCJKJapanese = false;
+ }
+
#ifdef FILEDEBUG
debugToken(cerr, t, flags);
#endif
context.check_layout(os);
begin_inset(os, "Formula ");
Token const & n = p.get_token();
- if (n.cat() == catMath && outer) {
+ bool const display(n.cat() == catMath && outer);
+ if (display) {
// TeX's $$...$$ syntax for displayed math
os << "\\[";
parse_math(p, os, FLAG_SIMPLE, MATH_MODE);
os << '$';
}
end_inset(os);
+ if (display) {
+ // Prevent the conversion of a line break to a
+ // space (bug 7668). This does not change the
+ // output, but looks ugly in LyX.
+ eat_whitespace(p, os, context, false);
+ }
}
else if (t.cat() == catSuper || t.cat() == catSub)
os << t.cs();
}
- else if (t.cat() == catBegin &&
- p.next_token().cat() == catEnd) {
+ else if (t.cat() == catBegin) {
+ Token const next = p.next_token();
+ Token const end = p.next_next_token();
+ if (next.cat() == catEnd) {
// {}
Token const prev = p.prev_token();
p.get_token();
; // ignore it in {}`` or -{}-
else
handle_ert(os, "{}", context);
-
- }
-
- else if (t.cat() == catBegin) {
+ } else if (next.cat() == catEscape &&
+ is_known(next.cs(), known_quotes) &&
+ end.cat() == catEnd) {
+ // Something like {\textquoteright} (e.g.
+ // from writer2latex). LyX writes
+ // \textquoteright{}, so we may skip the
+ // braces here for better readability.
+ parse_text_snippet(p, os, FLAG_BRACE_LAST,
+ outer, context);
+ } else {
context.check_layout(os);
// special handling of font attribute changes
Token const prev = p.prev_token();
- Token const next = p.next_token();
TeXFont const oldFont = context.font;
if (next.character() == '[' ||
next.character() == ']' ||
parse_text_snippet(p, os, FLAG_BRACE_LAST,
outer, context);
handle_ert(os, "}", context);
+ }
}
}
parse_math(p, os, FLAG_EQUATION, MATH_MODE);
os << "\\]";
end_inset(os);
+ // Prevent the conversion of a line break to a space
+ // (bug 7668). This does not change the output, but
+ // looks ugly in LyX.
+ eat_whitespace(p, os, context, false);
}
else if (t.cs() == "begin")
- parse_environment(p, os, outer, last_env, context);
+ parse_environment(p, os, outer, last_env,
+ context);
else if (t.cs() == "end") {
if (flags & FLAG_END) {
}
else if (t.cs() == "item") {
- p.skip_spaces();
string s;
- bool optarg = false;
- if (p.next_token().cat() != catEscape &&
- p.next_token().character() == '[') {
- p.get_token(); // eat '['
- s = parse_text_snippet(p, FLAG_BRACK_LAST,
- outer, context);
- optarg = true;
- }
+ bool const optarg = p.hasOpt();
+ if (optarg) {
+ // FIXME: This swallows comments, but we cannot use
+ // eat_whitespace() since we must not output
+ // anything before the item.
+ p.skip_spaces(true);
+ s = p.verbatimOption();
+ } else
+ p.skip_spaces(false);
context.set_item();
context.check_layout(os);
if (context.has_item) {
if (context.layout->labeltype != LABEL_MANUAL) {
// LyX does not support \item[\mybullet]
// in itemize environments
- handle_ert(os, "[", context);
- os << s;
- handle_ert(os, "]", context);
+ Parser p2(s + ']');
+ os << parse_text_snippet(p2,
+ FLAG_BRACK_LAST, outer, context);
} else if (!s.empty()) {
+ // LyX adds braces around the argument,
+ // so we need to remove them here.
+ if (s.size() > 2 && s[0] == '{' &&
+ s[s.size()-1] == '}')
+ s = s.substr(1, s.size()-2);
+ // If the argument contains a space we
+ // must put it into ERT: Otherwise LyX
+ // would misinterpret the space as
+ // item delimiter (bug 7663)
+ if (contains(s, ' ')) {
+ handle_ert(os, s, context);
+ } else {
+ Parser p2(s + ']');
+ os << parse_text_snippet(p2,
+ FLAG_BRACK_LAST,
+ outer, context);
+ }
// The space is needed to separate the
// item from the rest of the sentence.
- os << s << ' ';
+ os << ' ';
eat_whitespace(p, os, context, false);
}
}
else if (t.cs() == "bibitem") {
context.set_item();
context.check_layout(os);
- string label = convert_command_inset_arg(p.getArg('[', ']'));
+ eat_whitespace(p, os, context, false);
+ string label = convert_command_inset_arg(p.verbatimOption());
string key = convert_command_inset_arg(p.verbatim_item());
if (contains(label, '\\') || contains(key, '\\')) {
// LyX can't handle LaTeX commands in labels or keys
}
}
- else if (is_macro(p))
- parse_macro(p, os, context);
+ else if (is_macro(p)) {
+ // catch the case of \def\inputGnumericTable
+ bool macro = true;
+ if (t.cs() == "def") {
+ Token second = p.next_token();
+ if (second.cs() == "inputGnumericTable") {
+ p.pushPosition();
+ p.get_token();
+ skip_braces(p);
+ Token third = p.get_token();
+ p.popPosition();
+ if (third.cs() == "input") {
+ p.get_token();
+ skip_braces(p);
+ p.get_token();
+ string name = normalize_filename(p.verbatim_item());
+ string const path = getMasterFilePath();
+ // We want to preserve relative / absolute filenames,
+ // therefore path is only used for testing
+ // The file extension is in every case ".tex".
+ // So we need to remove this extension and check for
+ // the original one.
+ name = removeExtension(name);
+ if (!makeAbsPath(name, path).exists()) {
+ char const * const Gnumeric_formats[] = {"gnumeric",
+ "ods", "xls", 0};
+ string const Gnumeric_name =
+ find_file(name, path, Gnumeric_formats);
+ if (!Gnumeric_name.empty())
+ name = Gnumeric_name;
+ }
+ if (makeAbsPath(name, path).exists())
+ fix_relative_filename(name);
+ else
+ cerr << "Warning: Could not find file '"
+ << name << "'." << endl;
+ context.check_layout(os);
+ begin_inset(os, "External\n\ttemplate ");
+ os << "GnumericSpreadsheet\n\tfilename "
+ << name << "\n";
+ end_inset(os);
+ context.check_layout(os);
+ macro = false;
+ // register the packages that are automatically loaded
+ // by the Gnumeric template
+ registerExternalTemplatePackages("GnumericSpreadsheet");
+ }
+ }
+ }
+ if (macro)
+ parse_macro(p, os, context);
+ }
else if (t.cs() == "noindent") {
p.skip_spaces();
eat_whitespace(p, os, context, true);
}
+ // Must catch empty dates before findLayout is called below
+ else if (t.cs() == "date") {
+ eat_whitespace(p, os, context, false);
+ p.pushPosition();
+ string const date = p.verbatim_item();
+ p.popPosition();
+ if (date.empty()) {
+ preamble.suppressDate(true);
+ p.verbatim_item();
+ } else {
+ preamble.suppressDate(false);
+ if (context.new_layout_allowed &&
+ (newlayout = findLayout(context.textclass,
+ t.cs(), true))) {
+ // write the layout
+ output_command_layout(os, p, outer,
+ context, newlayout);
+ parse_text_snippet(p, os, FLAG_ITEM, outer, context);
+ if (!preamble.titleLayoutFound())
+ preamble.titleLayoutFound(newlayout->intitle);
+ set<string> const & req = newlayout->requires();
+ set<string>::const_iterator it = req.begin();
+ set<string>::const_iterator en = req.end();
+ for (; it != en; ++it)
+ preamble.registerAutomaticallyLoadedPackage(*it);
+ } else
+ handle_ert(os,
+ "\\date{" + p.verbatim_item() + '}',
+ context);
+ }
+ }
+
// Starred section headings
// Must attempt to parse "Section*" before "Section".
else if ((p.next_token().asInput() == "*") &&
p.get_token();
output_command_layout(os, p, outer, context, newlayout);
p.skip_spaces();
+ if (!preamble.titleLayoutFound())
+ preamble.titleLayoutFound(newlayout->intitle);
+ set<string> const & req = newlayout->requires();
+ for (set<string>::const_iterator it = req.begin(); it != req.end(); ++it)
+ preamble.registerAutomaticallyLoadedPackage(*it);
}
// Section headings and the like
// write the layout
output_command_layout(os, p, outer, context, newlayout);
p.skip_spaces();
+ if (!preamble.titleLayoutFound())
+ preamble.titleLayoutFound(newlayout->intitle);
+ set<string> const & req = newlayout->requires();
+ for (set<string>::const_iterator it = req.begin(); it != req.end(); ++it)
+ preamble.registerAutomaticallyLoadedPackage(*it);
}
else if (t.cs() == "caption") {
p.skip_spaces();
context.check_layout(os);
p.skip_spaces();
- begin_inset(os, "Caption\n\n");
+ begin_inset(os, "Caption\n");
Context newcontext(true, context.textclass);
newcontext.font = context.font;
newcontext.check_layout(os);
newcontext.check_end_layout(os);
}
+ else if (t.cs() == "subfloat") {
+ // the syntax is \subfloat[caption]{content}
+ // whether it is a table or a figure depends on the surrounding float
+ bool has_caption = false;
+ p.skip_spaces();
+ // do nothing if there is no outer float
+ if (!float_type.empty()) {
+ context.check_layout(os);
+ p.skip_spaces();
+ begin_inset(os, "Float " + float_type + "\n");
+ os << "wide false"
+ << "\nsideways false"
+ << "\nstatus collapsed\n\n";
+ // test for caption
+ string caption;
+ if (p.next_token().cat() != catEscape &&
+ p.next_token().character() == '[') {
+ p.get_token(); // eat '['
+ caption = parse_text_snippet(p, FLAG_BRACK_LAST, outer, context);
+ has_caption = true;
+ }
+ // the content
+ parse_text_in_inset(p, os, FLAG_ITEM, outer, context);
+ // the caption comes always as the last
+ if (has_caption) {
+ // we must make sure that the caption gets a \begin_layout
+ os << "\n\\begin_layout Plain Layout";
+ p.skip_spaces();
+ begin_inset(os, "Caption\n");
+ Context newcontext(true, context.textclass);
+ newcontext.font = context.font;
+ newcontext.check_layout(os);
+ os << caption << "\n";
+ newcontext.check_end_layout(os);
+ // We don't really need a new paragraph, but
+ // we must make sure that the next item gets a \begin_layout.
+ //newcontext.new_paragraph(os);
+ end_inset(os);
+ p.skip_spaces();
+ }
+ // We don't really need a new paragraph, but
+ // we must make sure that the next item gets a \begin_layout.
+ if (has_caption)
+ context.new_paragraph(os);
+ end_inset(os);
+ p.skip_spaces();
+ context.check_end_layout(os);
+ // close the layout we opened
+ if (has_caption)
+ os << "\n\\end_layout\n";
+ } else {
+ // if the float type is not supported or there is no surrounding float
+ // output it as ERT
+ if (p.hasOpt()) {
+ string opt_arg = convert_command_inset_arg(p.getArg('[', ']'));
+ handle_ert(os, t.asInput() + '[' + opt_arg +
+ "]{" + p.verbatim_item() + '}', context);
+ } else
+ handle_ert(os, t.asInput() + "{" + p.verbatim_item() + '}', context);
+ }
+ }
+
else if (t.cs() == "includegraphics") {
bool const clip = p.next_token().asInput() == "*";
if (clip)
// Warn about invalid options.
// Check whether some option was given twice.
end_inset(os);
+ preamble.registerAutomaticallyLoadedPackage("graphicx");
}
else if (t.cs() == "footnote" ||
end_inset(os);
}
+ else if (t.cs() == "lstinline") {
+ p.skip_spaces();
+ parse_listings(p, os, context, true);
+ }
+
else if (t.cs() == "ensuremath") {
p.skip_spaces();
context.check_layout(os);
}
else if (t.cs() == "makeindex" || t.cs() == "maketitle") {
- // FIXME: Somehow prevent title layouts if
- // "maketitle" was not found
- // swallow this
- skip_spaces_braces(p);
+ if (preamble.titleLayoutFound()) {
+ // swallow this
+ skip_spaces_braces(p);
+ } else
+ handle_ert(os, t.asInput(), context);
}
- else if (t.cs() == "tableofcontents") {
+ else if (t.cs() == "tableofcontents" || t.cs() == "lstlistoflistings") {
context.check_layout(os);
- begin_command_inset(os, "toc", "tableofcontents");
+ begin_command_inset(os, "toc", t.cs());
end_inset(os);
skip_spaces_braces(p);
+ if (t.cs() == "lstlistoflistings")
+ preamble.registerAutomaticallyLoadedPackage("listings");
}
else if (t.cs() == "listoffigures") {
handle_ert(os, "\\listof{" + name + "}", context);
}
- else if (t.cs() == "textrm")
- parse_text_attributes(p, os, FLAG_ITEM, outer,
- context, "\\family",
- context.font.family, "roman");
-
- else if (t.cs() == "textsf")
- parse_text_attributes(p, os, FLAG_ITEM, outer,
- context, "\\family",
- context.font.family, "sans");
-
- else if (t.cs() == "texttt")
- parse_text_attributes(p, os, FLAG_ITEM, outer,
- context, "\\family",
- context.font.family, "typewriter");
-
- else if (t.cs() == "textmd")
- parse_text_attributes(p, os, FLAG_ITEM, outer,
- context, "\\series",
- context.font.series, "medium");
-
- else if (t.cs() == "textbf")
+ else if ((where = is_known(t.cs(), known_text_font_families)))
parse_text_attributes(p, os, FLAG_ITEM, outer,
- context, "\\series",
- context.font.series, "bold");
+ context, "\\family", context.font.family,
+ known_coded_font_families[where - known_text_font_families]);
- else if (t.cs() == "textup")
+ else if ((where = is_known(t.cs(), known_text_font_series)))
parse_text_attributes(p, os, FLAG_ITEM, outer,
- context, "\\shape",
- context.font.shape, "up");
+ context, "\\series", context.font.series,
+ known_coded_font_series[where - known_text_font_series]);
- else if (t.cs() == "textit")
+ else if ((where = is_known(t.cs(), known_text_font_shapes)))
parse_text_attributes(p, os, FLAG_ITEM, outer,
- context, "\\shape",
- context.font.shape, "italic");
-
- else if (t.cs() == "textsl")
- parse_text_attributes(p, os, FLAG_ITEM, outer,
- context, "\\shape",
- context.font.shape, "slanted");
-
- else if (t.cs() == "textsc")
- parse_text_attributes(p, os, FLAG_ITEM, outer,
- context, "\\shape",
- context.font.shape, "smallcaps");
+ context, "\\shape", context.font.shape,
+ known_coded_font_shapes[where - known_text_font_shapes]);
else if (t.cs() == "textnormal" || t.cs() == "normalfont") {
context.check_layout(os);
parse_text_snippet(p, os, FLAG_ITEM, outer, context);
context.check_layout(os);
os << "\n\\color inherit\n";
+ preamble.registerAutomaticallyLoadedPackage("color");
} else
// for custom defined colors
handle_ert(os, t.asInput() + "{" + color + "}", context);
parse_text_snippet(p, os, FLAG_ITEM, outer, context);
context.check_layout(os);
os << "\n\\bar default\n";
+ preamble.registerAutomaticallyLoadedPackage("ulem");
}
else if (t.cs() == "sout") {
parse_text_snippet(p, os, FLAG_ITEM, outer, context);
context.check_layout(os);
os << "\n\\strikeout default\n";
+ preamble.registerAutomaticallyLoadedPackage("ulem");
}
else if (t.cs() == "uuline" || t.cs() == "uwave" ||
parse_text_snippet(p, os, FLAG_ITEM, outer, context);
context.check_layout(os);
os << "\n\\" << t.cs() << " default\n";
+ if (t.cs() == "uuline" || t.cs() == "uwave")
+ preamble.registerAutomaticallyLoadedPackage("ulem");
+ }
+
+ else if (t.cs() == "lyxadded" || t.cs() == "lyxdeleted") {
+ context.check_layout(os);
+ string name = p.getArg('{', '}');
+ string localtime = p.getArg('{', '}');
+ preamble.registerAuthor(name);
+ Author const & author = preamble.getAuthor(name);
+ // from_ctime() will fail if LyX decides to output the
+ // time in the text language. It might also use a wrong
+ // time zone (if the original LyX document was exported
+ // with a different time zone).
+ time_t ptime = from_ctime(localtime);
+ if (ptime == static_cast<time_t>(-1)) {
+ cerr << "Warning: Could not parse time `" << localtime
+ << "´ for change tracking, using current time instead.\n";
+ ptime = current_time();
+ }
+ if (t.cs() == "lyxadded")
+ os << "\n\\change_inserted ";
+ else
+ os << "\n\\change_deleted ";
+ os << author.bufferId() << ' ' << ptime << '\n';
+ parse_text_snippet(p, os, FLAG_ITEM, outer, context);
+ bool dvipost = LaTeXPackages::isAvailable("dvipost");
+ bool xcolorulem = LaTeXPackages::isAvailable("ulem") &&
+ LaTeXPackages::isAvailable("xcolor");
+ // No need to test for luatex, since luatex comes in
+ // two flavours (dvi and pdf), like latex, and those
+ // are detected by pdflatex.
+ if (pdflatex || xetex) {
+ if (xcolorulem) {
+ preamble.registerAutomaticallyLoadedPackage("ulem");
+ preamble.registerAutomaticallyLoadedPackage("xcolor");
+ preamble.registerAutomaticallyLoadedPackage("pdfcolmk");
+ }
+ } else {
+ if (dvipost) {
+ preamble.registerAutomaticallyLoadedPackage("dvipost");
+ } else if (xcolorulem) {
+ preamble.registerAutomaticallyLoadedPackage("ulem");
+ preamble.registerAutomaticallyLoadedPackage("xcolor");
+ }
+ }
}
- // FIXME, the inset layout should be plain, not standard, see bug #7846
else if (t.cs() == "phantom" || t.cs() == "hphantom" ||
t.cs() == "vphantom") {
context.check_layout(os);
if (t.cs() == "phantom")
begin_inset(os, "Phantom Phantom\n");
if (t.cs() == "hphantom")
- begin_inset(os, "Phantom Hhantom\n");
+ begin_inset(os, "Phantom HPhantom\n");
if (t.cs() == "vphantom")
- begin_inset(os, "Phantom Vhantom\n");
+ begin_inset(os, "Phantom VPhantom\n");
os << "status open\n";
- parse_text_in_inset(p, os, FLAG_ITEM, outer, context);
+ parse_text_in_inset(p, os, FLAG_ITEM, outer, context,
+ "Phantom");
end_inset(os);
}
end_inset(os);
skip_spaces_braces(p);
}
-
+
else if (t.cs() == "lyxline") {
// swallow size argument (it is not used anyway)
p.getArg('{', '}');
// about the empty paragraph.
context.new_paragraph(os);
}
- if (h_paragraph_separation == "indent") {
+ if (preamble.indentParagraphs()) {
// we need to unindent, lest the line be too long
context.add_par_extra_stuff("\\noindent\n");
}
}
else if (t.cs() == "rule") {
- string offset = "";
- string width;
- string thickness;
- if (p.hasOpt())
- offset = p.getArg('[', ']');
- width = p.getArg('{', '}');
- thickness = p.getArg('{', '}');
-
+ string const offset = (p.hasOpt() ? p.getArg('[', ']') : string());
+ string const width = p.getArg('{', '}');
+ string const thickness = p.getArg('{', '}');
context.check_layout(os);
begin_command_inset(os, "line", "rule");
if (!offset.empty())
is_known(p.next_token().cs(), known_phrases))) {
// LyX sometimes puts a \protect in front, so we have to ignore it
// FIXME: This needs to be changed when bug 4752 is fixed.
- char const * const * where = is_known(
+ where = is_known(
t.cs() == "protect" ? p.get_token().cs() : t.cs(),
known_phrases);
context.check_layout(os);
skip_spaces_braces(p);
}
- else if (is_known(t.cs(), known_ref_commands)) {
+ else if ((where = is_known(t.cs(), known_ref_commands))) {
string const opt = p.getOpt();
if (opt.empty()) {
context.check_layout(os);
- char const * const * where = is_known(t.cs(),
- known_ref_commands);
begin_command_inset(os, "ref",
known_coded_ref_commands[where - known_ref_commands]);
os << "reference \""
<< convert_command_inset_arg(p.verbatim_item())
<< "\"\n";
end_inset(os);
+ if (t.cs() == "vref" || t.cs() == "vpageref")
+ preamble.registerAutomaticallyLoadedPackage("varioref");
+
} else {
// LyX does not support optional arguments of ref commands
handle_ert(os, t.asInput() + '[' + opt + "]{" +
p.get_token();
}
char argumentOrder = '\0';
- vector<string> const & options = used_packages["jurabib"];
+ vector<string> const options =
+ preamble.getPackageOptions("jurabib");
if (find(options.begin(), options.end(),
"natbiborder") != options.end())
argumentOrder = 'n';
btprint = key;
}
- else if (t.cs() == "index") {
+ else if (t.cs() == "index" ||
+ (t.cs() == "sindex" && preamble.use_indices() == "true")) {
context.check_layout(os);
- begin_inset(os, "Index idx\n");
- os << "status collapsed\n";
+ string const arg = (t.cs() == "sindex" && p.hasOpt()) ?
+ p.getArg('[', ']') : "";
+ string const kind = arg.empty() ? "idx" : arg;
+ begin_inset(os, "Index ");
+ os << kind << "\nstatus collapsed\n";
parse_text_in_inset(p, os, FLAG_ITEM, false, context, "Index");
end_inset(os);
+ if (kind != "idx")
+ preamble.registerAutomaticallyLoadedPackage("splitidx");
}
else if (t.cs() == "nomenclature") {
<< convert_command_inset_arg(p.verbatim_item())
<< "\"\n";
end_inset(os);
+ preamble.registerAutomaticallyLoadedPackage("nomencl");
}
-
+
else if (t.cs() == "label") {
context.check_layout(os);
begin_command_inset(os, "label", "label");
os << "type \"idx\"\n";
end_inset(os);
skip_spaces_braces(p);
+ preamble.registerAutomaticallyLoadedPackage("makeidx");
+ if (preamble.use_indices() == "true")
+ preamble.registerAutomaticallyLoadedPackage("splitidx");
}
else if (t.cs() == "printnomenclature") {
os << "width \"" << width << '\"';
end_inset(os);
skip_spaces_braces(p);
+ preamble.registerAutomaticallyLoadedPackage("nomencl");
}
else if ((t.cs() == "textsuperscript" || t.cs() == "textsubscript")) {
os << t.cs().substr(4) << '\n';
parse_text_in_inset(p, os, FLAG_ITEM, false, context);
end_inset(os);
+ if (t.cs() == "textsubscript")
+ preamble.registerAutomaticallyLoadedPackage("subscript");
}
- else if (is_known(t.cs(), known_quotes)) {
- char const * const * where = is_known(t.cs(), known_quotes);
+ else if ((where = is_known(t.cs(), known_quotes))) {
context.check_layout(os);
begin_inset(os, "Quotes ");
os << known_coded_quotes[where - known_quotes];
skip_braces(p);
}
- else if (is_known(t.cs(), known_sizes) &&
+ else if ((where = is_known(t.cs(), known_sizes)) &&
context.new_layout_allowed) {
- char const * const * where = is_known(t.cs(), known_sizes);
context.check_layout(os);
TeXFont const oldFont = context.font;
context.font.size = known_coded_sizes[where - known_sizes];
eat_whitespace(p, os, context, false);
}
- else if (is_known(t.cs(), known_font_families) &&
+ else if ((where = is_known(t.cs(), known_font_families)) &&
context.new_layout_allowed) {
- char const * const * where =
- is_known(t.cs(), known_font_families);
context.check_layout(os);
TeXFont const oldFont = context.font;
context.font.family =
eat_whitespace(p, os, context, false);
}
- else if (is_known(t.cs(), known_font_series) &&
+ else if ((where = is_known(t.cs(), known_font_series)) &&
context.new_layout_allowed) {
- char const * const * where =
- is_known(t.cs(), known_font_series);
context.check_layout(os);
TeXFont const oldFont = context.font;
context.font.series =
eat_whitespace(p, os, context, false);
}
- else if (is_known(t.cs(), known_font_shapes) &&
+ else if ((where = is_known(t.cs(), known_font_shapes)) &&
context.new_layout_allowed) {
- char const * const * where =
- is_known(t.cs(), known_font_shapes);
context.check_layout(os);
TeXFont const oldFont = context.font;
context.font.shape =
output_font_change(os, oldFont, context.font);
eat_whitespace(p, os, context, false);
}
- else if (is_known(t.cs(), known_old_font_families) &&
+ else if ((where = is_known(t.cs(), known_old_font_families)) &&
context.new_layout_allowed) {
- char const * const * where =
- is_known(t.cs(), known_old_font_families);
context.check_layout(os);
TeXFont const oldFont = context.font;
context.font.init();
eat_whitespace(p, os, context, false);
}
- else if (is_known(t.cs(), known_old_font_series) &&
+ else if ((where = is_known(t.cs(), known_old_font_series)) &&
context.new_layout_allowed) {
- char const * const * where =
- is_known(t.cs(), known_old_font_series);
context.check_layout(os);
TeXFont const oldFont = context.font;
context.font.init();
eat_whitespace(p, os, context, false);
}
- else if (is_known(t.cs(), known_old_font_shapes) &&
+ else if ((where = is_known(t.cs(), known_old_font_shapes)) &&
context.new_layout_allowed) {
- char const * const * where =
- is_known(t.cs(), known_old_font_shapes);
context.check_layout(os);
TeXFont const oldFont = context.font;
context.font.init();
else if (t.cs() == "selectlanguage") {
context.check_layout(os);
// save the language for the case that a
- // \foreignlanguage is used
-
+ // \foreignlanguage is used
context.font.language = babel2lyx(p.verbatim_item());
os << "\n\\lang " << context.font.language << "\n";
}
context, "\\lang",
context.font.language, lang);
}
+
+ else if (is_known(t.cs().substr(4, string::npos), polyglossia_languages)) {
+ // scheme is \textLANGUAGE{text} where LANGUAGE is in polyglossia_languages[]
+ string lang;
+ // We have to output the whole command if it has an option
+ // because LyX doesn't support this yet, see bug #8214,
+ // Only if there is a single option specifying a variant can we handle it.
+ if (p.hasOpt()) {
+ string langopts = p.getOpt();
+ // check if the option contains a variant, if yes, extract it
+ string::size_type pos_var = langopts.find("variant");
+ string::size_type i = langopts.find(',');
+ if (pos_var != string::npos){
+ string variant;
+ if (i == string::npos) {
+ variant = langopts.substr(pos_var + 8, langopts.length() - pos_var - 9);
+ lang = polyglossia2lyx(variant);
+ parse_text_attributes(p, os, FLAG_ITEM, outer,
+ context, "\\lang",
+ context.font.language, lang);
+ }
+ else
+ handle_ert(os, t.asInput() + langopts, context);
+ } else
+ handle_ert(os, t.asInput() + langopts, context);
+ } else {
+ lang = polyglossia2lyx(t.cs().substr(4, string::npos));
+ parse_text_attributes(p, os, FLAG_ITEM, outer,
+ context, "\\lang",
+ context.font.language, lang);
+ }
+ }
else if (t.cs() == "inputencoding") {
// nothing to write here
p.setEncoding(enc);
}
- else if (t.cs() == "ldots") {
- context.check_layout(os);
- os << "\\SpecialChar \\ldots{}\n";
- skip_spaces_braces(p);
- }
-
- else if (t.cs() == "lyxarrow") {
- context.check_layout(os);
- os << "\\SpecialChar \\menuseparator\n";
- skip_spaces_braces(p);
- }
-
- else if (t.cs() == "textcompwordmark") {
+ else if ((where = is_known(t.cs(), known_special_chars))) {
context.check_layout(os);
- os << "\\SpecialChar \\textcompwordmark{}\n";
- skip_spaces_braces(p);
- }
-
- else if (t.cs() == "slash") {
- context.check_layout(os);
- os << "\\SpecialChar \\slash{}\n";
+ os << "\\SpecialChar \\"
+ << known_coded_special_chars[where - known_special_chars]
+ << '\n';
skip_spaces_braces(p);
}
handle_ert(os, t.asInput(), context);
// accents (see Table 6 in Comprehensive LaTeX Symbol List)
- else if (t.cs().size() == 1
+ else if (t.cs().size() == 1
&& contains("\"'.=^`bcdHkrtuv~", t.cs())) {
context.check_layout(os);
// try to see whether the string is in unicodesymbols
+ bool termination;
docstring rem;
- string command = t.asInput() + "{"
+ string command = t.asInput() + "{"
+ trimSpaceAndEol(p.verbatim_item())
+ "}";
- docstring s = encodings.fromLaTeXCommand(from_utf8(command), rem);
+ set<string> req;
+ docstring s = encodings.fromLaTeXCommand(from_utf8(command),
+ Encodings::TEXT_CMD | Encodings::MATH_CMD,
+ termination, rem, &req);
if (!s.empty()) {
if (!rem.empty())
- cerr << "When parsing " << command
+ cerr << "When parsing " << command
<< ", result is " << to_utf8(s)
<< "+" << to_utf8(rem) << endl;
os << to_utf8(s);
+ for (set<string>::const_iterator it = req.begin(); it != req.end(); ++it)
+ preamble.registerAutomaticallyLoadedPackage(*it);
} else
// we did not find a non-ert version
handle_ert(os, command, context);
begin_inset(os, "External\n");
os << "\ttemplate XFig\n"
<< "\tfilename " << outname << '\n';
+ registerExternalTemplatePackages("XFig");
} else {
begin_command_inset(os, "include", name);
os << "preview false\n"
"filename \"" << outname << "\"\n";
+ if (t.cs() == "verbatiminput")
+ preamble.registerAutomaticallyLoadedPackage("verbatim");
}
end_inset(os);
}
end_inset(os);
}
- else if (t.cs() == "parbox")
- parse_box(p, os, 0, FLAG_ITEM, outer, context, "", "", t.cs());
+ else if (t.cs() == "parbox") {
+ // Test whether this is an outer box of a shaded box
+ p.pushPosition();
+ // swallow arguments
+ while (p.hasOpt()) {
+ p.getArg('[', ']');
+ p.skip_spaces(true);
+ }
+ p.getArg('{', '}');
+ p.skip_spaces(true);
+ // eat the '{'
+ if (p.next_token().cat() == catBegin)
+ p.get_token();
+ p.skip_spaces(true);
+ Token to = p.get_token();
+ bool shaded = false;
+ if (to.asInput() == "\\begin") {
+ p.skip_spaces(true);
+ if (p.getArg('{', '}') == "shaded")
+ shaded = true;
+ }
+ p.popPosition();
+ if (shaded) {
+ parse_outer_box(p, os, FLAG_ITEM, outer,
+ context, "parbox", "shaded");
+ } else
+ parse_box(p, os, 0, FLAG_ITEM, outer, context,
+ "", "", t.cs());
+ }
else if (t.cs() == "ovalbox" || t.cs() == "Ovalbox" ||
t.cs() == "shadowbox" || t.cs() == "doublebox")
parse_outer_box(p, os, FLAG_ITEM, outer, context, t.cs(), "");
else if (t.cs() == "framebox") {
- string special = p.getFullOpt();
- special += p.getOpt();
- parse_outer_box(p, os, FLAG_ITEM, outer, context, t.cs(), special);
+ if (p.next_token().character() == '(') {
+ //the syntax is: \framebox(x,y)[position]{content}
+ string arg = t.asInput();
+ arg += p.getFullParentheseArg();
+ arg += p.getFullOpt();
+ eat_whitespace(p, os, context, false);
+ handle_ert(os, arg + '{', context);
+ eat_whitespace(p, os, context, false);
+ parse_text(p, os, FLAG_ITEM, outer, context);
+ handle_ert(os, "}", context);
+ } else {
+ string special = p.getFullOpt();
+ special += p.getOpt();
+ parse_outer_box(p, os, FLAG_ITEM, outer,
+ context, t.cs(), special);
+ }
}
//\makebox() is part of the picture environment and different from \makebox{}
- //\makebox{} will be parsed by parse_box when bug 2956 is fixed
+ //\makebox{} will be parsed by parse_box
else if (t.cs() == "makebox") {
- string arg = t.asInput();
- if (p.next_token().character() == '(')
+ if (p.next_token().character() == '(') {
//the syntax is: \makebox(x,y)[position]{content}
+ string arg = t.asInput();
arg += p.getFullParentheseArg();
- else
- //the syntax is: \makebox[width][position]{content}
arg += p.getFullOpt();
- handle_ert(os, arg + p.getFullOpt(), context);
+ eat_whitespace(p, os, context, false);
+ handle_ert(os, arg + '{', context);
+ eat_whitespace(p, os, context, false);
+ parse_text(p, os, FLAG_ITEM, outer, context);
+ handle_ert(os, "}", context);
+ } else
+ //the syntax is: \makebox[width][position]{content}
+ parse_box(p, os, 0, FLAG_ITEM, outer, context,
+ "", "", t.cs());
}
else if (t.cs() == "smallskip" ||
skip_spaces_braces(p);
}
- else if (is_known(t.cs(), known_spaces)) {
- char const * const * where = is_known(t.cs(), known_spaces);
+ else if ((where = is_known(t.cs(), known_spaces))) {
context.check_layout(os);
begin_inset(os, "space ");
os << '\\' << known_coded_spaces[where - known_spaces]
t.cs() == "DeclareRobustCommandx" ||
t.cs() == "newcommand" ||
t.cs() == "newcommandx" ||
- t.cs() == "providecommand" ||
+ t.cs() == "providecommand" ||
t.cs() == "providecommandx" ||
t.cs() == "renewcommand" ||
t.cs() == "renewcommandx") {
end_inset(os);
}
+ else if (t.cs() == "includepdf") {
+ p.skip_spaces();
+ string const arg = p.getArg('[', ']');
+ map<string, string> opts;
+ vector<string> keys;
+ split_map(arg, opts, keys);
+ string name = normalize_filename(p.verbatim_item());
+ string const path = getMasterFilePath();
+ // We want to preserve relative / absolute filenames,
+ // therefore path is only used for testing
+ if (!makeAbsPath(name, path).exists()) {
+ // The file extension is probably missing.
+ // Now try to find it out.
+ char const * const pdfpages_format[] = {"pdf", 0};
+ string const pdftex_name =
+ find_file(name, path, pdfpages_format);
+ if (!pdftex_name.empty()) {
+ name = pdftex_name;
+ pdflatex = true;
+ }
+ }
+ if (makeAbsPath(name, path).exists())
+ fix_relative_filename(name);
+ else
+ cerr << "Warning: Could not find file '"
+ << name << "'." << endl;
+ // write output
+ context.check_layout(os);
+ begin_inset(os, "External\n\ttemplate ");
+ os << "PDFPages\n\tfilename "
+ << name << "\n";
+ // parse the options
+ if (opts.find("pages") != opts.end())
+ os << "\textra LaTeX \"pages="
+ << opts["pages"] << "\"\n";
+ if (opts.find("angle") != opts.end())
+ os << "\trotateAngle "
+ << opts["angle"] << '\n';
+ if (opts.find("origin") != opts.end()) {
+ ostringstream ss;
+ string const opt = opts["origin"];
+ if (opt == "tl") ss << "topleft";
+ if (opt == "bl") ss << "bottomleft";
+ if (opt == "Bl") ss << "baselineleft";
+ if (opt == "c") ss << "center";
+ if (opt == "tc") ss << "topcenter";
+ if (opt == "bc") ss << "bottomcenter";
+ if (opt == "Bc") ss << "baselinecenter";
+ if (opt == "tr") ss << "topright";
+ if (opt == "br") ss << "bottomright";
+ if (opt == "Br") ss << "baselineright";
+ if (!ss.str().empty())
+ os << "\trotateOrigin " << ss.str() << '\n';
+ else
+ cerr << "Warning: Ignoring unknown includegraphics origin argument '" << opt << "'\n";
+ }
+ if (opts.find("width") != opts.end())
+ os << "\twidth "
+ << translate_len(opts["width"]) << '\n';
+ if (opts.find("height") != opts.end())
+ os << "\theight "
+ << translate_len(opts["height"]) << '\n';
+ if (opts.find("keepaspectratio") != opts.end())
+ os << "\tkeepAspectRatio\n";
+ end_inset(os);
+ context.check_layout(os);
+ registerExternalTemplatePackages("PDFPages");
+ }
+
+ else if (t.cs() == "loadgame") {
+ p.skip_spaces();
+ string name = normalize_filename(p.verbatim_item());
+ string const path = getMasterFilePath();
+ // We want to preserve relative / absolute filenames,
+ // therefore path is only used for testing
+ if (!makeAbsPath(name, path).exists()) {
+ // The file extension is probably missing.
+ // Now try to find it out.
+ char const * const lyxskak_format[] = {"fen", 0};
+ string const lyxskak_name =
+ find_file(name, path, lyxskak_format);
+ if (!lyxskak_name.empty())
+ name = lyxskak_name;
+ }
+ if (makeAbsPath(name, path).exists())
+ fix_relative_filename(name);
+ else
+ cerr << "Warning: Could not find file '"
+ << name << "'." << endl;
+ context.check_layout(os);
+ begin_inset(os, "External\n\ttemplate ");
+ os << "ChessDiagram\n\tfilename "
+ << name << "\n";
+ end_inset(os);
+ context.check_layout(os);
+ // after a \loadgame follows a \showboard
+ if (p.get_token().asInput() == "showboard")
+ p.get_token();
+ registerExternalTemplatePackages("ChessDiagram");
+ }
+
else {
// try to see whether the string is in unicodesymbols
// Only use text mode commands, since we are in text mode here,
// and math commands may be invalid (bug 6797)
+ bool termination;
docstring rem;
+ set<string> req;
docstring s = encodings.fromLaTeXCommand(from_utf8(t.asInput()),
- rem, Encodings::TEXT_CMD);
+ Encodings::TEXT_CMD, termination, rem, &req);
if (!s.empty()) {
if (!rem.empty())
- cerr << "When parsing " << t.cs()
+ cerr << "When parsing " << t.cs()
<< ", result is " << to_utf8(s)
<< "+" << to_utf8(rem) << endl;
context.check_layout(os);
os << to_utf8(s);
- skip_spaces_braces(p);
+ if (termination)
+ skip_spaces_braces(p);
+ for (set<string>::const_iterator it = req.begin(); it != req.end(); ++it)
+ preamble.registerAutomaticallyLoadedPackage(*it);
}
//cerr << "#: " << t << " mode: " << mode << endl;
// heuristic: read up to next non-nested space