/*!
* natbib commands.
- * The starred forms are also known.
+ * The starred forms are also known except for "citefullauthor",
+ * "citeyear" and "citeyearpar".
*/
char const * const known_natbib_commands[] = { "cite", "citet", "citep",
"citealt", "citealp", "citeauthor", "citeyear", "citeyearpar",
// "fullcite",
// "footcite", "footcitet", "footcitep", "footcitealt", "footcitealp",
// "footciteauthor", "footciteyear", "footciteyearpar",
-"citefield", "citetitle", "cite*", 0 };
+"citefield", "citetitle", 0 };
/// LaTeX names for quotes
char const * const known_quotes[] = { "dq", "guillemotleft", "flqq", "og",
"negthinspace{}", 0};
-/// splits "x=z, y=b" into a map
-map<string, string> split_map(string const & s)
+/// splits "x=z, y=b" into a map and an ordered keyword vector
+void split_map(string const & s, map<string, string> & res, vector<string> & keys)
{
- map<string, string> res;
vector<string> v;
split(s, v);
+ res.clear();
+ keys.resize(v.size());
for (size_t i = 0; i < v.size(); ++i) {
size_t const pos = v[i].find('=');
- string const index = v[i].substr(0, pos);
- string const value = v[i].substr(pos + 1, string::npos);
- res[trim(index)] = trim(value);
+ string const index = trim(v[i].substr(0, pos));
+ string const value = trim(v[i].substr(pos + 1, string::npos));
+ res[index] = value;
+ keys[i] = index;
}
- return res;
}
os << "\n\\begin_inset " << name;
}
-/*// use this void when format 288 is supported
+
void begin_command_inset(ostream & os, string const & name,
- string const & latexname)
+ string const & latexname)
{
- os << "\n\\begin_inset CommandInset " << name;
- os << "\nLatexCommand " << latexname << "\n";
-}*/
+ begin_inset(os, "CommandInset ");
+ os << name << "\nLatexCommand " << latexname << '\n';
+}
void end_inset(ostream & os)
}
-void skip_braces(Parser & p)
+bool skip_braces(Parser & p)
{
if (p.next_token().cat() != catBegin)
- return;
+ return false;
p.get_token();
if (p.next_token().cat() == catEnd) {
p.get_token();
- return;
+ return true;
}
p.putback();
+ return false;
}
void eat_whitespace(Parser &, ostream &, Context &, bool);
+/*!
+ * Skips whitespace and braces.
+ * This should be called after a command has been parsed that is not put into
+ * ERT, and where LyX adds "{}" if needed.
+ */
+void skip_spaces_braces(Parser & p)
+{
+ /* The following four examples produce the same typeset output and
+ should be handled by this function:
+ - abc \j{} xyz
+ - abc \j {} xyz
+ - abc \j
+ {} xyz
+ - abc \j %comment
+ {} xyz
+ */
+ // Unfortunately we need to skip comments, too.
+	// We can't use eat_whitespace since writing the comments after the {}
+ // results in different output in some cases.
+ bool const skipped_spaces = p.skip_spaces(true);
+ bool const skipped_braces = skip_braces(p);
+ if (skipped_spaces && !skipped_braces)
+ // put back the space (it is better handled by check_space)
+ p.unskip_spaces(true);
+}
+
+
void output_command_layout(ostream & os, Parser & p, bool outer,
Context & parent_context,
Layout const * newlayout)
}
context.check_deeper(os);
context.check_layout(os);
- if (context.layout->optionalargs > 0) {
+ unsigned int optargs = 0;
+ while (optargs < context.layout->optargs) {
eat_whitespace(p, os, context, false);
- if (p.next_token().character() == '[') {
- p.get_token(); // eat '['
- begin_inset(os, "OptArg\n");
- os << "status collapsed\n\n";
- parse_text_in_inset(p, os, FLAG_BRACK_LAST, outer, context);
- end_inset(os);
- eat_whitespace(p, os, context, false);
- }
+ if (p.next_token().character() != '[')
+ break;
+ p.get_token(); // eat '['
+ begin_inset(os, "OptArg\n");
+ os << "status collapsed\n\n";
+ parse_text_in_inset(p, os, FLAG_BRACK_LAST, outer, context);
+ end_inset(os);
+ eat_whitespace(p, os, context, false);
+ ++optargs;
}
+#if 0
+	// This is the code needed to parse required arguments, but
+	// support for required arguments exists only in a newer LyX file
+	// format than the one tex2lyx presently outputs.
+ unsigned int reqargs = 0;
+ while (reqargs < context.layout->reqargs) {
+ eat_whitespace(p, os, context, false);
+ if (p.next_token().character() != '{')
+ break;
+ p.get_token(); // eat '{'
+ begin_inset(os, "OptArg\n");
+ os << "status collapsed\n\n";
+ parse_text_in_inset(p, os, FLAG_BRACE_LAST, outer, context);
+ end_inset(os);
+ eat_whitespace(p, os, context, false);
+ ++reqargs;
+ }
+#endif
parse_text(p, os, FLAG_ITEM, outer, context);
context.check_end_layout(os);
if (parent_context.deeper_paragraph) {
ert += '{' + p.verbatim_item() + '}';
break;
case optional:
- ert += p.getOpt();
+ // true because we must not eat whitespace
+ ert += p.getOpt(true);
break;
}
}
void parse_environment(Parser & p, ostream & os, bool outer,
- Context & parent_context)
+ string & last_env, Context & parent_context)
{
Layout const * newlayout;
string const name = p.getArg('{', '}');
context.need_end_deeper = true;
}
parent_context.check_end_layout(os);
+ if (last_env == name) {
+ // we need to output a separator since LyX would export
+ // the two environments as one otherwise (bug 5716)
+ docstring const sep = from_ascii("--Separator--");
+ TeX2LyXDocClass const & textclass(parent_context.textclass);
+ if (LYX_FORMAT >= 273 && textclass.hasLayout(sep)) {
+ Context newcontext(parent_context);
+ newcontext.layout = &(textclass[sep]);
+ newcontext.check_layout(os);
+ newcontext.check_end_layout(os);
+ } else {
+ parent_context.check_layout(os);
+ begin_inset(os, "Note Note\n");
+ os << "status closed\n";
+ Context newcontext(true, textclass,
+ &(textclass.defaultLayout()));
+ newcontext.check_layout(os);
+ newcontext.check_end_layout(os);
+ end_inset(os);
+ parent_context.check_end_layout(os);
+ }
+ }
switch (context.layout->latextype) {
case LATEX_LIST_ENVIRONMENT:
context.add_par_extra_stuff("\\labelwidthstring "
parse_unknown_environment(p, name, os, FLAG_END, outer,
parent_context);
+ last_env = name;
active_environments.pop_back();
}
if (FileName::isAbsolute(name))
return;
- name = to_utf8(makeRelPath(from_utf8(makeAbsPath(name, getMasterFilePath()).absFilename()),
+ name = to_utf8(makeRelPath(from_utf8(makeAbsPath(name, getMasterFilePath()).absFileName()),
from_utf8(getParentFilePath())));
}
string bibliographystyle;
bool const use_natbib = used_packages.find("natbib") != used_packages.end();
bool const use_jurabib = used_packages.find("jurabib") != used_packages.end();
+ string last_env;
while (p.good()) {
Token const & t = p.get_token();
if (t.character() == ']' && (flags & FLAG_BRACK_LAST))
return;
+ if (t.character() == '}' && (flags & FLAG_BRACE_LAST))
+ return;
+
+ // If there is anything between \end{env} and \begin{env} we
+ // don't need to output a separator.
+ if (t.cat() != catSpace && t.cat() != catNewline &&
+ t.asInput() != "\\begin")
+ last_env = "";
//
// cat codes
}
else if (t.cs() == "begin")
- parse_environment(p, os, outer, context);
+ parse_environment(p, os, outer, last_env, context);
else if (t.cs() == "end") {
if (flags & FLAG_END) {
else if (t.cs() == "bibitem") {
context.set_item();
context.check_layout(os);
- begin_inset(os, "LatexCommand ");
- os << t.cs() << "\n";
+ begin_command_inset(os, "bibitem", "bibitem");
os << "label \"" << p.getOptContent() << "\"\n";
os << "key \"" << p.verbatim_item() << "\"\n";
end_inset(os);
bool const clip = p.next_token().asInput() == "*";
if (clip)
p.get_token();
- map<string, string> opts = split_map(p.getArg('[', ']'));
+ string const arg = p.getArg('[', ']');
+ map<string, string> opts;
+ vector<string> keys;
+ split_map(arg, opts, keys);
if (clip)
opts["clip"] = string();
string name = normalize_filename(p.verbatim_item());
val = val*100;
os << "\tscale " << val << '\n';
}
- if (opts.find("angle") != opts.end())
+ if (opts.find("angle") != opts.end()) {
os << "\trotateAngle "
<< opts["angle"] << '\n';
+ vector<string>::const_iterator a =
+ find(keys.begin(), keys.end(), "angle");
+ vector<string>::const_iterator s =
+ find(keys.begin(), keys.end(), "width");
+ if (s == keys.end())
+ s = find(keys.begin(), keys.end(), "height");
+ if (s == keys.end())
+ s = find(keys.begin(), keys.end(), "scale");
+ if (s != keys.end() && distance(s, a) > 0)
+ os << "\tscaleBeforeRotation\n";
+ }
if (opts.find("origin") != opts.end()) {
ostringstream ss;
string const opt = opts["origin"];
else if (t.cs() == "makeindex" || t.cs() == "maketitle") {
// FIXME: Somehow prevent title layouts if
// "maketitle" was not found
- p.skip_spaces();
- skip_braces(p); // swallow this
+ // swallow this
+ skip_spaces_braces(p);
}
else if (t.cs() == "tableofcontents") {
- p.skip_spaces();
context.check_layout(os);
- begin_inset(os, "LatexCommand ");
- os << t.cs() << "\n";
+ begin_command_inset(os, "toc", "tableofcontents");
end_inset(os);
- skip_braces(p); // swallow this
+ skip_spaces_braces(p);
}
else if (t.cs() == "listoffigures") {
- p.skip_spaces();
context.check_layout(os);
begin_inset(os, "FloatList figure\n");
end_inset(os);
- skip_braces(p); // swallow this
+ skip_spaces_braces(p);
}
else if (t.cs() == "listoftables") {
- p.skip_spaces();
context.check_layout(os);
begin_inset(os, "FloatList table\n");
end_inset(os);
- skip_braces(p); // swallow this
+ skip_spaces_braces(p);
}
else if (t.cs() == "listof") {
else if (is_known(t.cs(), known_ref_commands)) {
context.check_layout(os);
- begin_inset(os, "LatexCommand ");
- os << t.cs() << "\n";
+ begin_command_inset(os, "ref", t.cs());
// LyX cannot handle newlines in a latex command
// FIXME: Move the substitution into parser::getOpt()?
os << subst(p.getOpt(), "\n", " ");
// LyX cannot handle newlines in the parameter
before = subst(before, "\n", " ");
}
- begin_inset(os, "LatexCommand ");
- os << t.cs() << "\n";
+ begin_command_inset(os, "citation", command);
os << "after " << '"' << after << '"' << "\n";
os << "before " << '"' << before << '"' << "\n";
os << "key " << '"' << p.verbatim_item() << '"' << "\n";
}
else if (use_jurabib &&
- is_known(t.cs(), known_jurabib_commands)) {
+ is_known(t.cs(), known_jurabib_commands) &&
+ (t.cs() == "cite" || p.next_token().asInput() != "*")) {
context.check_layout(os);
- string const command = t.cs();
+ string command = t.cs();
+ if (p.next_token().asInput() == "*") {
+ command += '*';
+ p.get_token();
+ }
char argumentOrder = '\0';
vector<string> const & options = used_packages["jurabib"];
if (find(options.begin(), options.end(),
before.erase(0, 1);
before.erase(before.length() - 1, 1);
}
- begin_inset(os, "LatexCommand ");
- os << t.cs() << "\n";
+ begin_command_inset(os, "citation", command);
os << "after " << '"' << after << '"' << "\n";
os << "before " << '"' << before << '"' << "\n";
os << "key " << '"' << citation << '"' << "\n";
context.check_layout(os);
// LyX cannot handle newlines in a latex command
string after = subst(p.getOptContent(), "\n", " ");
- begin_inset(os, "LatexCommand ");
- os << t.cs() << "\n";
+ begin_command_inset(os, "citation", "cite");
os << "after " << '"' << after << '"' << "\n";
os << "key " << '"' << subst(p.verbatim_item(), "\n", " ") << '"' << "\n";
end_inset(os);
else if (t.cs() == "index") {
context.check_layout(os);
- begin_inset(os, "LatexCommand ");
- os << t.cs() << "\n";
- // LyX cannot handle newlines in a latex command
- os << "name " << '"' << subst(p.verbatim_item(), "\n", " ") << '"' << "\n";
+ begin_inset(os, "Index\n");
+ os << "status collapsed\n";
+ parse_text_in_inset(p, os, FLAG_ITEM, false, context);
end_inset(os);
}
else if (t.cs() == "nomenclature") {
context.check_layout(os);
- begin_inset(os, "LatexCommand ");
- os << t.cs() << "\n";
+ begin_command_inset(os, "nomenclature", "nomenclature");
// LyX cannot handle newlines in a latex command
string prefix = subst(p.getOptContent(), "\n", " ");
if (!prefix.empty())
else if (t.cs() == "label") {
context.check_layout(os);
- begin_inset(os, "LatexCommand ");
- os << t.cs() << "\n";
+ begin_command_inset(os, "label", "label");
// LyX cannot handle newlines in a latex command
os << "name " << '"' << subst(p.verbatim_item(), "\n", " ") << '"' << "\n";
end_inset(os);
else if (t.cs() == "printindex") {
context.check_layout(os);
- begin_inset(os, "LatexCommand ");
- os << t.cs() << "\n";
+ begin_command_inset(os, "index_print", "printindex");
end_inset(os);
- skip_braces(p);
+ skip_spaces_braces(p);
}
else if (t.cs() == "printnomenclature") {
context.check_layout(os);
- begin_inset(os, "LatexCommand ");
- os << t.cs() << "\n";
+ begin_command_inset(os, "nomencl_print", "printnomenclature");
end_inset(os);
- skip_braces(p);
+ skip_spaces_braces(p);
}
else if (t.cs() == "url") {
context.check_layout(os);
- begin_inset(os, "LatexCommand ");
- os << t.cs() << "\n";
- // LyX cannot handle newlines in a latex command
- os << "target " << '"' << subst(p.verbatim_item(), "\n", " ") << '"' << "\n";
+ begin_inset(os, "Flex URL\n");
+ os << "status collapsed\n";
+ parse_text_in_inset(p, os, FLAG_ITEM, false, context);
+ end_inset(os);
+ }
+
+ else if (LYX_FORMAT >= 408 &&
+ (t.cs() == "textsuperscript" || t.cs() == "textsubscript")) {
+ context.check_layout(os);
+ begin_inset(os, "script ");
+ os << t.cs().substr(4) << '\n';
+ parse_text_in_inset(p, os, FLAG_ITEM, false, context);
end_inset(os);
}
|| t.cs() == "LaTeX") {
context.check_layout(os);
os << t.cs();
- skip_braces(p); // eat {}
+ skip_spaces_braces(p);
}
else if (t.cs() == "LaTeXe") {
context.check_layout(os);
os << "LaTeX2e";
- skip_braces(p); // eat {}
+ skip_spaces_braces(p);
}
else if (t.cs() == "ldots") {
context.check_layout(os);
- skip_braces(p);
os << "\\SpecialChar \\ldots{}\n";
+ skip_spaces_braces(p);
}
else if (t.cs() == "lyxarrow") {
context.check_layout(os);
os << "\\SpecialChar \\menuseparator\n";
- skip_braces(p);
+ skip_spaces_braces(p);
}
else if (t.cs() == "textcompwordmark") {
context.check_layout(os);
os << "\\SpecialChar \\textcompwordmark{}\n";
- skip_braces(p);
+ skip_spaces_braces(p);
+ }
+
+ else if (LYX_FORMAT >= 307 && t.cs() == "slash") {
+ context.check_layout(os);
+ os << "\\SpecialChar \\slash{}\n";
+ skip_spaces_braces(p);
+ }
+
+ else if (LYX_FORMAT >= 307 && t.cs() == "nobreakdash") {
+ context.check_layout(os);
+ os << "\\SpecialChar \\nobreakdash\n";
}
else if (t.cs() == "textquotedbl") {
else if (t.cs() == "textasciitilde") {
context.check_layout(os);
os << '~';
- skip_braces(p);
+ skip_spaces_braces(p);
}
else if (t.cs() == "textasciicircum") {
context.check_layout(os);
os << '^';
- skip_braces(p);
+ skip_spaces_braces(p);
}
else if (t.cs() == "textbackslash") {
context.check_layout(os);
os << "\n\\backslash\n";
- skip_braces(p);
+ skip_spaces_braces(p);
}
else if (t.cs() == "_" || t.cs() == "&" || t.cs() == "#"
else if (t.cs() == "newline") {
context.check_layout(os);
os << "\n\\" << t.cs() << "\n";
- skip_braces(p); // eat {}
+ skip_spaces_braces(p);
}
else if (t.cs() == "input" || t.cs() == "include"
|| t.cs() == "verbatiminput") {
- string name = '\\' + t.cs();
+ string name = t.cs();
if (t.cs() == "verbatiminput"
&& p.next_token().asInput() == "*")
name += p.get_token().asInput();
context.check_layout(os);
- begin_inset(os, "Include ");
string filename(normalize_filename(p.getArg('{', '}')));
string const path = getMasterFilePath();
// We want to preserve relative / absolute filenames,
if (!tex_name.empty())
filename = tex_name;
}
+ bool xfig = false;
+ string outname;
if (makeAbsPath(filename, path).exists()) {
string const abstexname =
- makeAbsPath(filename, path).absFilename();
+ makeAbsPath(filename, path).absFileName();
string const abslyxname =
changeExtension(abstexname, ".lyx");
+ string const absfigname =
+ changeExtension(abstexname, ".fig");
fix_relative_filename(filename);
string const lyxname =
changeExtension(filename, ".lyx");
- if (t.cs() != "verbatiminput" &&
+ if (t.cs() == "input" && FileName(absfigname).exists()) {
+ FileName const absepsname(
+ changeExtension(abstexname, ".eps"));
+ FileName const abspdfname(
+ changeExtension(abstexname, ".pdf"));
+ string const ext = getExtension(abstexname);
+ bool const xfigpdf =
+ abspdfname.exists() && ext == "pdftex_t";
+ bool const xfigeps =
+ absepsname.exists() && ext == "pstex_t";
+ xfig = xfigpdf || xfigeps;
+ }
+ if (xfig) {
+ outname = changeExtension(filename, ".fig");
+ } else if (t.cs() != "verbatiminput" &&
tex2lyx(abstexname, FileName(abslyxname),
p.getEncoding())) {
- os << name << '{' << lyxname << "}\n";
+ outname = lyxname;
} else {
- os << name << '{' << filename << "}\n";
+ outname = filename;
}
} else {
cerr << "Warning: Could not find included file '"
<< filename << "'." << endl;
- os << name << '{' << filename << "}\n";
+ outname = filename;
+ }
+ if (xfig) {
+ begin_inset(os, "External\n");
+ os << "\ttemplate XFig\n"
+ << "\tfilename " << outname << '\n';
+ } else {
+ begin_command_inset(os, "include", name);
+ os << "preview false\n"
+ "filename \"" << outname << "\"\n";
}
- os << "preview false\n";
end_inset(os);
}
else if (t.cs() == "bibliography") {
context.check_layout(os);
- begin_inset(os, "LatexCommand ");
- os << "bibtex" << "\n";
+ begin_command_inset(os, "bibtex", "bibtex");
os << "bibfiles " << '"' << p.verbatim_item() << '"' << "\n";
// Do we have a bibliographystyle set?
if (!bibliographystyle.empty())
begin_inset(os, "VSpace ");
os << t.cs();
end_inset(os);
- skip_braces(p);
+ skip_spaces_braces(p);
}
else if (is_known(t.cs(), known_spaces)) {
t.cs() == "cleardoublepage") {
context.check_layout(os);
os << "\n\\" << t.cs() << "\n";
- skip_braces(p); // eat {}
+ skip_spaces_braces(p);
}
else if (t.cs() == "newcommand" ||
else {
// try to see whether the string is in unicodesymbols
+ // Only use text mode commands, since we are in text mode here,
+ // and math commands may be invalid (bug 6797)
docstring rem;
- docstring s = encodings.fromLaTeXCommand(from_utf8(t.asInput()), rem);
+ docstring s = encodings.fromLaTeXCommand(from_utf8(t.asInput()),
+ rem, Encodings::TEXT_CMD);
if (!s.empty()) {
if (!rem.empty())
cerr << "When parsing " << t.cs()
<< "+" << to_utf8(rem) << endl;
context.check_layout(os);
os << to_utf8(s);
- p.skip_spaces();
- skip_braces(p); // eat {}
+ skip_spaces_braces(p);
}
//cerr << "#: " << t << " mode: " << mode << endl;
// heuristic: read up to next non-nested space