#include "Length.h"
#include "Preamble.h"
+#include "insets/ExternalTemplate.h"
+
#include "support/lassert.h"
#include "support/convert.h"
#include "support/FileName.h"
char const * const known_coded_ref_commands[] = { "ref", "pageref", "vref",
"vpageref", "formatted", "eqref", 0 };
+/**
+ * supported CJK encodings
+ * SJIS and Bg5 cannot be supported as they are not
+ * supported by iconv
+ * JIS does not work with LyX's encoding conversion
+ */
+const char * const supported_CJK_encodings[] = {
+"EUC-JP", "KS", "GB", "UTF8", 0};
+
+/**
+ * the LyX language names corresponding to the entries of supported_CJK_encodings;
+ * please keep this in sync with supported_CJK_encodings line by line!
+ */
+const char * const supported_CJK_languages[] = {
+"japanese-cjk", "korean", "chinese-simplified", "chinese-traditional", 0};
+
/*!
* natbib commands.
* The starred forms are also known except for "citefullauthor",
char const * const known_font_families[] = { "rmfamily", "sffamily",
"ttfamily", 0};
-/// the same as known_old_font_families and known_font_families with .lyx names
+/// LaTeX names for font family changing commands
+char const * const known_text_font_families[] = { "textrm", "textsf",
+"texttt", 0};
+
+/// The same as known_old_font_families, known_font_families and
+/// known_text_font_families with .lyx names
char const * const known_coded_font_families[] = { "roman", "sans",
"typewriter", 0};
/// LaTeX names for font series
char const * const known_font_series[] = { "bfseries", "mdseries", 0};
-/// the same as known_old_font_series and known_font_series with .lyx names
+/// LaTeX names for font series changing commands
+char const * const known_text_font_series[] = { "textbf", "textmd", 0};
+
+/// The same as known_old_font_series, known_font_series and
+/// known_text_font_series with .lyx names
char const * const known_coded_font_series[] = { "bold", "medium", 0};
/// LaTeX 2.09 names for font shapes
char const * const known_font_shapes[] = { "itshape", "slshape", "scshape",
"upshape", 0};
-/// the same as known_old_font_shapes and known_font_shapes with .lyx names
+/// LaTeX names for font shape changing commands
+char const * const known_text_font_shapes[] = { "textit", "textsl", "textsc",
+"textup", 0};
+
+/// The same as known_old_font_shapes, known_font_shapes and
+/// known_text_font_shapes with .lyx names
char const * const known_coded_font_shapes[] = { "italic", "slanted",
"smallcaps", "up", 0};
+/// Known special characters which need skip_spaces_braces() afterwards
+char const * const known_special_chars[] = {"ldots", "lyxarrow",
+"textcompwordmark", "slash", 0};
+
+/// the same as known_special_chars with .lyx names
+char const * const known_coded_special_chars[] = {"ldots{}", "menuseparator",
+"textcompwordmark{}", "slash{}", 0};
+
/*!
* Graphics file extensions known by the dvips driver of the graphics package.
* These extensions are used to complete the filename of an included
continue;
}
s = s.substr(i);
+ bool termination;
docstring rem;
- docstring parsed = encodings.fromLaTeXCommand(s, rem,
- Encodings::TEXT_CMD);
+ set<string> req;
+ docstring parsed = encodings.fromLaTeXCommand(s,
+ Encodings::TEXT_CMD, termination, rem, &req);
+ set<string>::const_iterator it = req.begin();
+ set<string>::const_iterator en = req.end();
+ for (; it != en; ++it)
+ preamble.registerAutomaticallyLoadedPackage(*it);
os << parsed;
s = rem;
if (s.empty() || s[0] != '\\')
should be handled by this function:
- abc \j{} xyz
- abc \j {} xyz
- - abc \j
+ - abc \j
{} xyz
- abc \j %comment
{} xyz
}
context.check_deeper(os);
context.check_layout(os);
- unsigned int optargs = 0;
- while (optargs < context.layout->optargs) {
+ // FIXME: Adjust to format 446!
+ // Since format 446, layouts no longer require all optional
+ // arguments to come before the required ones. Needs to be implemented!
+ int optargs = 0;
+ while (optargs < context.layout->optArgs()) {
eat_whitespace(p, os, context, false);
if (p.next_token().cat() == catEscape ||
- p.next_token().character() != '[')
+ p.next_token().character() != '[')
break;
p.get_token(); // eat '['
- begin_inset(os, "Argument\n");
+ // FIXME: Just a workaround. InsetArgument::updateBuffer
+ // will compute a proper ID for all "999" Arguments
+ // (which is also what lyx2lyx produces).
+ // However, tex2lyx should be able to output proper IDs
+ // itself.
+ begin_inset(os, "Argument 999\n");
os << "status collapsed\n\n";
parse_text_in_inset(p, os, FLAG_BRACK_LAST, outer, context);
end_inset(os);
eat_whitespace(p, os, context, false);
++optargs;
}
- unsigned int reqargs = 0;
- while (reqargs < context.layout->reqargs) {
+ int reqargs = 0;
+ while (reqargs < context.layout->requiredArgs()) {
eat_whitespace(p, os, context, false);
if (p.next_token().cat() != catBegin)
break;
p.get_token(); // eat '{'
- begin_inset(os, "Argument\n");
+ // FIXME: Just a workaround. InsetArgument::updateBuffer
+ // will compute a proper ID for all "999" Arguments
+ // (which is also what lyx2lyx produces).
+ // However, tex2lyx should be able to output proper IDs
+ // itself.
+ begin_inset(os, "Argument 999\n");
os << "status collapsed\n\n";
parse_text_in_inset(p, os, FLAG_BRACE_LAST, outer, context);
end_inset(os);
if (inner_type != "makebox") {
latex_height = p.getArg('[', ']');
translate_box_len(latex_height, height_value, height_unit, height_special);
- } else
- hor_pos = p.getArg('[', ']');
+ } else {
+ string const opt = p.getArg('[', ']');
+ if (!opt.empty()) {
+ hor_pos = opt;
+ if (hor_pos != "l" && hor_pos != "c" &&
+ hor_pos != "r" && hor_pos != "s") {
+ cerr << "invalid hor_pos " << hor_pos
+ << " for " << inner_type << endl;
+ hor_pos = "c";
+ }
+ }
+ }
if (p.hasOpt()) {
inner_pos = p.getArg('[', ']');
if (!opt.empty()) {
hor_pos = opt;
if (hor_pos != "l" && hor_pos != "c" &&
- hor_pos != "r") {
+ hor_pos != "r" && hor_pos != "s") {
cerr << "invalid hor_pos " << hor_pos
<< " for " << outer_type << endl;
hor_pos = "c";
// the inner env
if (!inner_type.empty() && (inner_flags & FLAG_END))
active_environments.pop_back();
-
+
// Ensure that the end of the outer box is parsed correctly:
// The opening brace has been eaten by parse_outer_box()
if (!outer_type.empty() && (outer_flags & FLAG_ITEM)) {
}
-void parse_listings(Parser & p, ostream & os, Context & parent_context)
+void parse_listings(Parser & p, ostream & os, Context & parent_context, bool in_line)
{
parent_context.check_layout(os);
begin_inset(os, "listings\n");
- os << "inline false\n"
- << "status collapsed\n";
+ if (p.hasOpt()) {
+ string arg = p.verbatimOption();
+ os << "lstparams " << '"' << arg << '"' << '\n';
+ if (arg.find("\\color") != string::npos)
+ preamble.registerAutomaticallyLoadedPackage("color");
+ }
+ if (in_line)
+ os << "inline true\n";
+ else
+ os << "inline false\n";
+ os << "status collapsed\n";
Context context(true, parent_context.textclass);
context.layout = &parent_context.textclass.plainLayout();
- context.check_layout(os);
- string const s = p.verbatimEnvironment("lstlisting");
+ string s;
+ if (in_line) {
+ s = p.plainCommand('!', '!', "lstinline");
+ context.new_paragraph(os);
+ context.check_layout(os);
+ } else
+ s = p.plainEnvironment("lstlisting");
for (string::const_iterator it = s.begin(), et = s.end(); it != et; ++it) {
if (*it == '\\')
os << "\n\\backslash\n";
void parse_environment(Parser & p, ostream & os, bool outer,
- string & last_env, bool & title_layout_found,
- Context & parent_context)
+ string & last_env, Context & parent_context)
{
Layout const * newlayout;
InsetLayout const * newinsetlayout = 0;
}
}
+ else if (is_known(name, preamble.polyglossia_languages)) {
+ // We must begin a new paragraph if not already done
+ if (! parent_context.atParagraphStart()) {
+ parent_context.check_end_layout(os);
+ parent_context.new_paragraph(os);
+ }
+ // save the language in the context so that it is
+ // handled by parse_text
+ parent_context.font.language = preamble.polyglossia2lyx(name);
+ parse_text(p, os, FLAG_END, outer, parent_context);
+ // Just in case the environment is empty
+ parent_context.extra_stuff.erase();
+ // We must begin a new paragraph to reset the language
+ parent_context.new_paragraph(os);
+ p.skip_spaces();
+ }
+
else if (unstarred_name == "tabular" || name == "longtable") {
eat_whitespace(p, os, parent_context, false);
string width = "0pt";
}
else if (parent_context.textclass.floats().typeExist(unstarred_name)) {
+ eat_whitespace(p, os, parent_context, false);
+ string const opt = p.hasOpt() ? p.getArg('[', ']') : string();
eat_whitespace(p, os, parent_context, false);
parent_context.check_layout(os);
begin_inset(os, "Float " + unstarred_name + "\n");
float_type = unstarred_name;
else
float_type = "";
- if (p.hasOpt())
- os << "placement " << p.getArg('[', ']') << '\n';
+ if (!opt.empty())
+ os << "placement " << opt << '\n';
+ if (contains(opt, "H"))
+ preamble.registerAutomaticallyLoadedPackage("float");
+ else {
+ Floating const & fl = parent_context.textclass.floats()
+ .getType(unstarred_name);
+ if (!fl.floattype().empty() && fl.usesFloatPkg())
+ preamble.registerAutomaticallyLoadedPackage("float");
+ }
+
os << "wide " << convert<string>(is_starred)
<< "\nsideways false"
<< "\nstatus open\n\n";
// we must make sure that the next item gets a \begin_layout.
parent_context.new_paragraph(os);
p.skip_spaces();
+ preamble.registerAutomaticallyLoadedPackage("rotfloat");
}
else if (name == "wrapfigure" || name == "wraptable") {
// we must make sure that the next item gets a \begin_layout.
parent_context.new_paragraph(os);
p.skip_spaces();
+ preamble.registerAutomaticallyLoadedPackage("wrapfig");
}
else if (name == "minipage") {
end_inset(os);
p.skip_spaces();
skip_braces(p); // eat {} that might by set by LyX behind comments
+ preamble.registerAutomaticallyLoadedPackage("verbatim");
+ }
+
+ else if (name == "verbatim") {
+ os << "\n\\end_layout\n\n\\begin_layout Verbatim\n";
+ string const s = p.plainEnvironment("verbatim");
+ string::const_iterator it2 = s.begin();
+ for (string::const_iterator it = s.begin(), et = s.end(); it != et; ++it) {
+ if (*it == '\\')
+ os << "\\backslash ";
+ else if (*it == '\n') {
+ it2 = it + 1;
+ // avoid adding an empty paragraph at the end
+ // FIXME: if there are 2 consecutive newlines at the end, ignore the second one
+ // because LyX will re-add a \n
+ // This hack must be removed once bug 8049 is fixed!
+ if ((it + 1 != et) && (it + 2 != et || *it2 != '\n'))
+ os << "\n\\end_layout\n\\begin_layout Verbatim\n";
+ } else
+ os << *it;
+ }
+ os << "\n\\end_layout\n\n";
+ p.skip_spaces();
+ // reset to Standard layout
+ os << "\n\\begin_layout Standard\n";
+ }
+
+ else if (name == "CJK") {
+ // the scheme is \begin{CJK}{encoding}{mapping}{text}
+ // It is impossible to decide if a CJK environment was in its own paragraph or within
+ // a line. We therefore always assume a paragraph since the latter is a rare case.
+ eat_whitespace(p, os, parent_context, false);
+ parent_context.check_end_layout(os);
+ // store the encoding to be able to reset it
+ string const encoding_old = p.getEncoding();
+ string const encoding = p.getArg('{', '}');
+ // SJIS and Bg5 cannot be handled by iconv
+ // JIS does not work with LyX's encoding conversion
+ if (encoding != "Bg5" && encoding != "JIS" && encoding != "SJIS")
+ p.setEncoding(encoding);
+ else
+ p.setEncoding("utf8");
+ // LyX doesn't support the second argument so if
+ // this is used we need to output everything as ERT
+ string const mapping = p.getArg('{', '}');
+ char const * const * const where =
+ is_known(encoding, supported_CJK_encodings);
+ if ((!mapping.empty() && mapping != " ") || !where) {
+ parent_context.check_layout(os);
+ handle_ert(os, "\\begin{" + name + "}{" + encoding + "}{" + mapping + "}",
+ parent_context);
+ // we must parse the content as verbatim because e.g. JIS can contain
+ // normally invalid characters
+ string const s = p.plainEnvironment("CJK");
+ for (string::const_iterator it = s.begin(), et = s.end(); it != et; ++it) {
+ if (*it == '\\')
+ handle_ert(os, "\\", parent_context);
+ else if (*it == '$')
+ handle_ert(os, "$", parent_context);
+ else
+ os << *it;
+ }
+ handle_ert(os, "\\end{" + name + "}",
+ parent_context);
+ } else {
+ string const lang =
+ supported_CJK_languages[where - supported_CJK_encodings];
+ // store the language because we must reset it at the end
+ string const lang_old = parent_context.font.language;
+ parent_context.font.language = lang;
+ parse_text_in_inset(p, os, FLAG_END, outer, parent_context);
+ parent_context.font.language = lang_old;
+ parent_context.new_paragraph(os);
+ }
+ p.setEncoding(encoding_old);
+ p.skip_spaces();
}
else if (name == "lyxgreyedout") {
else if (name == "lstlisting") {
eat_whitespace(p, os, parent_context, false);
- // FIXME handle listings with parameters
- // If this is added, don't forgot to handle the
- // automatic color package loading
- if (p.hasOpt())
- parse_unknown_environment(p, name, os, FLAG_END,
- outer, parent_context);
- else
- parse_listings(p, os, parent_context);
+ parse_listings(p, os, parent_context, false);
p.skip_spaces();
}
parent_context.add_extra_stuff("\\align center\n");
else if (name == "singlespace")
parent_context.add_extra_stuff("\\paragraph_spacing single\n");
- else if (name == "onehalfspace")
+ else if (name == "onehalfspace") {
parent_context.add_extra_stuff("\\paragraph_spacing onehalf\n");
- else if (name == "doublespace")
+ preamble.registerAutomaticallyLoadedPackage("setspace");
+ } else if (name == "doublespace") {
parent_context.add_extra_stuff("\\paragraph_spacing double\n");
- else if (name == "spacing")
+ preamble.registerAutomaticallyLoadedPackage("setspace");
+ } else if (name == "spacing") {
parent_context.add_extra_stuff("\\paragraph_spacing other " + p.verbatim_item() + "\n");
+ preamble.registerAutomaticallyLoadedPackage("setspace");
+ }
parse_text(p, os, FLAG_END, outer, parent_context);
// Just in case the environment is empty
parent_context.extra_stuff.erase();
}
context.check_deeper(os);
// handle known optional and required arguments
- // layouts require all optional arguments before the required ones
+ // FIXME: Since format 446, layouts no longer require all optional
+ // arguments to come before the required ones. Needs to be implemented!
// Unfortunately LyX can't handle arguments of list arguments (bug 7468):
// It is impossible to place anything after the environment name,
// but before the first \\item.
if (context.layout->latextype == LATEX_ENVIRONMENT) {
bool need_layout = true;
- unsigned int optargs = 0;
- while (optargs < context.layout->optargs) {
+ int optargs = 0;
+ while (optargs < context.layout->optArgs()) {
eat_whitespace(p, os, context, false);
if (p.next_token().cat() == catEscape ||
- p.next_token().character() != '[')
+ p.next_token().character() != '[')
break;
p.get_token(); // eat '['
if (need_layout) {
context.check_layout(os);
need_layout = false;
}
- begin_inset(os, "Argument\n");
+ // FIXME: Just a workaround. InsetArgument::updateBuffer
+ // will compute a proper ID for all "999" Arguments
+ // (which is also what lyx2lyx produces).
+ // However, tex2lyx should be able to output proper IDs
+ // itself.
+ begin_inset(os, "Argument 999\n");
os << "status collapsed\n\n";
parse_text_in_inset(p, os, FLAG_BRACK_LAST, outer, context);
end_inset(os);
eat_whitespace(p, os, context, false);
++optargs;
}
- unsigned int reqargs = 0;
- while (reqargs < context.layout->reqargs) {
+ int reqargs = 0;
+ while (reqargs < context.layout->requiredArgs()) {
eat_whitespace(p, os, context, false);
if (p.next_token().cat() != catBegin)
break;
context.check_layout(os);
need_layout = false;
}
- begin_inset(os, "Argument\n");
+ // FIXME: Just a workaround. InsetArgument::updateBuffer
+ // will compute a proper ID for all "999" Arguments
+ // (which is also what lyx2lyx produces).
+ // However, tex2lyx should be able to output proper IDs
+ // itself.
+ begin_inset(os, "Argument 999\n");
os << "status collapsed\n\n";
parse_text_in_inset(p, os, FLAG_BRACE_LAST, outer, context);
end_inset(os);
context.check_end_deeper(os);
parent_context.new_paragraph(os);
p.skip_spaces();
- if (!title_layout_found)
- title_layout_found = newlayout->intitle;
+ if (!preamble.titleLayoutFound())
+ preamble.titleLayoutFound(newlayout->intitle);
+ set<string> const & req = newlayout->requires();
+ set<string>::const_iterator it = req.begin();
+ set<string>::const_iterator en = req.end();
+ for (; it != en; ++it)
+ preamble.registerAutomaticallyLoadedPackage(*it);
}
// The single '=' is meant here.
/// Convert \p name from TeX convention (relative to master file) to LyX
/// convention (relative to .lyx file) if it is relative
-void fix_relative_filename(string & name)
+void fix_child_filename(string & name)
{
- if (FileName::isAbsolute(name))
- return;
+ string const absMasterTeX = getMasterFilePath(true);
+ bool const isabs = FileName::isAbsolute(name);
+ // convert from "relative to .tex master" to absolute original path
+ if (!isabs)
+ name = makeAbsPath(name, absMasterTeX).absFileName();
+ bool copyfile = copyFiles();
+ string const absParentLyX = getParentFilePath(false);
+ string abs = name;
+ if (copyfile) {
+ // convert from absolute original path to "relative to master file"
+ string const rel = to_utf8(makeRelPath(from_utf8(name),
+ from_utf8(absMasterTeX)));
+ // re-interpret "relative to .tex file" as "relative to .lyx file"
+ // (is different if the master .lyx file resides in a
+ // different path than the master .tex file)
+ string const absMasterLyX = getMasterFilePath(false);
+ abs = makeAbsPath(rel, absMasterLyX).absFileName();
+ // Do not copy if the new path is impossible to create. Example:
+ // absMasterTeX = "/foo/bar/"
+ // absMasterLyX = "/bar/"
+ // name = "/baz.eps" => new absolute name would be "/../baz.eps"
+ if (contains(name, "/../"))
+ copyfile = false;
+ }
+ if (copyfile) {
+ if (isabs)
+ name = abs;
+ else {
+ // convert from absolute original path to
+ // "relative to .lyx file"
+ name = to_utf8(makeRelPath(from_utf8(abs),
+ from_utf8(absParentLyX)));
+ }
+ }
+ else if (!isabs) {
+ // convert from absolute original path to "relative to .lyx file"
+ name = to_utf8(makeRelPath(from_utf8(name),
+ from_utf8(absParentLyX)));
+ }
+}
+
- name = to_utf8(makeRelPath(from_utf8(makeAbsPath(name, getMasterFilePath()).absFileName()),
- from_utf8(getParentFilePath())));
+void copy_file(FileName const & src, string dstname)
+{
+ if (!copyFiles())
+ return;
+ string const absParent = getParentFilePath(false);
+ FileName dst;
+ if (FileName::isAbsolute(dstname))
+ dst = FileName(dstname);
+ else
+ dst = makeAbsPath(dstname, absParent);
+ string const absMaster = getMasterFilePath(false);
+ FileName const srcpath = src.onlyPath();
+ FileName const dstpath = dst.onlyPath();
+ if (equivalent(srcpath, dstpath))
+ return;
+ if (!dstpath.isDirectory()) {
+ if (!dstpath.createPath()) {
+ cerr << "Warning: Could not create directory for file `"
+ << dst.absFileName() << "´." << endl;
+ return;
+ }
+ }
+ if (dst.isReadableFile()) {
+ if (overwriteFiles())
+ cerr << "Warning: Overwriting existing file `"
+ << dst.absFileName() << "´." << endl;
+ else {
+ cerr << "Warning: Not overwriting existing file `"
+ << dst.absFileName() << "´." << endl;
+ return;
+ }
+ }
+ if (!src.copyTo(dst))
+ cerr << "Warning: Could not copy file `" << src.absFileName()
+ << "´ to `" << dst.absFileName() << "´." << endl;
}
-/// Parse a NoWeb Scrap section. The initial "<<" is already parsed.
+/// Parse a NoWeb Chunk section. The initial "<<" is already parsed.
void parse_noweb(Parser & p, ostream & os, Context & context)
{
// assemble the rest of the keyword
string name("<<");
- bool scrap = false;
+ bool chunk = false;
while (p.good()) {
Token const & t = p.get_token();
if (t.asInput() == ">" && p.next_token().asInput() == ">") {
name += ">>";
p.get_token();
- scrap = (p.good() && p.next_token().asInput() == "=");
- if (scrap)
+ chunk = (p.good() && p.next_token().asInput() == "=");
+ if (chunk)
name += p.get_token().asInput();
break;
}
name += t.asInput();
}
- if (!scrap || !context.new_layout_allowed ||
- !context.textclass.hasLayout(from_ascii("Scrap"))) {
+ if (!chunk || !context.new_layout_allowed ||
+ !context.textclass.hasLayout(from_ascii("Chunk"))) {
cerr << "Warning: Could not interpret '" << name
<< "'. Ignoring it." << endl;
return;
// always must be in an own paragraph.
context.new_paragraph(os);
Context newcontext(true, context.textclass,
- &context.textclass[from_ascii("Scrap")]);
+ &context.textclass[from_ascii("Chunk")]);
newcontext.check_layout(os);
os << name;
while (p.good()) {
else {
ostringstream oss;
Context tmp(false, context.textclass,
- &context.textclass[from_ascii("Scrap")]);
+ &context.textclass[from_ascii("Chunk")]);
tmp.need_end_layout = true;
tmp.check_layout(oss);
os << subst(t.asInput(), "\n", oss.str());
}
- // The scrap chunk is ended by an @ at the beginning of a line.
+ // The chunk is ended by an @ at the beginning of a line.
// After the @ the line may contain a comment and/or
// whitespace, but nothing else.
if (t.asInput() == "@" && p.prev_token().cat() == catNewline &&
handle_ert(os, command + ert, context);
}
+
+void registerExternalTemplatePackages(string const & name)
+{
+ external::TemplateManager const & etm = external::TemplateManager::get();
+ external::Template const * const et = etm.getTemplateByName(name);
+ if (!et)
+ return;
+ external::Template::Formats::const_iterator cit = et->formats.end();
+ if (pdflatex)
+ cit = et->formats.find("PDFLaTeX");
+ if (cit == et->formats.end())
+ // If the template has not specified a PDFLaTeX output,
+ // we try the LaTeX format.
+ cit = et->formats.find("LaTeX");
+ if (cit == et->formats.end())
+ return;
+ vector<string>::const_iterator qit = cit->second.requirements.begin();
+ vector<string>::const_iterator qend = cit->second.requirements.end();
+ for (; qit != qend; ++qit)
+ preamble.registerAutomaticallyLoadedPackage(*qit);
+}
+
} // anonymous namespace
{
Layout const * newlayout = 0;
InsetLayout const * newinsetlayout = 0;
+ char const * const * where = 0;
// Store the latest bibliographystyle and nocite{*} option
// (needed for bibtex inset)
string btprint;
- string bibliographystyle;
+ string bibliographystyle = "default";
bool const use_natbib = preamble.isPackageUsed("natbib");
bool const use_jurabib = preamble.isPackageUsed("jurabib");
string last_env;
- bool title_layout_found = false;
while (p.good()) {
Token const & t = p.get_token();
+ // it is impossible to determine the correct encoding for non-CJK Japanese.
+ // Therefore write a note at the beginning of the document
+ if (is_nonCJKJapanese) {
+ context.check_layout(os);
+ begin_inset(os, "Note Note\n");
+ os << "status open\n\\begin_layout Plain Layout\n"
+ << "\\series bold\n"
+ << "Important information:\n"
+ << "\\end_layout\n\n"
+ << "\\begin_layout Plain Layout\n"
+ << "This document is in Japanese (non-CJK).\n"
+ << " It was therefore impossible for tex2lyx to determine the correct encoding."
+ << " The encoding EUC-JP was assumed. If this is incorrect, please set the correct"
+ << " encoding in the document settings.\n"
+ << "\\end_layout\n";
+ end_inset(os);
+ is_nonCJKJapanese = false;
+ }
+
#ifdef FILEDEBUG
debugToken(cerr, t, flags);
#endif
skip_braces(p);
}
+ else if (t.asInput() == "<"
+ && p.next_token().asInput() == "<" && noweb_mode) {
+ p.get_token();
+ parse_noweb(p, os, context);
+ }
+
else if (t.asInput() == "<" && p.next_token().asInput() == "<") {
context.check_layout(os);
begin_inset(os, "Quotes ");
skip_braces(p);
}
- else if (t.asInput() == "<"
- && p.next_token().asInput() == "<" && noweb_mode) {
- p.get_token();
- parse_noweb(p, os, context);
- }
-
else if (t.cat() == catSpace || (t.cat() == catNewline && ! p.isParagraph()))
check_space(p, os, context);
os << t.cs();
}
- else if (t.cat() == catBegin &&
- p.next_token().cat() == catEnd) {
+ else if (t.cat() == catBegin) {
+ Token const next = p.next_token();
+ Token const end = p.next_next_token();
+ if (next.cat() == catEnd) {
// {}
Token const prev = p.prev_token();
p.get_token();
; // ignore it in {}`` or -{}-
else
handle_ert(os, "{}", context);
-
- }
-
- else if (t.cat() == catBegin) {
+ } else if (next.cat() == catEscape &&
+ is_known(next.cs(), known_quotes) &&
+ end.cat() == catEnd) {
+ // Something like {\textquoteright} (e.g.
+ // from writer2latex). LyX writes
+ // \textquoteright{}, so we may skip the
+ // braces here for better readability.
+ parse_text_snippet(p, os, FLAG_BRACE_LAST,
+ outer, context);
+ } else {
context.check_layout(os);
// special handling of font attribute changes
Token const prev = p.prev_token();
- Token const next = p.next_token();
TeXFont const oldFont = context.font;
if (next.character() == '[' ||
next.character() == ']' ||
parse_text_snippet(p, os, FLAG_BRACE_LAST,
outer, context);
handle_ert(os, "}", context);
+ }
}
}
else if (t.cs() == "begin")
parse_environment(p, os, outer, last_env,
- title_layout_found, context);
+ context);
else if (t.cs() == "end") {
if (flags & FLAG_END) {
// FIXME: This swallows comments, but we cannot use
// eat_whitespace() since we must not output
// anything before the item.
- s = p.getArg('[', ']');
+ p.skip_spaces(true);
+ s = p.verbatimOption();
} else
p.skip_spaces(false);
context.set_item();
else if (t.cs() == "bibitem") {
context.set_item();
context.check_layout(os);
- string label = convert_command_inset_arg(p.getArg('[', ']'));
+ eat_whitespace(p, os, context, false);
+ string label = convert_command_inset_arg(p.verbatimOption());
string key = convert_command_inset_arg(p.verbatim_item());
if (contains(label, '\\') || contains(key, '\\')) {
// LyX can't handle LaTeX commands in labels or keys
}
}
- else if (is_macro(p))
- parse_macro(p, os, context);
+ else if (is_macro(p)) {
+ // catch the case of \def\inputGnumericTable
+ bool macro = true;
+ if (t.cs() == "def") {
+ Token second = p.next_token();
+ if (second.cs() == "inputGnumericTable") {
+ p.pushPosition();
+ p.get_token();
+ skip_braces(p);
+ Token third = p.get_token();
+ p.popPosition();
+ if (third.cs() == "input") {
+ p.get_token();
+ skip_braces(p);
+ p.get_token();
+ string name = normalize_filename(p.verbatim_item());
+ string const path = getMasterFilePath(true);
+ // We want to preserve relative / absolute filenames,
+ // therefore path is only used for testing
+ // The file extension is in every case ".tex".
+ // So we need to remove this extension and check for
+ // the original one.
+ name = removeExtension(name);
+ if (!makeAbsPath(name, path).exists()) {
+ char const * const Gnumeric_formats[] = {"gnumeric",
+ "ods", "xls", 0};
+ string const Gnumeric_name =
+ find_file(name, path, Gnumeric_formats);
+ if (!Gnumeric_name.empty())
+ name = Gnumeric_name;
+ }
+ FileName const absname = makeAbsPath(name, path);
+ if (absname.exists()) {
+ fix_child_filename(name);
+ copy_file(absname, name);
+ } else
+ cerr << "Warning: Could not find file '"
+ << name << "'." << endl;
+ context.check_layout(os);
+ begin_inset(os, "External\n\ttemplate ");
+ os << "GnumericSpreadsheet\n\tfilename "
+ << name << "\n";
+ end_inset(os);
+ context.check_layout(os);
+ macro = false;
+ // register the packages that are automatically loaded
+ // by the Gnumeric template
+ registerExternalTemplatePackages("GnumericSpreadsheet");
+ }
+ }
+ }
+ if (macro)
+ parse_macro(p, os, context);
+ }
else if (t.cs() == "noindent") {
p.skip_spaces();
// Must catch empty dates before findLayout is called below
else if (t.cs() == "date") {
+ eat_whitespace(p, os, context, false);
+ p.pushPosition();
string const date = p.verbatim_item();
- if (date.empty())
+ p.popPosition();
+ if (date.empty()) {
preamble.suppressDate(true);
- else {
+ p.verbatim_item();
+ } else {
preamble.suppressDate(false);
if (context.new_layout_allowed &&
(newlayout = findLayout(context.textclass,
// write the layout
output_command_layout(os, p, outer,
context, newlayout);
- p.skip_spaces();
- if (!title_layout_found)
- title_layout_found = newlayout->intitle;
+ parse_text_snippet(p, os, FLAG_ITEM, outer, context);
+ if (!preamble.titleLayoutFound())
+ preamble.titleLayoutFound(newlayout->intitle);
+ set<string> const & req = newlayout->requires();
+ set<string>::const_iterator it = req.begin();
+ set<string>::const_iterator en = req.end();
+ for (; it != en; ++it)
+ preamble.registerAutomaticallyLoadedPackage(*it);
} else
- handle_ert(os, "\\date{" + date + '}',
- context);
+ handle_ert(os,
+ "\\date{" + p.verbatim_item() + '}',
+ context);
}
}
p.get_token();
output_command_layout(os, p, outer, context, newlayout);
p.skip_spaces();
- if (!title_layout_found)
- title_layout_found = newlayout->intitle;
+ if (!preamble.titleLayoutFound())
+ preamble.titleLayoutFound(newlayout->intitle);
+ set<string> const & req = newlayout->requires();
+ for (set<string>::const_iterator it = req.begin(); it != req.end(); ++it)
+ preamble.registerAutomaticallyLoadedPackage(*it);
}
// Section headings and the like
// write the layout
output_command_layout(os, p, outer, context, newlayout);
p.skip_spaces();
- if (!title_layout_found)
- title_layout_found = newlayout->intitle;
+ if (!preamble.titleLayoutFound())
+ preamble.titleLayoutFound(newlayout->intitle);
+ set<string> const & req = newlayout->requires();
+ for (set<string>::const_iterator it = req.begin(); it != req.end(); ++it)
+ preamble.registerAutomaticallyLoadedPackage(*it);
}
else if (t.cs() == "caption") {
context.check_layout(os);
p.skip_spaces();
begin_inset(os, "Caption\n");
- Context newcontext(true, context.textclass);
- newcontext.font = context.font;
+ Context newcontext(true, context.textclass, 0, 0, context.font);
newcontext.check_layout(os);
+ // FIXME InsetArgument is now properly implemented in InsetLayout
+ // (for captions, but also for others)
if (p.next_token().cat() != catEscape &&
p.next_token().character() == '[') {
p.get_token(); // eat '['
- begin_inset(os, "Argument\n");
+ begin_inset(os, "Argument 1\n");
os << "status collapsed\n";
parse_text_in_inset(p, os, FLAG_BRACK_LAST, outer, context);
end_inset(os);
os << "\n\\begin_layout Plain Layout";
p.skip_spaces();
begin_inset(os, "Caption\n");
- Context newcontext(true, context.textclass);
- newcontext.font = context.font;
+ Context newcontext(true, context.textclass,
+ 0, 0, context.font);
newcontext.check_layout(os);
os << caption << "\n";
newcontext.check_end_layout(os);
opts["clip"] = string();
string name = normalize_filename(p.verbatim_item());
- string const path = getMasterFilePath();
+ string const path = getMasterFilePath(true);
// We want to preserve relative / absolute filenames,
// therefore path is only used for testing
if (!makeAbsPath(name, path).exists()) {
}
}
- if (makeAbsPath(name, path).exists())
- fix_relative_filename(name);
- else
+ FileName const absname = makeAbsPath(name, path);
+ if (absname.exists()) {
+ fix_child_filename(name);
+ copy_file(absname, name);
+ } else
cerr << "Warning: Could not find graphics file '"
<< name << "'." << endl;
// Warn about invalid options.
// Check whether some option was given twice.
end_inset(os);
+ preamble.registerAutomaticallyLoadedPackage("graphicx");
}
else if (t.cs() == "footnote" ||
end_inset(os);
}
+ else if (t.cs() == "lstinline") {
+ p.skip_spaces();
+ parse_listings(p, os, context, true);
+ }
+
else if (t.cs() == "ensuremath") {
p.skip_spaces();
context.check_layout(os);
}
else if (t.cs() == "makeindex" || t.cs() == "maketitle") {
- if (title_layout_found) {
+ if (preamble.titleLayoutFound()) {
// swallow this
skip_spaces_braces(p);
} else
handle_ert(os, t.asInput(), context);
}
- else if (t.cs() == "tableofcontents") {
+ else if (t.cs() == "tableofcontents" || t.cs() == "lstlistoflistings") {
context.check_layout(os);
- begin_command_inset(os, "toc", "tableofcontents");
+ begin_command_inset(os, "toc", t.cs());
end_inset(os);
skip_spaces_braces(p);
+ if (t.cs() == "lstlistoflistings")
+ preamble.registerAutomaticallyLoadedPackage("listings");
}
else if (t.cs() == "listoffigures") {
handle_ert(os, "\\listof{" + name + "}", context);
}
- else if (t.cs() == "textrm")
- parse_text_attributes(p, os, FLAG_ITEM, outer,
- context, "\\family",
- context.font.family, "roman");
-
- else if (t.cs() == "textsf")
- parse_text_attributes(p, os, FLAG_ITEM, outer,
- context, "\\family",
- context.font.family, "sans");
-
- else if (t.cs() == "texttt")
+ else if ((where = is_known(t.cs(), known_text_font_families)))
parse_text_attributes(p, os, FLAG_ITEM, outer,
- context, "\\family",
- context.font.family, "typewriter");
+ context, "\\family", context.font.family,
+ known_coded_font_families[where - known_text_font_families]);
- else if (t.cs() == "textmd")
+ else if ((where = is_known(t.cs(), known_text_font_series)))
parse_text_attributes(p, os, FLAG_ITEM, outer,
- context, "\\series",
- context.font.series, "medium");
+ context, "\\series", context.font.series,
+ known_coded_font_series[where - known_text_font_series]);
- else if (t.cs() == "textbf")
+ else if ((where = is_known(t.cs(), known_text_font_shapes)))
parse_text_attributes(p, os, FLAG_ITEM, outer,
- context, "\\series",
- context.font.series, "bold");
-
- else if (t.cs() == "textup")
- parse_text_attributes(p, os, FLAG_ITEM, outer,
- context, "\\shape",
- context.font.shape, "up");
-
- else if (t.cs() == "textit")
- parse_text_attributes(p, os, FLAG_ITEM, outer,
- context, "\\shape",
- context.font.shape, "italic");
-
- else if (t.cs() == "textsl")
- parse_text_attributes(p, os, FLAG_ITEM, outer,
- context, "\\shape",
- context.font.shape, "slanted");
-
- else if (t.cs() == "textsc")
- parse_text_attributes(p, os, FLAG_ITEM, outer,
- context, "\\shape",
- context.font.shape, "smallcaps");
+ context, "\\shape", context.font.shape,
+ known_coded_font_shapes[where - known_text_font_shapes]);
else if (t.cs() == "textnormal" || t.cs() == "normalfont") {
context.check_layout(os);
else if (t.cs() == "href") {
context.check_layout(os);
- string target = p.getArg('{', '}');
- string name = p.getArg('{', '}');
+ string target = convert_command_inset_arg(p.verbatim_item());
+ string name = convert_command_inset_arg(p.verbatim_item());
string type;
size_t i = target.find(':');
if (i != string::npos) {
end_inset(os);
skip_spaces_braces(p);
}
-
+
else if (t.cs() == "lyxline") {
// swallow size argument (it is not used anyway)
p.getArg('{', '}');
is_known(p.next_token().cs(), known_phrases))) {
// LyX sometimes puts a \protect in front, so we have to ignore it
// FIXME: This needs to be changed when bug 4752 is fixed.
- char const * const * where = is_known(
+ where = is_known(
t.cs() == "protect" ? p.get_token().cs() : t.cs(),
known_phrases);
context.check_layout(os);
skip_spaces_braces(p);
}
- else if (is_known(t.cs(), known_ref_commands)) {
+ else if ((where = is_known(t.cs(), known_ref_commands))) {
string const opt = p.getOpt();
if (opt.empty()) {
context.check_layout(os);
- char const * const * where = is_known(t.cs(),
- known_ref_commands);
begin_command_inset(os, "ref",
known_coded_ref_commands[where - known_ref_commands]);
os << "reference \""
<< convert_command_inset_arg(p.verbatim_item())
<< "\"\n";
end_inset(os);
+ if (t.cs() == "vref" || t.cs() == "vpageref")
+ preamble.registerAutomaticallyLoadedPackage("varioref");
+
} else {
// LyX does not support optional arguments of ref commands
handle_ert(os, t.asInput() + '[' + opt + "]{" +
btprint = key;
}
- else if (t.cs() == "index") {
+ else if (t.cs() == "index" ||
+ (t.cs() == "sindex" && preamble.use_indices() == "true")) {
context.check_layout(os);
- begin_inset(os, "Index idx\n");
- os << "status collapsed\n";
+ string const arg = (t.cs() == "sindex" && p.hasOpt()) ?
+ p.getArg('[', ']') : "";
+ string const kind = arg.empty() ? "idx" : arg;
+ begin_inset(os, "Index ");
+ os << kind << "\nstatus collapsed\n";
parse_text_in_inset(p, os, FLAG_ITEM, false, context, "Index");
end_inset(os);
+ if (kind != "idx")
+ preamble.registerAutomaticallyLoadedPackage("splitidx");
}
else if (t.cs() == "nomenclature") {
<< convert_command_inset_arg(p.verbatim_item())
<< "\"\n";
end_inset(os);
+ preamble.registerAutomaticallyLoadedPackage("nomencl");
}
-
+
else if (t.cs() == "label") {
context.check_layout(os);
begin_command_inset(os, "label", "label");
os << "type \"idx\"\n";
end_inset(os);
skip_spaces_braces(p);
+ preamble.registerAutomaticallyLoadedPackage("makeidx");
+ if (preamble.use_indices() == "true")
+ preamble.registerAutomaticallyLoadedPackage("splitidx");
}
else if (t.cs() == "printnomenclature") {
os << "width \"" << width << '\"';
end_inset(os);
skip_spaces_braces(p);
+ preamble.registerAutomaticallyLoadedPackage("nomencl");
}
else if ((t.cs() == "textsuperscript" || t.cs() == "textsubscript")) {
preamble.registerAutomaticallyLoadedPackage("subscript");
}
- else if (is_known(t.cs(), known_quotes)) {
- char const * const * where = is_known(t.cs(), known_quotes);
+ else if ((where = is_known(t.cs(), known_quotes))) {
context.check_layout(os);
begin_inset(os, "Quotes ");
os << known_coded_quotes[where - known_quotes];
skip_braces(p);
}
- else if (is_known(t.cs(), known_sizes) &&
+ else if ((where = is_known(t.cs(), known_sizes)) &&
context.new_layout_allowed) {
- char const * const * where = is_known(t.cs(), known_sizes);
context.check_layout(os);
TeXFont const oldFont = context.font;
context.font.size = known_coded_sizes[where - known_sizes];
eat_whitespace(p, os, context, false);
}
- else if (is_known(t.cs(), known_font_families) &&
+ else if ((where = is_known(t.cs(), known_font_families)) &&
context.new_layout_allowed) {
- char const * const * where =
- is_known(t.cs(), known_font_families);
context.check_layout(os);
TeXFont const oldFont = context.font;
context.font.family =
eat_whitespace(p, os, context, false);
}
- else if (is_known(t.cs(), known_font_series) &&
+ else if ((where = is_known(t.cs(), known_font_series)) &&
context.new_layout_allowed) {
- char const * const * where =
- is_known(t.cs(), known_font_series);
context.check_layout(os);
TeXFont const oldFont = context.font;
context.font.series =
eat_whitespace(p, os, context, false);
}
- else if (is_known(t.cs(), known_font_shapes) &&
+ else if ((where = is_known(t.cs(), known_font_shapes)) &&
context.new_layout_allowed) {
- char const * const * where =
- is_known(t.cs(), known_font_shapes);
context.check_layout(os);
TeXFont const oldFont = context.font;
context.font.shape =
output_font_change(os, oldFont, context.font);
eat_whitespace(p, os, context, false);
}
- else if (is_known(t.cs(), known_old_font_families) &&
+ else if ((where = is_known(t.cs(), known_old_font_families)) &&
context.new_layout_allowed) {
- char const * const * where =
- is_known(t.cs(), known_old_font_families);
context.check_layout(os);
TeXFont const oldFont = context.font;
context.font.init();
eat_whitespace(p, os, context, false);
}
- else if (is_known(t.cs(), known_old_font_series) &&
+ else if ((where = is_known(t.cs(), known_old_font_series)) &&
context.new_layout_allowed) {
- char const * const * where =
- is_known(t.cs(), known_old_font_series);
context.check_layout(os);
TeXFont const oldFont = context.font;
context.font.init();
eat_whitespace(p, os, context, false);
}
- else if (is_known(t.cs(), known_old_font_shapes) &&
+ else if ((where = is_known(t.cs(), known_old_font_shapes)) &&
context.new_layout_allowed) {
- char const * const * where =
- is_known(t.cs(), known_old_font_shapes);
context.check_layout(os);
TeXFont const oldFont = context.font;
context.font.init();
else if (t.cs() == "selectlanguage") {
context.check_layout(os);
// save the language for the case that a
- // \foreignlanguage is used
-
+ // \foreignlanguage is used
context.font.language = babel2lyx(p.verbatim_item());
os << "\n\\lang " << context.font.language << "\n";
}
context, "\\lang",
context.font.language, lang);
}
+
+ else if (prefixIs(t.cs(), "text")
+ && is_known(t.cs().substr(4), preamble.polyglossia_languages)) {
+ // scheme is \textLANGUAGE{text} where LANGUAGE is in polyglossia_languages[]
+ string lang;
+ // We have to output the whole command if it has an option
+ // because LyX doesn't support this yet, see bug #8214,
+ // only if there is a single option specifying a variant, we can handle it.
+ if (p.hasOpt()) {
+ string langopts = p.getOpt();
+ // check if the option contains a variant, if yes, extract it
+ string::size_type pos_var = langopts.find("variant");
+ string::size_type i = langopts.find(',');
+ string::size_type k = langopts.find('=', pos_var);
+ if (pos_var != string::npos && i == string::npos) {
+ string variant;
+ variant = langopts.substr(k + 1, langopts.length() - k - 2);
+ lang = preamble.polyglossia2lyx(variant);
+ parse_text_attributes(p, os, FLAG_ITEM, outer,
+ context, "\\lang",
+ context.font.language, lang);
+ } else
+ handle_ert(os, t.asInput() + langopts, context);
+ } else {
+ lang = preamble.polyglossia2lyx(t.cs().substr(4, string::npos));
+ parse_text_attributes(p, os, FLAG_ITEM, outer,
+ context, "\\lang",
+ context.font.language, lang);
+ }
+ }
else if (t.cs() == "inputencoding") {
// nothing to write here
p.setEncoding(enc);
}
- else if (t.cs() == "ldots") {
- context.check_layout(os);
- os << "\\SpecialChar \\ldots{}\n";
- skip_spaces_braces(p);
- }
-
- else if (t.cs() == "lyxarrow") {
- context.check_layout(os);
- os << "\\SpecialChar \\menuseparator\n";
- skip_spaces_braces(p);
- }
-
- else if (t.cs() == "textcompwordmark") {
- context.check_layout(os);
- os << "\\SpecialChar \\textcompwordmark{}\n";
- skip_spaces_braces(p);
- }
-
- else if (t.cs() == "slash") {
+ else if ((where = is_known(t.cs(), known_special_chars))) {
context.check_layout(os);
- os << "\\SpecialChar \\slash{}\n";
+ os << "\\SpecialChar \\"
+ << known_coded_special_chars[where - known_special_chars]
+ << '\n';
skip_spaces_braces(p);
}
else if (t.cs() == "verb") {
context.check_layout(os);
char const delimiter = p.next_token().character();
- string const arg = p.getArg(delimiter, delimiter);
+ // \verb is special: The usual escaping rules do not
+ // apply, e.g. "\verb+\+" is valid and denotes a single
+ // backslash (bug #4468). Therefore we do not allow
+ // escaping in getArg().
+ string const arg = p.getArg(delimiter, delimiter, false);
ostringstream oss;
oss << "\\verb" << delimiter << arg << delimiter;
handle_ert(os, oss.str(), context);
handle_ert(os, t.asInput(), context);
// accents (see Table 6 in Comprehensive LaTeX Symbol List)
- else if (t.cs().size() == 1
+ else if (t.cs().size() == 1
&& contains("\"'.=^`bcdHkrtuv~", t.cs())) {
context.check_layout(os);
// try to see whether the string is in unicodesymbols
+ bool termination;
docstring rem;
- string command = t.asInput() + "{"
+ string command = t.asInput() + "{"
+ trimSpaceAndEol(p.verbatim_item())
+ "}";
- docstring s = encodings.fromLaTeXCommand(from_utf8(command), rem);
+ set<string> req;
+ docstring s = encodings.fromLaTeXCommand(from_utf8(command),
+ Encodings::TEXT_CMD | Encodings::MATH_CMD,
+ termination, rem, &req);
if (!s.empty()) {
if (!rem.empty())
- cerr << "When parsing " << command
+ cerr << "When parsing " << command
<< ", result is " << to_utf8(s)
<< "+" << to_utf8(rem) << endl;
os << to_utf8(s);
+ for (set<string>::const_iterator it = req.begin(); it != req.end(); ++it)
+ preamble.registerAutomaticallyLoadedPackage(*it);
} else
// we did not find a non-ert version
handle_ert(os, command, context);
name += p.get_token().asInput();
context.check_layout(os);
string filename(normalize_filename(p.getArg('{', '}')));
- string const path = getMasterFilePath();
+ string const path = getMasterFilePath(true);
// We want to preserve relative / absolute filenames,
// therefore path is only used for testing
if ((t.cs() == "include" || t.cs() == "input") &&
if (makeAbsPath(filename, path).exists()) {
string const abstexname =
makeAbsPath(filename, path).absFileName();
- string const abslyxname =
- changeExtension(abstexname, ".lyx");
string const absfigname =
changeExtension(abstexname, ".fig");
- fix_relative_filename(filename);
+ fix_child_filename(filename);
string const lyxname =
changeExtension(filename, ".lyx");
+ string const abslyxname = makeAbsPath(
+ lyxname, getParentFilePath(false)).absFileName();
bool xfig = false;
- external = FileName(absfigname).exists();
- if (t.cs() == "input") {
+ if (!skipChildren())
+ external = FileName(absfigname).exists();
+ if (t.cs() == "input" && !skipChildren()) {
string const ext = getExtension(abstexname);
// Combined PS/LaTeX:
}
if (external) {
outname = changeExtension(filename, ".fig");
+ FileName abssrc(changeExtension(abstexname, ".fig"));
+ copy_file(abssrc, outname);
} else if (xfig) {
// Don't try to convert, the result
// would be full of ERT.
outname = filename;
+ FileName abssrc(abstexname);
+ copy_file(abssrc, outname);
} else if (t.cs() != "verbatiminput" &&
+ !skipChildren() &&
tex2lyx(abstexname, FileName(abslyxname),
p.getEncoding())) {
outname = lyxname;
+ // no need to call copy_file
+ // tex2lyx creates the file
} else {
outname = filename;
+ FileName abssrc(abstexname);
+ copy_file(abssrc, outname);
}
} else {
cerr << "Warning: Could not find included file '"
begin_inset(os, "External\n");
os << "\ttemplate XFig\n"
<< "\tfilename " << outname << '\n';
+ registerExternalTemplatePackages("XFig");
} else {
begin_command_inset(os, "include", name);
os << "preview false\n"
"filename \"" << outname << "\"\n";
+ if (t.cs() == "verbatiminput")
+ preamble.registerAutomaticallyLoadedPackage("verbatim");
}
end_inset(os);
}
parse_outer_box(p, os, FLAG_ITEM, outer, context, t.cs(), "");
else if (t.cs() == "framebox") {
- string special = p.getFullOpt();
- special += p.getOpt();
- parse_outer_box(p, os, FLAG_ITEM, outer, context, t.cs(), special);
+ if (p.next_token().character() == '(') {
+ //the syntax is: \framebox(x,y)[position]{content}
+ string arg = t.asInput();
+ arg += p.getFullParentheseArg();
+ arg += p.getFullOpt();
+ eat_whitespace(p, os, context, false);
+ handle_ert(os, arg + '{', context);
+ parse_text(p, os, FLAG_ITEM, outer, context);
+ handle_ert(os, "}", context);
+ } else {
+ string special = p.getFullOpt();
+ special += p.getOpt();
+ // LyX does not yet support \framebox without any option
+ if (!special.empty())
+ parse_outer_box(p, os, FLAG_ITEM, outer,
+ context, t.cs(), special);
+ else {
+ eat_whitespace(p, os, context, false);
+ handle_ert(os, "\\framebox{", context);
+ parse_text(p, os, FLAG_ITEM, outer, context);
+ handle_ert(os, "}", context);
+ }
+ }
}
//\makebox() is part of the picture environment and different from \makebox{}
//\makebox{} will be parsed by parse_box
else if (t.cs() == "makebox") {
- string arg = t.asInput();
if (p.next_token().character() == '(') {
//the syntax is: \makebox(x,y)[position]{content}
+ string arg = t.asInput();
arg += p.getFullParentheseArg();
arg += p.getFullOpt();
eat_whitespace(p, os, context, false);
handle_ert(os, arg + '{', context);
- eat_whitespace(p, os, context, false);
parse_text(p, os, FLAG_ITEM, outer, context);
handle_ert(os, "}", context);
} else
skip_spaces_braces(p);
}
- else if (is_known(t.cs(), known_spaces)) {
- char const * const * where = is_known(t.cs(), known_spaces);
+ else if ((where = is_known(t.cs(), known_spaces))) {
context.check_layout(os);
begin_inset(os, "space ");
os << '\\' << known_coded_spaces[where - known_spaces]
t.cs() == "DeclareRobustCommandx" ||
t.cs() == "newcommand" ||
t.cs() == "newcommandx" ||
- t.cs() == "providecommand" ||
+ t.cs() == "providecommand" ||
t.cs() == "providecommandx" ||
t.cs() == "renewcommand" ||
t.cs() == "renewcommandx") {
end_inset(os);
}
+ else if (t.cs() == "includepdf") {
+ p.skip_spaces();
+ string const arg = p.getArg('[', ']');
+ map<string, string> opts;
+ vector<string> keys;
+ split_map(arg, opts, keys);
+ string name = normalize_filename(p.verbatim_item());
+ string const path = getMasterFilePath(true);
+ // We want to preserve relative / absolute filenames,
+ // therefore path is only used for testing
+ if (!makeAbsPath(name, path).exists()) {
+ // The file extension is probably missing.
+ // Now try to find it out.
+ char const * const pdfpages_format[] = {"pdf", 0};
+ string const pdftex_name =
+ find_file(name, path, pdfpages_format);
+ if (!pdftex_name.empty()) {
+ name = pdftex_name;
+ pdflatex = true;
+ }
+ }
+ FileName const absname = makeAbsPath(name, path);
+ if (absname.exists())
+ {
+ fix_child_filename(name);
+ copy_file(absname, name);
+ } else
+ cerr << "Warning: Could not find file '"
+ << name << "'." << endl;
+ // write output
+ context.check_layout(os);
+ begin_inset(os, "External\n\ttemplate ");
+ os << "PDFPages\n\tfilename "
+ << name << "\n";
+ // parse the options
+ if (opts.find("pages") != opts.end())
+ os << "\textra LaTeX \"pages="
+ << opts["pages"] << "\"\n";
+ if (opts.find("angle") != opts.end())
+ os << "\trotateAngle "
+ << opts["angle"] << '\n';
+ if (opts.find("origin") != opts.end()) {
+ ostringstream ss;
+ string const opt = opts["origin"];
+ if (opt == "tl") ss << "topleft";
+ if (opt == "bl") ss << "bottomleft";
+ if (opt == "Bl") ss << "baselineleft";
+ if (opt == "c") ss << "center";
+ if (opt == "tc") ss << "topcenter";
+ if (opt == "bc") ss << "bottomcenter";
+ if (opt == "Bc") ss << "baselinecenter";
+ if (opt == "tr") ss << "topright";
+ if (opt == "br") ss << "bottomright";
+ if (opt == "Br") ss << "baselineright";
+ if (!ss.str().empty())
+ os << "\trotateOrigin " << ss.str() << '\n';
+ else
+ cerr << "Warning: Ignoring unknown includegraphics origin argument '" << opt << "'\n";
+ }
+ if (opts.find("width") != opts.end())
+ os << "\twidth "
+ << translate_len(opts["width"]) << '\n';
+ if (opts.find("height") != opts.end())
+ os << "\theight "
+ << translate_len(opts["height"]) << '\n';
+ if (opts.find("keepaspectratio") != opts.end())
+ os << "\tkeepAspectRatio\n";
+ end_inset(os);
+ context.check_layout(os);
+ registerExternalTemplatePackages("PDFPages");
+ }
+
+ else if (t.cs() == "loadgame") {
+ p.skip_spaces();
+ string name = normalize_filename(p.verbatim_item());
+ string const path = getMasterFilePath(true);
+ // We want to preserve relative / absolute filenames,
+ // therefore path is only used for testing
+ if (!makeAbsPath(name, path).exists()) {
+ // The file extension is probably missing.
+ // Now try to find it out.
+ char const * const lyxskak_format[] = {"fen", 0};
+ string const lyxskak_name =
+ find_file(name, path, lyxskak_format);
+ if (!lyxskak_name.empty())
+ name = lyxskak_name;
+ }
+ FileName const absname = makeAbsPath(name, path);
+ if (absname.exists())
+ {
+ fix_child_filename(name);
+ copy_file(absname, name);
+ } else
+ cerr << "Warning: Could not find file '"
+ << name << "'." << endl;
+ context.check_layout(os);
+ begin_inset(os, "External\n\ttemplate ");
+ os << "ChessDiagram\n\tfilename "
+ << name << "\n";
+ end_inset(os);
+ context.check_layout(os);
+ // after a \loadgame follows a \showboard
+ if (p.get_token().asInput() == "showboard")
+ p.get_token();
+ registerExternalTemplatePackages("ChessDiagram");
+ }
+
else {
// try to see whether the string is in unicodesymbols
// Only use text mode commands, since we are in text mode here,
// and math commands may be invalid (bug 6797)
+ bool termination;
docstring rem;
+ set<string> req;
docstring s = encodings.fromLaTeXCommand(from_utf8(t.asInput()),
- rem, Encodings::TEXT_CMD);
+ Encodings::TEXT_CMD, termination, rem, &req);
if (!s.empty()) {
if (!rem.empty())
- cerr << "When parsing " << t.cs()
+ cerr << "When parsing " << t.cs()
<< ", result is " << to_utf8(s)
<< "+" << to_utf8(rem) << endl;
context.check_layout(os);
os << to_utf8(s);
- skip_spaces_braces(p);
+ if (termination)
+ skip_spaces_braces(p);
+ for (set<string>::const_iterator it = req.begin(); it != req.end(); ++it)
+ preamble.registerAutomaticallyLoadedPackage(*it);
}
//cerr << "#: " << t << " mode: " << mode << endl;
// heuristic: read up to next non-nested space
/*
string s = t.asInput();
string z = p.verbatim_item();
- while (p.good() && z != " " && z.size()) {
+ while (p.good() && z != " " && !z.empty()) {
//cerr << "read: " << z << endl;
s += z;
z = p.verbatim_item();
}
}
+
+/**
+ * Guess the dominant language of the remaining input by counting
+ * characters per language. Characters outside CJK environments are
+ * credited to \p lang (the babel main language that was detected);
+ * the contents of each \begin{CJK}{<encoding>}{<mapping>} environment
+ * are credited to the LyX language matching that encoding (see
+ * supported_CJK_encodings / supported_CJK_languages in the preamble
+ * section). The language with the highest character count is returned.
+ * Note: the parser is consumed up to the end of input by this scan.
+ */
+string guessLanguage(Parser & p, string const & lang)
+{
+	typedef std::map<std::string, size_t> LangMap;
+	// map from language names to number of characters
+	LangMap used;
+	// Seed the candidate set with the babel main language and all
+	// supported CJK languages, each starting at a zero count.
+	used[lang] = 0;
+	for (char const * const * i = supported_CJK_languages; *i; i++)
+		used[string(*i)] = 0;
+
+	while (p.good()) {
+		Token const t = p.get_token();
+		// comments are not counted for any language
+		if (t.cat() == catComment)
+			continue;
+		// commands are not counted as well, but we need to detect
+		// \begin{CJK} and switch encoding if needed
+		if (t.cat() == catEscape) {
+			if (t.cs() == "inputencoding") {
+				// honour \inputencoding so that the bytes
+				// that follow are decoded correctly
+				string const enc = subst(p.verbatim_item(), "\n", " ");
+				p.setEncoding(enc);
+				continue;
+			}
+			if (t.cs() != "begin")
+				continue;
+		} else {
+			// Non-CJK content is counted for lang.
+			// We do not care about the real language here:
+			// If we have more non-CJK contents than CJK contents,
+			// we simply use the language that was specified as
+			// babel main language.
+			used[lang] += t.asInput().length();
+			continue;
+		}
+		// Now we are starting an environment
+		// Peek at the environment name without committing, so that
+		// a non-CJK name can be rewound and counted as plain text.
+		p.pushPosition();
+		string const name = p.getArg('{', '}');
+		if (name != "CJK") {
+			p.popPosition();
+			continue;
+		}
+		// It is a CJK environment
+		// Rewind and re-read the arguments for real this time:
+		// \begin{CJK}{<encoding>}{<mapping>}
+		p.popPosition();
+		/* name = */ p.getArg('{', '}');
+		string const encoding = p.getArg('{', '}');
+		/* mapping = */ p.getArg('{', '}');
+		string const encoding_old = p.getEncoding();
+		char const * const * const where =
+			is_known(encoding, supported_CJK_encodings);
+		if (where)
+			p.setEncoding(encoding);
+		else
+			// unknown CJK encoding: read the body as utf8 just to
+			// skip over it (its contents are ignored below anyway)
+			p.setEncoding("utf8");
+		string const text = p.verbatimEnvironment("CJK");
+		p.setEncoding(encoding_old);
+		p.skip_spaces();
+		if (!where) {
+			// ignore contents in unknown CJK encoding
+			continue;
+		}
+		// the language of the text
+		// (the two arrays are kept in sync line by line, so the
+		// offset into one indexes the other)
+		string const cjk =
+			supported_CJK_languages[where - supported_CJK_encodings];
+		used[cjk] += text.length();
+	}
+	// Pick the language with the largest character count. On a tie the
+	// first key in std::map order (alphabetical) wins, since we only
+	// replace the current best on a strictly greater count.
+	LangMap::const_iterator use = used.begin();
+	for (LangMap::const_iterator it = used.begin(); it != used.end(); ++it) {
+		if (it->second > use->second)
+			use = it;
+	}
+	return use->first;
+}
+
// }])