#include "LyX.h"
#include "LyXRC.h"
#include "LyXVC.h"
-#include "output_docbook.h"
#include "output.h"
#include "output_latex.h"
-#include "output_xhtml.h"
+#include "output_docbook.h"
#include "output_plaintext.h"
+#include "output_xhtml.h"
#include "Paragraph.h"
#include "ParagraphParameters.h"
#include "ParIterator.h"
#include "PDFOptions.h"
#include "Session.h"
#include "SpellChecker.h"
-#include "sgml.h"
+#include "xml.h"
#include "texstream.h"
#include "TexRow.h"
#include "Text.h"
#include "graphics/GraphicsCache.h"
#include "graphics/PreviewLoader.h"
+#include "frontends/Application.h"
#include "frontends/alert.h"
#include "frontends/Delegates.h"
#include "frontends/WorkAreaManager.h"
#include "support/textutils.h"
#include "support/types.h"
-#include "support/bind.h"
-
#include <algorithm>
#include <fstream>
#include <iomanip>
int const LYX_FORMAT = LYX_FORMAT_LYX;
typedef map<string, bool> DepClean;
-typedef map<docstring, pair<InsetLabel const *, Buffer::References> > RefCache;
-} // namespace
+// Information about labels and their associated refs
+struct LabelInfo {
+ /// label string
+ docstring label;
+ /// label inset
+ InsetLabel const * inset;
+ /// associated references cache
+ Buffer::References references;
+ /// whether this label is active (i.e., not deleted)
+ bool active;
+};
+
+typedef vector<LabelInfo> LabelCache;
+typedef map<docstring, Buffer::References> RefCache;
// A storehouse for the cloned buffers.
-list<CloneList *> cloned_buffers;
+typedef list<CloneList_ptr> CloneStore;
+CloneStore cloned_buffers;
+
+} // namespace
+
class Buffer::Impl
/// need to regenerate .tex?
DepClean dep_clean;
- /// is save needed?
- mutable bool lyx_clean;
-
- /// is autosave needed?
- mutable bool bak_clean;
-
- /// is this an unnamed file (New...)?
- bool unnamed;
-
- /// is this an internal bufffer?
- bool internal_buffer;
-
- /// buffer is r/o
- bool read_only;
-
/// name of the file the buffer is associated with.
FileName filename;
- /** Set to true only when the file is fully loaded.
- * Used to prevent the premature generation of previews
- * and by the citation inset.
- */
- bool file_fully_loaded;
-
- /// original format of loaded file
- int file_format;
-
- /// if the file was originally loaded from an older format, do
- /// we need to back it up still?
- bool need_format_backup;
-
- /// Ignore the parent (e.g. when exporting a child standalone)?
- bool ignore_parent;
-
///
mutable TocBackend toc_backend;
/// map from the macro name to the position map,
/// which maps the macro definition position to the scope and the MacroData.
NamePositionScopeMacroMap macros;
- /// This seem to change the way Buffer::getMacro() works
- mutable bool macro_lock;
/// positions of child buffers in the buffer
typedef map<Buffer const * const, DocIterator> BufferPositionMap;
struct ScopeBuffer {
- ScopeBuffer() : buffer(0) {}
+ ScopeBuffer() : buffer(nullptr) {}
ScopeBuffer(DocIterator const & s, Buffer const * b)
: scope(s), buffer(b) {}
DocIterator scope;
/// A cache for the bibfiles (including bibfiles of loaded child
/// documents), needed for appropriate update of natbib labels.
- mutable FileNamePairList bibfiles_cache_;
+ mutable docstring_list bibfiles_cache_;
// FIXME The caching mechanism could be improved. At present, we have a
// cache for each Buffer, that caches all the bibliography info for that
// file, and then to construct the Buffer's bibinfo from that.
/// A cache for bibliography info
mutable BiblioInfo bibinfo_;
- /// whether the bibinfo cache is valid
- mutable bool bibinfo_cache_valid_;
/// Cache of timestamps of .bib files
map<FileName, time_t> bibfile_status_;
- /// Indicates whether the bibinfo has changed since the last time
- /// we ran updateBuffer(), i.e., whether citation labels may need
- /// to be updated.
- mutable bool cite_labels_valid_;
/// These two hold the file name and format, written to by
/// Buffer::preview and read from by LFUN_BUFFER_VIEW_CACHE.
FileName preview_file_;
string preview_format_;
- /// If there was an error when previewing, on the next preview we do
- /// a fresh compile (e.g. in case the user installed a package that
- /// was missing).
- bool preview_error_;
+ /// Cache the references associated to a label and their positions
+ /// in the buffer.
mutable RefCache ref_cache_;
+ /// Cache the label insets and their activity status.
+ mutable LabelCache label_cache_;
/// our Text that should be wrapped in an InsetText
InsetText * inset;
///
PreviewLoader * preview_loader_;
+ /// If non zero, this buffer is a clone of existing buffer \p cloned_buffer_
+ /// This one is useful for preview detached in a thread.
+ Buffer const * cloned_buffer_;
+ ///
+ CloneList_ptr clone_list_;
+
+ ///
+ std::list<Buffer const *> include_list_;
+private:
+ /// So we can force access via the accessors.
+ mutable Buffer const * parent_buffer;
+
+ FileMonitorPtr file_monitor_;
+
+/// ints and bools are all listed last so as to avoid alignment issues
+public:
+ /// original format of loaded file
+ int file_format;
+
+ /// are we in the process of exporting this buffer?
+ mutable bool doing_export;
+
+ /// If there was an error when previewing, on the next preview we do
+ /// a fresh compile (e.g. in case the user installed a package that
+ /// was missing).
+ bool require_fresh_start_;
+
+ /// Indicates whether the bibinfo has changed since the last time
+ /// we ran updateBuffer(), i.e., whether citation labels may need
+ /// to be updated.
+ mutable bool cite_labels_valid_;
+ /// Do we have a bibliography environment?
+ mutable bool have_bibitems_;
+
+ /// is save needed?
+ mutable bool lyx_clean;
+
+ /// is autosave needed?
+ mutable bool bak_clean;
+
+ /// is this an unnamed file (New...)?
+ bool unnamed;
+
+	/// is this an internal buffer?
+ bool internal_buffer;
+
+ /// buffer is r/o
+ bool read_only;
+
+ /** Set to true only when the file is fully loaded.
+ * Used to prevent the premature generation of previews
+ * and by the citation inset.
+ */
+ bool file_fully_loaded;
+
+ /// if the file was originally loaded from an older format, do
+ /// we need to back it up still?
+ bool need_format_backup;
+
+ /// Ignore the parent (e.g. when exporting a child standalone)?
+ bool ignore_parent;
+
+	/// This seems to change the way Buffer::getMacro() works
+ mutable bool macro_lock;
+
+ /// has been externally modified? Can be reset by the user.
+ mutable bool externally_modified_;
+
+ /// whether the bibinfo cache is valid
+ mutable bool bibinfo_cache_valid_;
+
+private:
+ int word_count_;
+ int char_count_;
+ int blank_count_;
+
+public:
/// This is here to force the test to be done whenever parent_buffer
/// is accessed.
Buffer const * parent() const
// ignore_parent temporarily "orphans" a buffer
// (e.g. if a child is compiled standalone)
if (ignore_parent)
- return 0;
+ return nullptr;
// if parent_buffer is not loaded, then it has been unloaded,
// which means that parent_buffer is an invalid pointer. So we
// set it to null in that case.
// they will always be regarded as unloaded. in that case, we hope
// for the best.
if (!cloned_buffer_ && !theBufferList().isLoaded(parent_buffer))
- parent_buffer = 0;
+ parent_buffer = nullptr;
return parent_buffer;
}
parent_buffer->invalidateBibinfoCache();
}
- /// If non zero, this buffer is a clone of existing buffer \p cloned_buffer_
- /// This one is useful for preview detached in a thread.
- Buffer const * cloned_buffer_;
- ///
- CloneList * clone_list_;
- /// are we in the process of exporting this buffer?
- mutable bool doing_export;
-
/// compute statistics
/// \p from initial position
/// \p to points to the end position
+ (with_blanks ? blank_count_ : 0);
}
- // does the buffer contain tracked changes? (if so, we automatically
- // display the review toolbar, for instance)
- mutable bool tracked_changes_present_;
-
// Make sure the file monitor monitors the good file.
void refreshFileMonitor();
/// Notify or clear of external modification
void fileExternallyModified(bool exists);
- /// has been externally modified? Can be reset by the user.
- mutable bool externally_modified_;
-
-private:
- /// So we can force access via the accessors.
- mutable Buffer const * parent_buffer;
-
- int word_count_;
- int char_count_;
- int blank_count_;
-
- FileMonitorPtr file_monitor_;
+	/// Binding LaTeX lines with buffer positions.
+	// Common routine for LaTeX and Reference errors listing.
+ void traverseErrors(TeXErrors::Errors::const_iterator err,
+ TeXErrors::Errors::const_iterator end,
+ ErrorList & errorList) const;
};
Buffer::Impl::Impl(Buffer * owner, FileName const & file, bool readonly_,
Buffer const * cloned_buffer)
- : owner_(owner), lyx_clean(true), bak_clean(true), unnamed(false),
- internal_buffer(false), read_only(readonly_), filename(file),
- file_fully_loaded(false), file_format(LYX_FORMAT), need_format_backup(false),
- ignore_parent(false), toc_backend(owner), macro_lock(false),
- checksum_(0), wa_(0), gui_(0), undo_(*owner), bibinfo_cache_valid_(false),
- cite_labels_valid_(false), preview_error_(false),
- inset(0), preview_loader_(0), cloned_buffer_(cloned_buffer),
- clone_list_(0), doing_export(false),
- tracked_changes_present_(0), externally_modified_(false), parent_buffer(0),
+ : owner_(owner), filename(file), toc_backend(owner), checksum_(0),
+ wa_(nullptr), gui_(nullptr), undo_(*owner), inset(nullptr),
+ preview_loader_(nullptr), cloned_buffer_(cloned_buffer),
+ clone_list_(nullptr), parent_buffer(nullptr), file_format(LYX_FORMAT),
+ doing_export(false), require_fresh_start_(false), cite_labels_valid_(false),
+ have_bibitems_(false), lyx_clean(true), bak_clean(true), unnamed(false),
+ internal_buffer(false), read_only(readonly_), file_fully_loaded(false),
+ need_format_backup(false), ignore_parent(false), macro_lock(false),
+ externally_modified_(false), bibinfo_cache_valid_(false),
word_count_(0), char_count_(0), blank_count_(0)
{
refreshFileMonitor();
if (!cloned_buffer_) {
temppath = createBufferTmpDir();
lyxvc.setBuffer(owner_);
+ Language const * inplang = theApp() ?
+ languages.getFromCode(theApp()->inputLanguageCode())
+ : nullptr;
+ if (inplang)
+ params.language = inplang;
if (use_gui)
wa_ = new frontend::WorkAreaManager;
return;
bibinfo_cache_valid_ = cloned_buffer_->d->bibinfo_cache_valid_;
bibfile_status_ = cloned_buffer_->d->bibfile_status_;
cite_labels_valid_ = cloned_buffer_->d->cite_labels_valid_;
+ have_bibitems_ = cloned_buffer_->d->have_bibitems_;
unnamed = cloned_buffer_->d->unnamed;
internal_buffer = cloned_buffer_->d->internal_buffer;
layout_position = cloned_buffer_->d->layout_position;
preview_file_ = cloned_buffer_->d->preview_file_;
preview_format_ = cloned_buffer_->d->preview_format_;
- preview_error_ = cloned_buffer_->d->preview_error_;
- tracked_changes_present_ = cloned_buffer_->d->tracked_changes_present_;
+ require_fresh_start_ = cloned_buffer_->d->require_fresh_start_;
}
// saved properly, before it goes into the void.
// GuiView already destroyed
- d->gui_ = 0;
+ d->gui_ = nullptr;
if (isInternal()) {
// No need to do additional cleanups for internal buffer.
// loop over children
for (auto const & p : d->children_positions) {
Buffer * child = const_cast<Buffer *>(p.first);
- if (d->clone_list_->erase(child))
- delete child;
+ if (d->clone_list_->erase(child))
+ delete child;
}
// if we're the master buffer, then we should get rid of the list
// of clones
// children still has a reference to this list. But we will try to
// continue, rather than shut down.
LATTEST(d->clone_list_->empty());
- list<CloneList *>::iterator it =
+ // The clone list itself is empty, but it's still referenced in our list
+ // of clones. So let's find it and remove it.
+ CloneStore::iterator it =
find(cloned_buffers.begin(), cloned_buffers.end(), d->clone_list_);
if (it == cloned_buffers.end()) {
// We will leak in this case, but it is safe to continue.
LATTEST(false);
} else
cloned_buffers.erase(it);
- delete d->clone_list_;
}
// FIXME Do we really need to do this right before we delete d?
// clear references to children in macro tables
Buffer * child = const_cast<Buffer *>(p.first);
if (theBufferList().isLoaded(child)) {
if (theBufferList().isOthersChild(this, child))
- child->setParent(0);
+ child->setParent(nullptr);
else
theBufferList().release(child);
}
}
-Buffer * Buffer::cloneFromMaster() const
+Buffer * Buffer::cloneWithChildren() const
{
BufferMap bufmap;
- cloned_buffers.push_back(new CloneList);
- CloneList * clones = cloned_buffers.back();
+ cloned_buffers.emplace_back(new CloneList);
+ CloneList_ptr clones = cloned_buffers.back();
- masterBuffer()->cloneWithChildren(bufmap, clones);
+ cloneWithChildren(bufmap, clones);
// make sure we got cloned
BufferMap::const_iterator bit = bufmap.find(this);
- LASSERT(bit != bufmap.end(), return 0);
+ LASSERT(bit != bufmap.end(), return nullptr);
Buffer * cloned_buffer = bit->second;
return cloned_buffer;
}
-void Buffer::cloneWithChildren(BufferMap & bufmap, CloneList * clones) const
+void Buffer::cloneWithChildren(BufferMap & bufmap, CloneList_ptr clones) const
{
// have we already been cloned?
if (bufmap.find(this) != bufmap.end())
Buffer * Buffer::cloneBufferOnly() const {
- cloned_buffers.push_back(new CloneList);
- CloneList * clones = cloned_buffers.back();
+ cloned_buffers.emplace_back(new CloneList);
+ CloneList_ptr clones = cloned_buffers.back();
Buffer * buffer_clone = new Buffer(fileName().absFileName(), false, this);
// The clone needs its own DocumentClass, since running updateBuffer() will
if (branch_suffix.empty())
return filename;
- string const name = filename.onlyFileNameWithoutExt()
- + to_utf8(branch_suffix);
+ string const name = addExtension(filename.onlyFileNameWithoutExt()
+ + to_utf8(branch_suffix), filename.extension());
FileName res(filename.onlyPath().absFileName() + "/" + name);
- res.changeExtension(filename.extension());
return res;
}
params().options.erase();
params().master.erase();
params().float_placement.erase();
+ params().float_alignment.erase();
params().paperwidth.erase();
params().paperheight.erase();
params().leftmargin.erase();
params().headsep.erase();
params().footskip.erase();
params().columnsep.erase();
+ params().font_roman_opts.erase();
+ params().font_sans_opts.erase();
+ params().font_typewriter_opts.erase();
params().fonts_cjk.erase();
params().listings_params.clear();
params().clearLayoutModules();
params().biblatex_bibstyle.erase();
params().biblatex_citestyle.erase();
params().multibib.erase();
+ params().lineno_opts.clear();
for (int i = 0; i < 4; ++i) {
params().user_defined_bullet(i) = ITEMIZE_DEFAULTS[i];
readHeader(lex);
if (params().output_changes) {
- bool dvipost = LaTeXFeatures::isAvailable("dvipost");
bool xcolorulem = LaTeXFeatures::isAvailable("ulem") &&
LaTeXFeatures::isAvailable("xcolor");
- if (!dvipost && !xcolorulem) {
+ if (!xcolorulem) {
Alert::warning(_("Changes not shown in LaTeX output"),
_("Changes will not be highlighted in LaTeX output, "
- "because neither dvipost nor xcolor/ulem are installed.\n"
- "Please install these packages or redefine "
- "\\lyxadded and \\lyxdeleted in the LaTeX preamble."));
- } else if (!xcolorulem) {
- Alert::warning(_("Changes not shown in LaTeX output"),
- _("Changes will not be highlighted in LaTeX output "
- "when using pdflatex, because xcolor and ulem are not installed.\n"
+ "because xcolor and ulem are not installed.\n"
"Please install both packages or redefine "
"\\lyxadded and \\lyxdeleted in the LaTeX preamble."));
}
<< absFileName()
<< ") does not include "
"this document. Ignoring the master assignment.");
+ // If the master has just been created, un-hide it (#11162)
+ if (!master->fileName().exists())
+ lyx::dispatch(FuncRequest(LFUN_BUFFER_SWITCH,
+ master->absFileName()));
}
}
}
d->old_position = params().origin;
else
d->old_position = filePath();
+
+ if (!parent())
+ clearIncludeList();
+
bool const res = text().read(lex, errorList, d->inset);
d->old_position.clear();
return false;
FileName const lyx = tempFileName("Buffer_importFileXXXXXX.lyx");
- if (theConverters().convert(0, name, lyx, name, format, "lyx", errorList)) {
+ Converters::RetVal const retval =
+ theConverters().convert(nullptr, name, lyx, name, format, "lyx", errorList);
+ if (retval == Converters::SUCCESS) {
bool const success = readFile(lyx) == ReadSuccess;
removeTempFile(lyx);
return success;
}
-bool Buffer::lastPreviewError() const
+bool Buffer::freshStartRequired() const
+{
+ return d->require_fresh_start_;
+}
+
+void Buffer::requireFreshStart(bool const b) const
{
- return d->preview_error_;
+ d->require_fresh_start_ = b;
}
PreviewLoader * Buffer::loader() const
{
if (!isExporting() && lyxrc.preview == LyXRC::PREVIEW_OFF)
- return 0;
+ return nullptr;
if (!d->preview_loader_)
d->preview_loader_ = new PreviewLoader(*this);
return d->preview_loader_;
void Buffer::removePreviews() const
{
delete d->preview_loader_;
- d->preview_loader_ = 0;
+ d->preview_loader_ = nullptr;
}
FileName Buffer::getBackupName() const {
+ map<int, string> const file_formats = {
+ {544, "23"},
+ {508, "22"},
+ {474, "21"},
+ {413, "20"},
+ {345, "16"},
+ {276, "15"},
+ {245, "14"},
+ {221, "13"},
+ {220, "12"},
+ {218, "1163"},
+ {217, "116"},
+ {216, "115"},
+ {215, "11"},
+ {210, "010"},
+ {200, "006"}
+ };
FileName const & fn = fileName();
string const fname = fn.onlyFileNameWithoutExt();
string const fext = fn.extension() + "~";
string const fpath = lyxrc.backupdir_path.empty() ?
fn.onlyPath().absFileName() :
lyxrc.backupdir_path;
- string const fform = convert<string>(d->file_format);
- string const backname = fname + "-lyxformat-" + fform;
+ string backup_suffix;
+ // If file format is from a stable series use version instead of file format
+ auto const it = file_formats.find(d->file_format);
+ if (it != file_formats.end())
+ backup_suffix = "-lyx" + it->second;
+ else
+ backup_suffix = "-lyxformat-" + convert<string>(d->file_format);
+ string const backname = fname + backup_suffix;
FileName backup(addName(fpath, addExtension(backname, fext)));
// limit recursion, just in case
FileName savefile(tempfile->name());
LYXERR(Debug::FILES, "Saving to " << savefile.absFileName());
+ if (!savefile.clonePermissions(fileName()))
+ LYXERR0("Failed to clone the permission from " << fileName().absFileName() << " to " << savefile.absFileName());
+
if (!writeFile(savefile))
return false;
}
-bool Buffer::makeLaTeXFile(FileName const & fname,
+Buffer::ExportStatus Buffer::makeLaTeXFile(FileName const & fname,
string const & original_path,
OutputParams const & runparams_in,
OutputWhat output) const
{
OutputParams runparams = runparams_in;
- // XeTeX with TeX fonts is only safe with ASCII encoding (see also #9740),
- // Check here, because the "flavor" is not known in BufferParams::encoding()
- // (power users can override this safety measure selecting "utf8-plain").
- if (!params().useNonTeXFonts && (runparams.flavor == OutputParams::XETEX)
- && (runparams.encoding->name() != "utf8-plain"))
- runparams.encoding = encodings.fromLyXName("ascii");
-
string const encoding = runparams.encoding->iconvName();
LYXERR(Debug::LATEX, "makeLaTeXFile encoding: " << encoding << ", fname=" << fname.realPath());
try { ofs.reset(encoding); }
catch (iconv_codecvt_facet_exception const & e) {
lyxerr << "Caught iconv exception: " << e.what() << endl;
- Alert::error(_("Iconv software exception Detected"), bformat(_("Please "
- "verify that the support software for your encoding (%1$s) is "
- "properly installed"), from_ascii(encoding)));
- return false;
+ Alert::error(_("Iconv software exception Detected"),
+ bformat(_("Please verify that the `iconv' support software is"
+ " properly installed and supports the selected encoding"
+ " (%1$s), or change the encoding in"
+ " Document>Settings>Language."), from_ascii(encoding)));
+ return ExportError;
}
if (!openFileWrite(ofs, fname))
- return false;
+ return ExportError;
ErrorList & errorList = d->errorLists["Export"];
errorList.clear();
- bool failed_export = false;
+ ExportStatus status = ExportSuccess;
otexstream os(ofs);
// make sure we are ready to export
updateBuffer();
updateMacroInstances(OutputUpdate);
+ ExportStatus retval;
try {
- writeLaTeXSource(os, original_path, runparams, output);
+ retval = writeLaTeXSource(os, original_path, runparams, output);
+ if (retval == ExportKilled)
+ return ExportKilled;
}
catch (EncodingException const & e) {
docstring const failed(1, e.failed_char);
"Changing the document encoding to utf8 could help."),
{e.par_id, e.pos}, {e.par_id, e.pos + 1}));
}
- failed_export = true;
+ status = ExportError;
}
catch (iconv_codecvt_facet_exception const & e) {
errorList.push_back(ErrorItem(_("iconv conversion failed"),
_(e.what())));
- failed_export = true;
+ status = ExportError;
}
catch (exception const & e) {
errorList.push_back(ErrorItem(_("conversion failed"),
_(e.what())));
lyxerr << e.what() << endl;
- failed_export = true;
+ status = ExportError;
}
catch (...) {
lyxerr << "Caught some really weird exception..." << endl;
ofs.close();
if (ofs.fail()) {
- failed_export = true;
+ status = ExportError;
lyxerr << "File '" << fname << "' was not closed properly." << endl;
}
errorList.clear();
else
errors("Export");
- return !failed_export;
+ return status;
}
-void Buffer::writeLaTeXSource(otexstream & os,
+Buffer::ExportStatus Buffer::writeLaTeXSource(otexstream & os,
string const & original_path,
OutputParams const & runparams_in,
OutputWhat output) const
OutputParams runparams = runparams_in;
- // XeTeX with TeX fonts is only safe with ASCII encoding,
- // Check here, because the "flavor" is not known in BufferParams::encoding()
- // (power users can override this safety measure selecting "utf8-plain").
- if (!params().useNonTeXFonts && (runparams.flavor == OutputParams::XETEX)
- && (runparams.encoding->name() != "utf8-plain"))
- runparams.encoding = encodings.fromLyXName("ascii");
- // FIXME: when only the current paragraph is shown, this is ignored
- // (or not reached) and characters encodable in the current
- // encoding are not converted to ASCII-representation.
+ // Some macros rely on font encoding
+ runparams.main_fontenc = params().main_font_encoding();
// If we are compiling a file standalone, even if this is the
// child of some other buffer, let's cut the link here, so the
LaTeXFeatures features(*this, params(), runparams);
validate(features);
// This is only set once per document (in master)
- if (!runparams.is_child)
+ if (!runparams.is_child) {
runparams.use_polyglossia = features.usePolyglossia();
+ runparams.use_hyperref = features.isRequired("hyperref");
+ runparams.use_CJK = features.mustProvide("CJK");
+ }
LYXERR(Debug::LATEX, " Buffer validation done.");
bool const output_preamble =
"file path name."),
inputpath, uncodable_glyphs));
} else {
- string docdir =
- latex_path(original_path);
+ string docdir = os::latex_path(original_path);
if (contains(docdir, '#')) {
docdir = subst(docdir, "#", "\\#");
os << "\\catcode`\\#=11"
os << "\\catcode`\\%=11"
"\\def\\%{%}\\catcode`\\%=14\n";
}
+ if (contains(docdir, '~'))
+ docdir = subst(docdir, "~", "\\string~");
+ bool const nonascii = !isAscii(from_utf8(docdir));
+ // LaTeX 2019/10/01 handles non-ascii path without detokenize
+ bool const utfpathlatex = features.isAvailable("LaTeX-2019/10/01");
+ bool const detokenize = !utfpathlatex && nonascii;
+ bool const quote = contains(docdir, ' ');
+ if (utfpathlatex && nonascii)
+ os << "\\UseRawInputEncoding\n";
os << "\\makeatletter\n"
- << "\\def\\input@path{{"
- << docdir << "}}\n"
+ << "\\def\\input@path{{";
+ if (detokenize)
+ os << "\\detokenize{";
+ if (quote)
+ os << "\"";
+ os << docdir;
+ if (quote)
+ os << "\"";
+ if (detokenize)
+ os << "}";
+ os << "}}\n"
<< "\\makeatother\n";
}
}
runparams.use_babel = params().writeLaTeX(os, features,
d->filename.onlyPath());
+ // Active characters
+ runparams.active_chars = features.getActiveChars();
+
// Biblatex bibliographies are loaded here
if (params().useBiblatex()) {
- vector<docstring> const bibfiles =
+ vector<pair<docstring, string>> const bibfiles =
prepareBibFilePaths(runparams, getBibfiles(), true);
- for (docstring const & file: bibfiles)
- os << "\\addbibresource{" << file << "}\n";
+ for (pair<docstring, string> const & file: bibfiles) {
+ os << "\\addbibresource";
+ if (!file.second.empty())
+ os << "[bibencoding=" << file.second << "]";
+ os << "{" << file.first << "}\n";
+ }
}
if (!runparams.dryrun && features.hasPolyglossiaExclusiveLanguages()
// Restore the parenthood if needed
if (!runparams.is_child)
d->ignore_parent = false;
- return;
+ return ExportSuccess;
}
// make the body.
LYXERR(Debug::INFO, "preamble finished, now the body.");
// the real stuff
- latexParagraphs(*this, text(), os, runparams);
+ try {
+ latexParagraphs(*this, text(), os, runparams);
+ }
+ catch (ConversionException const &) { return ExportKilled; }
// Restore the parenthood if needed
if (!runparams.is_child)
LYXERR(Debug::INFO, "Finished making LaTeX file.");
LYXERR(Debug::INFO, "Row count was " << os.texrow().rows() - 1 << '.');
+ return ExportSuccess;
}
-void Buffer::makeDocBookFile(FileName const & fname,
+Buffer::ExportStatus Buffer::makeDocBookFile(FileName const & fname,
OutputParams const & runparams,
OutputWhat output) const
{
ofdocstream ofs;
if (!openFileWrite(ofs, fname))
- return;
+ return ExportError;
// make sure we are ready to export
// this needs to be done before we validate
updateBuffer();
updateMacroInstances(OutputUpdate);
- writeDocBookSource(ofs, fname.absFileName(), runparams, output);
+ ExportStatus const retval =
+ writeDocBookSource(ofs, runparams, output);
+ if (retval == ExportKilled)
+ return ExportKilled;
ofs.close();
if (ofs.fail())
lyxerr << "File '" << fname << "' was not closed properly." << endl;
+ return ExportSuccess;
}
-void Buffer::writeDocBookSource(odocstream & os, string const & fname,
+Buffer::ExportStatus Buffer::writeDocBookSource(odocstream & os,
OutputParams const & runparams,
OutputWhat output) const
{
LaTeXFeatures features(*this, params(), runparams);
validate(features);
+ d->bibinfo_.makeCitationLabels(*this);
d->texrow.reset();
DocumentClass const & tclass = params().documentClass();
- string const & top_element = tclass.latexname();
bool const output_preamble =
output == FullSource || output == OnlyPreamble;
bool const output_body =
output == FullSource || output == OnlyBody;
- if (output_preamble) {
- if (runparams.flavor == OutputParams::XML)
- os << "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n";
-
- // FIXME UNICODE
- os << "<!DOCTYPE " << from_ascii(top_element) << ' ';
+ XMLStream xs(os);
- // FIXME UNICODE
- if (! tclass.class_header().empty())
- os << from_ascii(tclass.class_header());
- else if (runparams.flavor == OutputParams::XML)
- os << "PUBLIC \"-//OASIS//DTD DocBook XML V4.2//EN\" "
- << "\"https://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd\"";
- else
- os << " PUBLIC \"-//OASIS//DTD DocBook V4.2//EN\"";
-
- docstring preamble = params().preamble;
- if (runparams.flavor != OutputParams::XML ) {
- preamble += "<!ENTITY % output.print.png \"IGNORE\">\n";
- preamble += "<!ENTITY % output.print.pdf \"IGNORE\">\n";
- preamble += "<!ENTITY % output.print.eps \"IGNORE\">\n";
- preamble += "<!ENTITY % output.print.bmp \"IGNORE\">\n";
- }
+ if (output_preamble) {
+ // XML preamble, no doctype needed.
+ // Not using XMLStream for this, as the root tag would be in the tag stack and make troubles with the error
+ // detection mechanisms (these are called before the end tag is output, and thus interact with the canary
+ // parsep in output_docbook.cpp).
+ os << "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
+ << "<!-- This DocBook file was created by LyX " << lyx_version
+ << "\n See http://www.lyx.org/ for more information -->\n";
- string const name = runparams.nice
- ? changeExtension(absFileName(), ".sgml") : fname;
- preamble += features.getIncludedFiles(name);
- preamble += features.getLyXSGMLEntities();
+ // Directly output the root tag, based on the current type of document.
+ string languageCode = params().language->code();
+ string params = "xml:lang=\"" + languageCode + '"'
+ + " xmlns=\"http://docbook.org/ns/docbook\""
+ + " xmlns:xlink=\"http://www.w3.org/1999/xlink\""
+ + " xmlns:m=\"http://www.w3.org/1998/Math/MathML\""
+ + " xmlns:xi=\"http://www.w3.org/2001/XInclude\""
+ + " version=\"5.2\"";
- if (!preamble.empty()) {
- os << "\n [ " << preamble << " ]";
- }
- os << ">\n\n";
+ os << "<" << from_ascii(tclass.docbookroot()) << " " << from_ascii(params) << ">\n";
}
if (output_body) {
- string top = top_element;
- top += " lang=\"";
- if (runparams.flavor == OutputParams::XML)
- top += params().language->code();
- else
- top += params().language->code().substr(0, 2);
- top += '"';
-
- if (!params().options.empty()) {
- top += ' ';
- top += params().options;
- }
-
- os << "<!-- " << ((runparams.flavor == OutputParams::XML)? "XML" : "SGML")
- << " file was created by LyX " << lyx_version
- << "\n See https://www.lyx.org/ for more information -->\n";
-
params().documentClass().counters().reset();
- sgml::openTag(os, top);
- os << '\n';
- docbookParagraphs(text(), *this, os, runparams);
- sgml::closeTag(os, top_element);
+ // Start to output the document.
+ docbookParagraphs(text(), *this, xs, runparams);
+ }
+
+ if (output_preamble) {
+ // Close the root element.
+ os << "\n</" << from_ascii(tclass.docbookroot()) << ">";
}
+ return ExportSuccess;
}
-void Buffer::makeLyXHTMLFile(FileName const & fname,
+Buffer::ExportStatus Buffer::makeLyXHTMLFile(FileName const & fname,
OutputParams const & runparams) const
{
LYXERR(Debug::LATEX, "makeLyXHTMLFile...");
ofdocstream ofs;
if (!openFileWrite(ofs, fname))
- return;
+ return ExportError;
// make sure we are ready to export
// this has to be done before we validate
updateBuffer(UpdateMaster, OutputUpdate);
updateMacroInstances(OutputUpdate);
- writeLyXHTMLSource(ofs, runparams, FullSource);
+ ExportStatus const retval = writeLyXHTMLSource(ofs, runparams, FullSource);
+ if (retval == ExportKilled)
+ return retval;
ofs.close();
if (ofs.fail())
lyxerr << "File '" << fname << "' was not closed properly." << endl;
+ return retval;
}
-void Buffer::writeLyXHTMLSource(odocstream & os,
+Buffer::ExportStatus Buffer::writeLyXHTMLSource(odocstream & os,
OutputParams const & runparams,
OutputWhat output) const
{
os << "<title>"
<< (doctitle.empty() ?
from_ascii("LyX Document") :
- html::htmlize(doctitle, XHTMLStream::ESCAPE_ALL))
+ xml::escapeString(doctitle, XMLStream::ESCAPE_ALL))
<< "</title>\n";
docstring styles = features.getTClassHTMLPreamble();
if (!styles.empty())
os << "\n<!-- Text Class Preamble -->\n" << styles << '\n';
- styles = features.getPreambleSnippets().str;
- if (!styles.empty())
- os << "\n<!-- Preamble Snippets -->\n" << styles << '\n';
-
// we will collect CSS information in a stream, and then output it
// either here, as part of the header, or else in a separate file.
odocstringstream css;
bool const output_body_tag = (output != IncludedFile);
if (output_body_tag)
os << "<body dir=\"auto\">\n";
- XHTMLStream xs(os);
+ XMLStream xs(os);
if (output != IncludedFile)
// if we're an included file, the counters are in the master.
params().documentClass().counters().reset();
- xhtmlParagraphs(text(), *this, xs, runparams);
+ try {
+ xhtmlParagraphs(text(), *this, xs, runparams);
+ }
+ catch (ConversionException const &) { return ExportKilled; }
if (output_body_tag)
os << "</body>\n";
}
if (output_preamble)
os << "</html>\n";
+
+ return ExportSuccess;
}
runparams.flavor = OutputParams::LATEX;
runparams.nice = false;
runparams.linelen = lyxrc.plaintext_linelen;
- makeLaTeXFile(FileName(name), org_path, runparams);
+ ExportStatus const retval =
+ makeLaTeXFile(FileName(name), org_path, runparams);
+ if (retval != ExportSuccess) {
+ // error code on failure
+ return -1;
+ }
TeXErrors terr;
Chktex chktex(lyxrc.chktex_command, onlyFileName(name), filePath());
if (!features.runparams().is_child)
params().validate(features);
+ if (!parent())
+ clearIncludeList();
+
for (Paragraph const & p : paragraphs())
p.validate(features);
}
-FileNamePairList const & Buffer::getBibfiles(UpdateScope scope) const
+// Return the cached list of bibliography file identifiers. For a child
+// document (unless scope is UpdateChildOnly) the master's cache is used.
+docstring_list const & Buffer::getBibfiles(UpdateScope scope) const
{
// FIXME This is probably unnecessary, given where we call this.
// If this is a child document, use the master instead.
Buffer const * const pbuf = masterBuffer();
if (pbuf != this && scope != UpdateChildOnly)
return pbuf->getBibfiles();
+
+// In 2.3.x, we have:
+//if (!d->bibfile_cache_valid_)
+// this->updateBibfilesCache(scope);
+// I think that is a leftover, but there have been so many back-
+// and-forths with this, due to Windows issues, that I am not sure.
+
return d->bibfiles_cache_;
}
}
-void Buffer::registerBibfiles(FileNamePairList const & bf) const {
+// Add each entry of bf to this buffer's bibfiles cache, skipping
+// duplicates; the call is also forwarded (see comment below).
+void Buffer::registerBibfiles(const docstring_list & bf) const
+{
// We register the bib files in the master buffer,
// if there is one, but also in every single buffer,
// in case a child is compiled alone.
+// NOTE(review): 'tmp' is declared in lines elided from this hunk;
+// from the comment above it is presumably the master buffer — confirm.
tmp->registerBibfiles(bf);
for (auto const & p : bf) {
- FileNamePairList::const_iterator temp =
+ docstring_list::const_iterator temp =
find(d->bibfiles_cache_.begin(), d->bibfiles_cache_.end(), p);
if (temp == d->bibfiles_cache_.end())
d->bibfiles_cache_.push_back(p);
}
+// File-static cache mapping a bib file identifier (as it appears in the
+// document) to its resolved location on disk.
+// NOTE(review): shared by all buffers; cleared via clearBibFileCache().
+static map<docstring, FileName> bibfileCache;
+
+// Resolve the bibliography file 'bibid' to a FileName, preferring a file
+// in the document directory over one found by kpsewhich, and cache the
+// result for subsequent lookups.
+FileName Buffer::getBibfilePath(docstring const & bibid) const
+{
+ map<docstring, FileName>::const_iterator it =
+ bibfileCache.find(bibid);
+ if (it != bibfileCache.end()) {
+ // i.e., return bibfileCache[bibid];
+ return it->second;
+ }
+
+ LYXERR(Debug::FILES, "Reading file location for " << bibid);
+ string const texfile = changeExtension(to_utf8(bibid), "bib");
+ // we need to check first if this file exists where it's said to be.
+ // there's a weird bug that occurs otherwise: if the file is in the
+ // Buffer's directory but has the same name as some file that would be
+ // found by kpsewhich, then we find the latter, not the former.
+ FileName const local_file = makeAbsPath(texfile, filePath());
+ FileName file = local_file;
+ if (!file.exists()) {
+ // there's no need now to check whether the file can be found
+ // locally
+ file = findtexfile(texfile, "bib", true);
+ if (file.empty())
+ file = local_file;
+ }
+ LYXERR(Debug::FILES, "Found at: " << file);
+
+ // store in the cache and return without a second map lookup
+ bibfileCache[bibid] = file;
+ return file;
+}
+
+
void Buffer::checkIfBibInfoCacheIsValid() const
{
// use the master's cache
return;
}
- // if we already know the cache is invalid, no need to check
- // the timestamps
+ // If we already know the cache is invalid, stop here.
+ // This is important in the case when the bibliography
+ // environment (rather than Bib[la]TeX) is used.
+ // In that case, the timestamp check below gives no
+ // sensible result. Rather than that, the cache will
+ // be invalidated explicitly via invalidateBibInfoCache()
+ // by the Bibitem inset.
+ // Same applies for bib encoding changes, which trigger
+ // invalidateBibInfoCache() by InsetBibtex.
if (!d->bibinfo_cache_valid_)
return;
+ if (d->have_bibitems_) {
+ // We have a bibliography environment.
+ // Invalidate the bibinfo cache unconditionally.
+ // Cite labels will get invalidated by the inset if needed.
+ d->bibinfo_cache_valid_ = false;
+ return;
+ }
+
+ // OK. This is with Bib(la)tex. We'll assume the cache
+ // is valid and change this if we find changes in the bibs.
+ d->bibinfo_cache_valid_ = true;
+ d->cite_labels_valid_ = true;
+
// compare the cached timestamps with the actual ones.
- FileNamePairList const & bibfiles_cache = getBibfiles();
- FileNamePairList::const_iterator ei = bibfiles_cache.begin();
- FileNamePairList::const_iterator en = bibfiles_cache.end();
- for (; ei != en; ++ ei) {
- FileName const fn = ei->second;
+ docstring_list const & bibfiles_cache = getBibfiles();
+ for (auto const & bf : bibfiles_cache) {
+ FileName const fn = getBibfilePath(bf);
time_t lastw = fn.lastModified();
time_t prevw = d->bibfile_status_[fn];
if (lastw != prevw) {
}
-void Buffer::reloadBibInfoCache() const
+// Drop all cached bib file locations so that getBibfilePath() will
+// resolve them from disk again.
+void Buffer::clearBibFileCache() const
+{
+ bibfileCache.clear();
+}
+
+
+// Rebuild the bibliography info cache (d->bibinfo_) by re-collecting all
+// bib keys. Unless 'force' is true, nothing is done while the cache is
+// still valid. Children delegate to the master buffer.
+void Buffer::reloadBibInfoCache(bool const force) const
{
+ // we should not need to do this for internal buffers
+ if (isInternal())
+ return;
+
// use the master's cache
Buffer const * const tmp = masterBuffer();
if (tmp != this) {
- tmp->reloadBibInfoCache();
+ tmp->reloadBibInfoCache(force);
return;
}
- checkIfBibInfoCacheIsValid();
- if (d->bibinfo_cache_valid_)
- return;
+ if (!force) {
+ checkIfBibInfoCacheIsValid();
+ if (d->bibinfo_cache_valid_)
+ return;
+ }
+ LYXERR(Debug::FILES, "Bibinfo cache was invalid.");
+ // re-read file locations when this info changes
+ // FIXME Is this sufficient? Or should we also force that
+ // in some other cases? If so, then it is easy enough to
+ // add the following line in some other places.
+ clearBibFileCache();
d->bibinfo_.clear();
FileNameList checkedFiles;
+ d->have_bibitems_ = false;
collectBibKeys(checkedFiles);
d->bibinfo_cache_valid_ = true;
}
+// Collect bibliography keys from all insets of this buffer, and record
+// whether a bibliography environment (an InsetBibitem) is present.
void Buffer::collectBibKeys(FileNameList & checkedFiles) const
{
- for (InsetIterator it = inset_iterator_begin(inset()); it; ++it)
+ if (!parent())
+ clearIncludeList();
+
+ for (InsetIterator it = inset_iterator_begin(inset()); it; ++it) {
it->collectBibKeys(it, checkedFiles);
+ if (it->lyxCode() == BIBITEM_CODE) {
+ // NOTE(review): for nested includes this flags the immediate
+ // parent, not the master buffer — confirm this is intended.
+ if (parent() != nullptr)
+ parent()->d->have_bibitems_ = true;
+ else
+ d->have_bibitems_ = true;
+ }
+ }
}
BiblioInfo & bi = d->bibinfo_;
bi.mergeBiblioInfo(bin);
- if (parent() != 0) {
+ if (parent() != nullptr) {
BiblioInfo & masterbi = parent()->d->bibinfo_;
masterbi.mergeBiblioInfo(bin);
}
BiblioInfo & bi = d->bibinfo_;
bi[key] = bin;
- if (parent() != 0) {
- BiblioInfo & masterbi = parent()->d->bibinfo_;
+ if (parent() != nullptr) {
+ BiblioInfo & masterbi = masterBuffer()->d->bibinfo_;
masterbi[key] = bin;
}
}
// get buffer of external file
InsetInclude const & ins =
static_cast<InsetInclude const &>(*it);
- Buffer * child = ins.getChildBuffer();
+ Buffer * child = ins.loadIfNeeded();
if (!child)
continue;
child->dispatch(func, dr);
if (params().save_transient_properties)
undo().recordUndoBufferParams(CursorData());
params().track_changes = !params().track_changes;
- if (!params().track_changes)
- dr.forceChangesUpdate();
break;
case LFUN_CHANGES_OUTPUT:
undo().recordUndoBufferParams(CursorData());
params().output_changes = !params().output_changes;
if (params().output_changes) {
- bool dvipost = LaTeXFeatures::isAvailable("dvipost");
bool xcolorulem = LaTeXFeatures::isAvailable("ulem") &&
LaTeXFeatures::isAvailable("xcolor");
- if (!dvipost && !xcolorulem) {
+ if (!xcolorulem) {
Alert::warning(_("Changes not shown in LaTeX output"),
_("Changes will not be highlighted in LaTeX output, "
- "because neither dvipost nor xcolor/ulem are installed.\n"
- "Please install these packages or redefine "
- "\\lyxadded and \\lyxdeleted in the LaTeX preamble."));
- } else if (!xcolorulem) {
- Alert::warning(_("Changes not shown in LaTeX output"),
- _("Changes will not be highlighted in LaTeX output "
- "when using pdflatex, because xcolor and ulem are not installed.\n"
+ "because xcolor and ulem are not installed.\n"
"Please install both packages or redefine "
"\\lyxadded and \\lyxdeleted in the LaTeX preamble."));
}
for ( ; cur ; cur.forwardPar())
cur.paragraph().anonymize();
dr.forceBufferUpdate();
+ dr.screenUpdate(Update::Force);
break;
}
LASSERT(from, return);
LASSERT(to, return);
- for_each(par_iterator_begin(),
- par_iterator_end(),
- bind(&Paragraph::changeLanguage, _1, params(), from, to));
+ ParIterator it = par_iterator_begin();
+ ParIterator eit = par_iterator_end();
+ for (; it != eit; ++it)
+ it->changeLanguage(params(), from, to);
}
for (ParConstIterator it = par_iterator_begin(); it != end; ++it)
it->getLanguages(langs);
// also children
- ListOfBuffers clist = getDescendents();
+ ListOfBuffers clist = getDescendants();
for (auto const & cit : clist)
cit->getLanguages(langs);
}
}
-docstring const Buffer::B_(string const & l10n) const
+// Translate the message l10n by delegating to BufferParams::B_(),
+// i.e. in the document's language.
+docstring Buffer::B_(string const & l10n) const
{
return params().B_(l10n);
}
}
-vector<docstring> const Buffer::prepareBibFilePaths(OutputParams const & runparams,
- FileNamePairList const bibfilelist,
+vector<pair<docstring, string>> const Buffer::prepareBibFilePaths(OutputParams const & runparams,
+ docstring_list const & bibfilelist,
bool const add_extension) const
{
// If we are processing the LaTeX file in a temp directory then
// Otherwise, store the (maybe absolute) path to the original,
// unmangled database name.
- vector<docstring> res;
+ vector<pair<docstring, string>> res;
// determine the export format
string const tex_format = flavor2format(runparams.flavor);
bool found_space = false;
for (auto const & bit : bibfilelist) {
- string utf8input = to_utf8(bit.first);
+ string utf8input = to_utf8(bit);
string database =
prepareFileNameForLaTeX(utf8input, ".bib", runparams.nice);
- FileName const try_in_file =
+ FileName try_in_file =
makeAbsPath(database + ".bib", filePath());
- bool const not_from_texmf = try_in_file.isReadableFile();
+ bool not_from_texmf = try_in_file.isReadableFile();
+ // If the file has not been found, try with the real file name
+ // (it might come from a child in a sub-directory)
+ if (!not_from_texmf) {
+ try_in_file = getBibfilePath(bit);
+ if (try_in_file.isReadableFile()) {
+ // Check if the file is in texmf
+ FileName kpsefile(findtexfile(changeExtension(utf8input, "bib"), "bib", true));
+ not_from_texmf = kpsefile.empty()
+ || kpsefile.absFileName() != try_in_file.absFileName();
+ if (not_from_texmf)
+ // If this exists, make path relative to the master
+ // FIXME Unicode
+ database =
+ removeExtension(prepareFileNameForLaTeX(
+ to_utf8(makeRelPath(from_utf8(try_in_file.absFileName()),
+ from_utf8(filePath()))),
+ ".bib", runparams.nice));
+ }
+ }
if (!runparams.inComment && !runparams.dryrun && !runparams.nice &&
not_from_texmf) {
if (contains(path, ' '))
found_space = true;
+ string enc;
+ if (params().useBiblatex() && !params().bibFileEncoding(utf8input).empty())
+ enc = params().bibFileEncoding(utf8input);
+
+ bool recorded = false;
+ for (auto const & pe : res) {
+ if (pe.first == path) {
+ recorded = true;
+ break;
+ }
- if (find(res.begin(), res.end(), path) == res.end())
- res.push_back(path);
+ }
+ if (!recorded)
+ res.push_back(make_pair(path, enc));
}
// Check if there are spaces in the path and warn BibTeX users, if so.
void Buffer::setParent(Buffer const * buffer)
{
- // Avoids recursive include.
- d->setParent(buffer == this ? 0 : buffer);
+ // We need to do some work here to avoid recursive parent structures.
+ // This is the easy case.
+ if (buffer == this) {
+ LYXERR0("Ignoring attempt to set self as parent in\n" << fileName());
+ return;
+ }
+ // Now we check parents going upward, to make sure that IF we set the
+ // parent as requested, we would not generate a recursive include.
+ set<Buffer const *> sb;
+ // Seed the visited set with this buffer: if it shows up among the
+ // ancestors of the proposed parent, the new link would close a cycle
+ // even though the existing ancestor chain itself is acyclic.
+ sb.insert(this);
+ Buffer const * b = buffer;
+ bool found_recursion = false;
+ while (b) {
+ if (sb.find(b) != sb.end()) {
+ found_recursion = true;
+ break;
+ }
+ sb.insert(b);
+ b = b->parent();
+ }
+
+ if (found_recursion) {
+ LYXERR0("Ignoring attempt to set parent of\n" <<
+ fileName() <<
+ "\nto " <<
+ buffer->fileName() <<
+ "\nbecause that would create a recursive inclusion.");
+ return;
+ }
+
+ // We should be safe now.
+ d->setParent(buffer);
updateMacros();
}
+// Every buffer in this document's family: the master buffer first,
+// followed by all of its descendants.
ListOfBuffers Buffer::allRelatives() const
{
- ListOfBuffers lb = masterBuffer()->getDescendents();
+ ListOfBuffers lb = masterBuffer()->getDescendants();
lb.push_front(const_cast<Buffer *>(masterBuffer()));
return lb;
}
Buffer const * Buffer::masterBuffer() const
{
- // FIXME Should be make sure we are not in some kind
- // of recursive include? A -> B -> A will crash this.
Buffer const * const pbuf = d->parent();
if (!pbuf)
return this;
}
-ListOfBuffers Buffer::getDescendents() const
+ListOfBuffers Buffer::getDescendants() const
{
ListOfBuffers v;
collectChildren(v, true);
if (it == m.begin())
return m.end();
- it--;
+ --it;
return it;
}
// if paragraphs have no macro context set, pos will be empty
if (pos.empty())
- return 0;
+ return nullptr;
// we haven't found anything yet
DocIterator bestPos = owner_->par_iterator_begin();
- MacroData const * bestData = 0;
+ MacroData const * bestData = nullptr;
// find macro definitions for name
NamePositionScopeMacroMap::const_iterator nameIt = macros.find(name);
DocIterator const & pos, bool global) const
{
if (d->macro_lock)
- return 0;
+ return nullptr;
// query buffer macros
MacroData const * data = d->getBufferMacro(name, pos);
- if (data != 0)
+ if (data != nullptr)
return data;
// If there is a master buffer, query that
if (global) {
data = MacroTable::globalMacros().get(name);
- if (data != 0)
+ if (data != nullptr)
return data;
}
- return 0;
+ return nullptr;
}
// look where the child buffer is included first
Impl::BufferPositionMap::iterator it = d->children_positions.find(&child);
if (it == d->children_positions.end())
- return 0;
+ return nullptr;
// check for macros at the inclusion position
return getMacro(name, it->second, global);
InsetInclude const & incinset =
static_cast<InsetInclude const &>(*insit.inset);
macro_lock = true;
- Buffer * child = incinset.getChildBuffer();
+ Buffer * child = incinset.loadIfNeeded();
macro_lock = false;
if (!child)
continue;
// get buffer of external file
InsetInclude const & ins =
static_cast<InsetInclude const &>(*it);
- Buffer * child = ins.getChildBuffer();
+ Buffer * child = ins.loadIfNeeded();
if (!child)
continue;
child->getUsedBranches(result, true);
MacroContext mc = MacroContext(this, it);
for (DocIterator::idx_type i = 0; i < n; ++i) {
MathData & data = minset->cell(i);
- data.updateMacros(0, mc, utype, 0);
+ data.updateMacros(nullptr, mc, utype, 0);
}
}
}
RefCache::iterator it = d->ref_cache_.find(label);
if (it != d->ref_cache_.end())
- return it->second.second;
+ return it->second;
- static InsetLabel const * dummy_il = 0;
static References const dummy_refs = References();
it = d->ref_cache_.insert(
- make_pair(label, make_pair(dummy_il, dummy_refs))).first;
- return it->second.second;
+ make_pair(label, dummy_refs)).first;
+ return it->second;
}
}
-void Buffer::setInsetLabel(docstring const & label, InsetLabel const * il)
+// Register inset il under the string 'label' in the master buffer's
+// label cache; 'active' records whether the label is in non-deleted text.
+void Buffer::setInsetLabel(docstring const & label, InsetLabel const * il,
+ bool const active)
{
- masterBuffer()->d->ref_cache_[label].first = il;
+ LabelInfo linfo;
+ linfo.label = label;
+ linfo.inset = il;
+ linfo.active = active;
+ masterBuffer()->d->label_cache_.push_back(linfo);
}
-InsetLabel const * Buffer::insetLabel(docstring const & label) const
+// Return the first cached label inset matching 'label', or nullptr.
+// If 'active' is true, labels in deleted text are skipped.
+InsetLabel const * Buffer::insetLabel(docstring const & label,
+ bool const active) const
{
- return masterBuffer()->d->ref_cache_[label].first;
+ for (auto const & rc : masterBuffer()->d->label_cache_) {
+ // match unless the caller insists on an active label
+ if (rc.label == label && (rc.active || !active))
+ return rc.inset;
+ }
+ return nullptr;
+}
+
+
+// Whether an active (i.e., not deleted) label inset exists for 'label'.
+bool Buffer::activeLabel(docstring const & label) const
+{
+ // insetLabel() with active == true only returns non-deleted labels.
+ return insetLabel(label, true) != nullptr;
+}
+// Drop the cached cross-reference and label data. The caches live in the
+// top-level buffer, so this is a no-op when called on a child.
void Buffer::clearReferenceCache() const
{
- if (!d->parent())
+ if (!d->parent()) {
d->ref_cache_.clear();
+ d->label_cache_.clear();
+ }
}
return;
string const paramName = "key";
+ UndoGroupHelper ugh(this);
InsetIterator it = inset_iterator_begin(inset());
for (; it; ++it) {
if (it->lyxCode() != CITE_CODE)
continue;
InsetCommand * inset = it->asInsetCommand();
docstring const oldValue = inset->getParam(paramName);
- if (oldValue == from)
+ if (oldValue == from) {
+ undo().recordUndo(CursorData(it));
inset->setParam(paramName, to);
+ }
}
}
// No side effect of file copying and image conversion
runparams.dryrun = true;
+ // Some macros rely on font encoding
+ runparams.main_fontenc = params().main_font_encoding();
+
if (output == CurrentParagraph) {
runparams.par_begin = par_begin;
runparams.par_end = par_end;
par.write(ods, params(), dt);
os << from_utf8(ods.str());
} else if (runparams.flavor == OutputParams::HTML) {
- XHTMLStream xs(os);
+ XMLStream xs(os);
setMathFlavor(runparams);
xhtmlParagraphs(text(), *this, xs, runparams);
} else if (runparams.flavor == OutputParams::TEXT) {
// Probably should have some routine with a signature like them.
writePlaintextParagraph(*this,
text().paragraphs()[par_begin], os, runparams, dummy);
- } else if (params().isDocBook()) {
- docbookParagraphs(text(), *this, os, runparams);
+ } else if (runparams.flavor == OutputParams::DOCBOOK5) {
+ XMLStream xs{os};
+ docbookParagraphs(text(), *this, xs, runparams);
} else {
// If we are previewing a paragraph, even if this is the
// child of some other buffer, let's cut the link here,
LaTeXFeatures features(*this, params(), runparams);
validate(features);
runparams.use_polyglossia = features.usePolyglossia();
+ runparams.use_hyperref = features.isRequired("hyperref");
// latex or literate
otexstream ots(os);
// output above
os << "% "<< _("Plain text does not have a preamble.");
} else
writePlaintextFile(*this, os, runparams);
- } else if (params().isDocBook()) {
- writeDocBookSource(os, absFileName(), runparams, output);
+ } else if (runparams.flavor == OutputParams::DOCBOOK5) {
+ writeDocBookSource(os, runparams, output);
} else {
// latex or literate
otexstream ots(os);
}
-
-namespace {
-
-class AutoSaveBuffer : public ForkedProcess {
-public:
- ///
- AutoSaveBuffer(Buffer const & buffer, FileName const & fname)
- : buffer_(buffer), fname_(fname) {}
- ///
- virtual shared_ptr<ForkedProcess> clone() const
- {
- return make_shared<AutoSaveBuffer>(*this);
- }
- ///
- int start()
- {
- command_ = to_utf8(bformat(_("Auto-saving %1$s"),
- from_utf8(fname_.absFileName())));
- return run(DontWait);
- }
-private:
- ///
- virtual int generateChild();
- ///
- Buffer const & buffer_;
- FileName fname_;
-};
-
-
-int AutoSaveBuffer::generateChild()
-{
-#if defined(__APPLE__)
- /* FIXME fork() is not usable for autosave on Mac OS X 10.6 (snow leopard)
- * We should use something else like threads.
- *
- * Since I do not know how to determine at run time what is the OS X
- * version, I just disable forking altogether for now (JMarc)
- */
- pid_t const pid = -1;
-#else
- // tmp_ret will be located (usually) in /tmp
- // will that be a problem?
- // Note that this calls ForkedCalls::fork(), so it's
- // ok cross-platform.
- pid_t const pid = fork();
- // If you want to debug the autosave
- // you should set pid to -1, and comment out the fork.
- if (pid != 0 && pid != -1)
- return pid;
-#endif
-
- // pid = -1 signifies that lyx was unable
- // to fork. But we will do the save
- // anyway.
- bool failed = false;
- TempFile tempfile("lyxautoXXXXXX.lyx");
- tempfile.setAutoRemove(false);
- FileName const tmp_ret = tempfile.name();
- if (!tmp_ret.empty()) {
- if (!buffer_.writeFile(tmp_ret))
- failed = true;
- else if (!tmp_ret.moveTo(fname_))
- failed = true;
- } else
- failed = true;
-
- if (failed) {
- // failed to write/rename tmp_ret so try writing direct
- if (!buffer_.writeFile(fname_)) {
- // It is dangerous to do this in the child,
- // but safe in the parent, so...
- if (pid == -1) // emit message signal.
- buffer_.message(_("Autosave failed!"));
- }
- }
-
- if (pid == 0) // we are the child so...
- _exit(0);
-
- return pid;
-}
-
-} // namespace
-
-
FileName Buffer::getEmergencyFileName() const
{
return FileName(d->filename.absFileName() + ".emergency");
+// Mark this buffer and all of its descendants as being (or no longer
+// being) exported.
void Buffer::setExportStatus(bool e) const
{
d->doing_export = e;
- ListOfBuffers clist = getDescendents();
+ ListOfBuffers clist = getDescendants();
for (auto const & bit : clist)
bit->d->doing_export = e;
}
string & result_file) const
{
bool const update_unincluded =
- params().maintain_unincluded_children
+ params().maintain_unincluded_children != BufferParams::CM_None
&& !params().getIncludedChildren().empty();
// (1) export with all included children (omit \includeonly)
if (!put_in_tempdir) {
// Only show this alert if this is an export to a non-temporary
// file (not for previewing).
- Alert::error(_("Couldn't export file"), bformat(
- _("No information for exporting the format %1$s."),
- theFormats().prettyName(format)));
+ docstring s = bformat(_("No information for exporting the format %1$s."),
+ theFormats().prettyName(format));
+ if (format == "pdf4")
+ s += "\n"
+ + bformat(_("Hint: use non-TeX fonts or set input encoding "
+ " to '%1$s'"), from_utf8(encodings.fromLyXName("ascii")->guiName()));
+ Alert::error(_("Couldn't export file"), s);
}
return ExportNoPathToFormat;
}
runparams.flavor = converters.getFlavor(path, this);
+ runparams.hyperref_driver = converters.getHyperrefDriver(path);
for (auto const & edge : path)
if (theConverters().get(edge).nice()) {
need_nice_file = true;
// Plain text backend
if (backend_format == "text") {
runparams.flavor = OutputParams::TEXT;
- writePlaintextFile(*this, FileName(filename), runparams);
+ try {
+ writePlaintextFile(*this, FileName(filename), runparams);
+ }
+ catch (ConversionException const &) { return ExportCancel; }
}
// HTML backend
else if (backend_format == "xhtml") {
runparams.flavor = OutputParams::HTML;
setMathFlavor(runparams);
- makeLyXHTMLFile(FileName(filename), runparams);
+ if (makeLyXHTMLFile(FileName(filename), runparams) == ExportKilled)
+ return ExportKilled;
} else if (backend_format == "lyx")
writeFile(FileName(filename));
- // Docbook backend
- else if (params().isDocBook()) {
+ // DocBook backend
+ else if (backend_format == "docbook5") {
+ runparams.flavor = OutputParams::DOCBOOK5;
runparams.nice = !put_in_tempdir;
- makeDocBookFile(FileName(filename), runparams);
+ if (makeDocBookFile(FileName(filename), runparams) == ExportKilled)
+ return ExportKilled;
}
// LaTeX backend
else if (backend_format == format || need_nice_file) {
runparams.nice = true;
- bool const success = makeLaTeXFile(FileName(filename), string(), runparams);
+ ExportStatus const retval =
+ makeLaTeXFile(FileName(filename), string(), runparams);
+ if (retval == ExportKilled)
+ return ExportKilled;
if (d->cloned_buffer_)
d->cloned_buffer_->d->errorLists["Export"] = d->errorLists["Export"];
- if (!success)
- return ExportError;
+ if (retval != ExportSuccess)
+ return retval;
} else if (!lyxrc.tex_allows_spaces
&& contains(filePath(), ' ')) {
Alert::error(_("File name error"),
- _("The directory path to the document cannot contain spaces."));
+ bformat(_("The directory path to the document\n%1$s\n"
+ "contains spaces, but your TeX installation does "
+ "not allow them. You should save the file to a directory "
+ "whose name does not contain spaces."), from_utf8(filePath())));
return ExportTexPathHasSpaces;
} else {
runparams.nice = false;
- bool const success = makeLaTeXFile(
- FileName(filename), filePath(), runparams);
+ ExportStatus const retval =
+ makeLaTeXFile(FileName(filename), filePath(), runparams);
+ if (retval == ExportKilled)
+ return ExportKilled;
if (d->cloned_buffer_)
d->cloned_buffer_->d->errorLists["Export"] = d->errorLists["Export"];
- if (!success)
+ if (retval != ExportSuccess)
return ExportError;
}
ErrorList & error_list = d->errorLists[error_type];
string const ext = theFormats().extension(format);
FileName const tmp_result_file(changeExtension(filename, ext));
- bool const success = converters.convert(this, FileName(filename),
- tmp_result_file, FileName(absFileName()), backend_format, format,
- error_list);
+ Converters::RetVal const retval =
+ converters.convert(this, FileName(filename), tmp_result_file,
+ FileName(absFileName()), backend_format, format,
+ error_list, Converters::none, includeall);
+ if (retval == Converters::KILLED)
+ return ExportCancel;
+ bool success = (retval == Converters::SUCCESS);
// Emit the signal to show the error list or copy it back to the
// cloned Buffer so that it can be emitted afterwards.
else
errors(error_type);
// also to the children, in case of master-buffer-view
- ListOfBuffers clist = getDescendents();
+ ListOfBuffers clist = getDescendants();
for (auto const & bit : clist) {
if (runparams.silent)
bit->d->errorLists[error_type].clear();
result_file = dest_filename;
// We need to copy referenced files (e. g. included graphics
// if format == "dvi") to the result dir.
- vector<ExportedFile> const files =
+ vector<ExportedFile> const extfiles =
runparams.exportdata->externalFiles(format);
string const dest = runparams.export_folder.empty() ?
onlyPath(result_file) : runparams.export_folder;
: force_overwrite == ALL_FILES;
CopyStatus status = use_force ? FORCE : SUCCESS;
- for (ExportedFile const & exp : files) {
+ for (ExportedFile const & exp : extfiles) {
if (status == CANCEL) {
message(_("Document export cancelled."));
return ExportCancel;
Buffer::ExportStatus Buffer::preview(string const & format) const
{
bool const update_unincluded =
- params().maintain_unincluded_children
+ params().maintain_unincluded_children != BufferParams::CM_None
&& !params().getIncludedChildren().empty();
return preview(format, update_unincluded);
}
Impl * theimpl = isClone() ? d->cloned_buffer_->d : d;
theimpl->preview_file_ = previewFile;
theimpl->preview_format_ = format;
- theimpl->preview_error_ = (status != ExportSuccess);
+ theimpl->require_fresh_start_ = (status != ExportSuccess);
if (status != ExportSuccess)
return status;
_("&Remove"), _("&Keep"));
if (del_emerg == 0)
emergencyFile.removeFile();
+ else {
+ // See bug #11464
+ FileName newname;
+ string const ename = emergencyFile.absFileName();
+ bool noname = true;
+ // Surely we can find one in 100 tries?
+ for (int i = 1; i < 100; ++i) {
+ newname.set(ename + to_string(i) + ".lyx");
+ if (!newname.exists()) {
+ noname = false;
+ break;
+ }
+ }
+ if (!noname) {
+ // renameTo returns true on success. So inverting that
+ // will give us true if we fail.
+ noname = !emergencyFile.renameTo(newname);
+ }
+ if (noname) {
+ Alert::warning(_("Can't rename emergency file!"),
+ _("LyX was unable to rename the emergency file. "
+ "You should do so manually. Otherwise, you will be "
+ "asked about it again the next time you try to load "
+ "this file, and may over-write your own work."));
+ } else {
+ Alert::warning(_("Emergency File Renames"),
+ bformat(_("Emergency file renamed as:\n %1$s"),
+ from_utf8(newname.onlyFileName())));
+ }
+ }
return ReadOriginal;
}
}
-void Buffer::bufferErrors(TeXErrors const & terr, ErrorList & errorList) const
+void Buffer::Impl::traverseErrors(TeXErrors::Errors::const_iterator err, TeXErrors::Errors::const_iterator end, ErrorList & errorList) const
{
- for (auto const & err : terr) {
+ for (; err != end; ++err) {
TexRow::TextEntry start = TexRow::text_none, end = TexRow::text_none;
- int errorRow = err.error_in_line;
- Buffer const * buf = 0;
- Impl const * p = d;
- if (err.child_name.empty())
+ int errorRow = err->error_in_line;
+ Buffer const * buf = nullptr;
+ Impl const * p = this;
+ if (err->child_name.empty())
tie(start, end) = p->texrow.getEntriesFromRow(errorRow);
else {
// The error occurred in a child
- for (Buffer const * child : getDescendents()) {
+ for (Buffer const * child : owner_->getDescendants()) {
string const child_name =
DocFileName(changeExtension(child->absFileName(), "tex")).
mangledFileName();
- if (err.child_name != child_name)
+ if (err->child_name != child_name)
continue;
tie(start, end) = child->d->texrow.getEntriesFromRow(errorRow);
if (!TexRow::isNone(start)) {
- buf = d->cloned_buffer_
+ buf = this->cloned_buffer_
? child->d->cloned_buffer_->d->owner_
: child->d->owner_;
p = child->d;
}
}
}
- errorList.push_back(ErrorItem(err.error_desc, err.error_text,
+ errorList.push_back(ErrorItem(err->error_desc, err->error_text,
start, end, buf));
}
}
+// Translate the LaTeX errors recorded in terr into ErrorItems appended
+// to errorList.
+void Buffer::bufferErrors(TeXErrors const & terr, ErrorList & errorList) const
+{
+ d->traverseErrors(terr.begin(), terr.end(), errorList);
+}
+
+
+// Translate the reference entries recorded in terr (the begin_ref()/
+// end_ref() range) into ErrorItems appended to errorList.
+void Buffer::bufferRefs(TeXErrors const & terr, ErrorList & errorList) const
+{
+ d->traverseErrors(terr.begin_ref(), terr.end_ref(), errorList);
+}
+
+
void Buffer::updateBuffer(UpdateScope scope, UpdateType utype) const
{
LBUFERR(!text().paragraphs().empty());
Buffer const * const master = masterBuffer();
DocumentClass const & textclass = master->params().documentClass();
- FileNamePairList old_bibfiles;
- // do this only if we are the top-level Buffer
- if (master == this) {
+ docstring_list old_bibfiles;
+ // Do this only if we are the top-level Buffer. We also need to account
+ // for the case of a previewed child with ignored parent here.
+ if (master == this && !d->ignore_parent) {
textclass.counters().reset(from_ascii("bibitem"));
reloadBibInfoCache();
// we will re-read this cache as we go through, but we need
// update all caches
clearReferenceCache();
updateMacros();
- setChangesPresent(false);
Buffer & cbuf = const_cast<Buffer &>(*this);
+ // if we are reloading, then we could have a dangling TOC,
+ // in effect. so we need to go ahead and reset, even though
+ // we will do so again when we rebuild the TOC later.
+ cbuf.tocBackend().reset();
// do the real work
ParIterator parit = cbuf.par_iterator_begin();
+ if (scope == UpdateMaster)
+ clearIncludeList();
updateBuffer(parit, utype);
// If this document has siblings, then update the TocBackend later. The
return;
// if the bibfiles changed, the cache of bibinfo is invalid
- sort(d->bibfiles_cache_.begin(), d->bibfiles_cache_.end());
- // the old one should already be sorted
- if (old_bibfiles != d->bibfiles_cache_) {
+ docstring_list new_bibfiles = d->bibfiles_cache_;
+ // this is a trick to determine whether the two vectors have
+ // the same elements.
+ sort(new_bibfiles.begin(), new_bibfiles.end());
+ sort(old_bibfiles.begin(), old_bibfiles.end());
+ if (old_bibfiles != new_bibfiles) {
+ LYXERR(Debug::FILES, "Reloading bibinfo cache.");
invalidateBibinfoCache();
reloadBibInfoCache();
// We relied upon the bibinfo cache when recalculating labels. But that
// labels. Nothing else will have changed. So we could create a new
// UpdateType that would signal that fact, if we needed to do so.
parit = cbuf.par_iterator_begin();
+ // we will be re-doing the counters and references and such.
+ textclass.counters().reset();
+ clearReferenceCache();
+ // we should not need to do this again?
+ // updateMacros();
updateBuffer(parit, utype);
+ // this will already have been done by reloadBibInfoCache();
+ // d->bibinfo_cache_valid_ = true;
}
- else
+ else {
+ LYXERR(Debug::FILES, "Bibfiles unchanged.");
+ // this is also set to true on the other path, by reloadBibInfoCache.
d->bibinfo_cache_valid_ = true;
+ }
d->cite_labels_valid_ = true;
/// FIXME: Perf
+ clearIncludeList();
cbuf.tocBackend().update(true, utype);
if (scope == UpdateMaster)
cbuf.structureChanged();
docstring itemlabel;
switch (par.itemdepth) {
case 0:
+ // • U+2022 BULLET
itemlabel = char_type(0x2022);
break;
case 1:
+ // – U+2013 EN DASH
itemlabel = char_type(0x2013);
break;
case 2:
+ // ∗ U+2217 ASTERISK OPERATOR
itemlabel = char_type(0x2217);
break;
case 3:
- itemlabel = char_type(0x2219); // or 0x00b7
+ // · U+00B7 MIDDLE DOT
+ itemlabel = char_type(0x00b7);
break;
}
par.params().labelString(itemlabel);
}
-void Buffer::updateBuffer(ParIterator & parit, UpdateType utype) const
+void Buffer::updateBuffer(ParIterator & parit, UpdateType utype, bool const deleted) const
{
+ pushIncludedBuffer(this);
// LASSERT: Is it safe to continue here, or should we just return?
LASSERT(parit.pit() == 0, /**/);
depth_type maxdepth = 0;
pit_type const lastpit = parit.lastpit();
+ bool changed = false;
for ( ; parit.pit() <= lastpit ; ++parit.pit()) {
// reduce depth if necessary
if (parit->params().depth() > maxdepth) {
// set the counter for this paragraph
d->setLabel(parit, utype);
- // update change-tracking flag
- parit->addChangesToBuffer(*this);
-
// now the insets
for (auto const & insit : parit->insetList()) {
parit.pos() = insit.pos;
- insit.inset->updateBuffer(parit, utype);
+ insit.inset->updateBuffer(parit, utype, deleted || parit->isDeleted(insit.pos));
+ changed |= insit.inset->isChanged();
}
+
+ // are there changes in this paragraph?
+ changed |= parit->isChanged();
}
+
+ // set change indicator for the inset (or the cell that the iterator
+ // points to, if applicable).
+ parit.text()->inset().isChanged(changed);
+ popIncludedBuffer();
}
}
+// Returns true if any tracked change is present anywhere in the
+// document. The top-level inset's "changed" flag is an aggregate:
+// updateBuffer() now accumulates change state from every paragraph and
+// nested inset and stores it on the enclosing inset, so this single
+// query replaces the full-document scan (updateChangesPresent) that
+// this patch removes.
+bool Buffer::areChangesPresent() const
+{
+ return inset().isChanged();
+}
+
+
Buffer::ReadStatus Buffer::reload()
{
setBusy(true);
docstring const disp_fn = makeDisplayPath(d->filename.absFileName());
// clear parent. this will get reset if need be.
- d->setParent(0);
+ d->setParent(nullptr);
ReadStatus const status = loadLyXFile();
if (status == ReadSuccess) {
updateBuffer();
if (oldloc == newloc)
continue;
// the location of the child file is incorrect.
- cbuf->setParent(0);
- inset_inc->setChildBuffer(0);
+ cbuf->setParent(nullptr);
+ inset_inc->setChildBuffer(nullptr);
}
// invalidate cache of children
d->children_positions.clear();
if (master->isChild(this))
setParent(master);
else
- setParent(0);
+ setParent(nullptr);
}
}
-void Buffer::setChangesPresent(bool b) const
-{
- d->tracked_changes_present_ = b;
-}
-
-
-bool Buffer::areChangesPresent() const
-{
- return d->tracked_changes_present_;
-}
-
-
-void Buffer::updateChangesPresent() const
-{
- LYXERR(Debug::CHANGES, "Buffer::updateChangesPresent");
- setChangesPresent(false);
- ParConstIterator it = par_iterator_begin();
- ParConstIterator const end = par_iterator_end();
- for (; !areChangesPresent() && it != end; ++it)
- it->addChangesToBuffer(*this);
-}
-
-
void Buffer::Impl::refreshFileMonitor()
{
if (file_monitor_ && file_monitor_->filename() == filename.absFileName()) {
}
+// Push buf onto the stack of buffers currently being processed.
+// The stack always lives on the master buffer, so the whole include
+// tree shares a single list; isBufferIncluded() searches it —
+// presumably as a guard against recursive inclusion; confirm with its
+// callers.
+void Buffer::pushIncludedBuffer(Buffer const * buf) const
+{
+ masterBuffer()->d->include_list_.push_back(buf);
+ // Debug-only dump of the stack contents.
+ if (lyxerr.debugging(Debug::FILES)) {
+ LYXERR0("Pushed. Stack now:");
+ // NOTE(review): the list cannot be empty immediately after
+ // push_back; this branch only mirrors popIncludedBuffer's logging.
+ if (masterBuffer()->d->include_list_.empty())
+ LYXERR0("EMPTY!");
+ else
+ for (auto const & b : masterBuffer()->d->include_list_)
+ LYXERR0(b->fileName());
+ }
+}
+
+
+// Pop the most recently pushed buffer from the master's include stack.
+// NOTE(review): pop_back() on an empty std::list is undefined behavior,
+// so every pop must be balanced by a prior pushIncludedBuffer() — as in
+// updateBuffer(), which pushes on entry and pops on exit.
+void Buffer::popIncludedBuffer() const
+{
+ masterBuffer()->d->include_list_.pop_back();
+ // Debug-only dump of the remaining stack contents.
+ if (lyxerr.debugging(Debug::FILES)) {
+ LYXERR0("Popped. Stack now:");
+ if (masterBuffer()->d->include_list_.empty())
+ LYXERR0("EMPTY!");
+ else
+ for (auto const & b : masterBuffer()->d->include_list_)
+ LYXERR0(b->fileName());
+ }
+}
+
+
+// Whether buf is currently on the master buffer's include stack, i.e.
+// is being processed by an enclosing updateBuffer() call. A null buf
+// is never considered included.
+bool Buffer::isBufferIncluded(Buffer const * buf) const
+{
+ if (!buf)
+ return false;
+ // Debug-only dump of the stack being searched.
+ if (lyxerr.debugging(Debug::FILES)) {
+ LYXERR0("Checking for " << buf->fileName() << ". Stack now:");
+ if (masterBuffer()->d->include_list_.empty())
+ LYXERR0("EMPTY!");
+ else
+ for (auto const & b : masterBuffer()->d->include_list_)
+ LYXERR0(b->fileName());
+ }
+ // Linear search is acceptable: include nesting depth stays small.
+ list<Buffer const *> const & blist = masterBuffer()->d->include_list_;
+ return find(blist.begin(), blist.end(), buf) != blist.end();
+}
+
+
+// Reset the include stack after a full update pass (see updateBuffer).
+// NOTE(review): this clears *this* buffer's d->include_list_, whereas
+// push/pop/isBufferIncluded always operate on masterBuffer()'s list —
+// confirm it is only ever invoked on the update root / master buffer,
+// or whether masterBuffer()->d was intended here.
+void Buffer::clearIncludeList() const
+{
+ LYXERR(Debug::FILES, "Clearing include list for " << fileName());
+ d->include_list_.clear();
+}
+
} // namespace lyx