#include "ParagraphParameters.h"
#include "ParIterator.h"
#include "PDFOptions.h"
+#include "Session.h"
#include "SpellChecker.h"
#include "sgml.h"
+#include "texstream.h"
#include "TexRow.h"
#include "Text.h"
#include "TextClass.h"
#include "mathed/InsetMathHull.h"
#include "mathed/MacroTable.h"
-#include "mathed/MathMacroTemplate.h"
+#include "mathed/InsetMathMacroTemplate.h"
#include "mathed/MathSupport.h"
+#include "graphics/GraphicsCache.h"
#include "graphics/PreviewLoader.h"
#include "frontends/alert.h"
#include "support/debug.h"
#include "support/docstring_list.h"
#include "support/ExceptionMessage.h"
+#include "support/FileMonitor.h"
#include "support/FileName.h"
#include "support/FileNameList.h"
#include "support/filetools.h"
#include "support/types.h"
#include "support/bind.h"
-#include "support/shared_ptr.h"
#include <algorithm>
#include <fstream>
#include <iomanip>
#include <map>
+#include <memory>
#include <set>
#include <sstream>
#include <vector>
int const LYX_FORMAT = LYX_FORMAT_LYX;
typedef map<string, bool> DepClean;
-typedef map<docstring, pair<InsetLabel const *, Buffer::References> > RefCache;
-} // namespace anon
+// Information about labels and their associated refs
+struct LabelInfo {
+	/// label string
+	docstring label;
+	/// label inset
+	InsetLabel const * inset;
+	/// associated references cache
+	Buffer::References references;
+	/// whether this label is active (i.e., not deleted)
+	bool active;
+};
+
+/// All labels of the buffer, kept in document order.
+typedef vector<LabelInfo> LabelCache;
+
+/// Maps a label string to the cached references pointing to it.
+typedef map<docstring, Buffer::References> RefCache;
+
+} // namespace
// A storehouse for the cloned buffers.
/** If we have branches that use the file suffix
feature, return the file name with suffix appended.
*/
- support::FileName exportFileName() const;
+ FileName exportFileName() const;
Buffer * owner_;
*/
bool file_fully_loaded;
+ /// original format of loaded file
+ int file_format;
+
+ /// if the file was originally loaded from an older format, do
+ /// we need to back it up still?
+ bool need_format_backup;
+
/// Ignore the parent (e.g. when exporting a child standalone)?
bool ignore_parent;
/// positions of child buffers in the buffer
typedef map<Buffer const * const, DocIterator> BufferPositionMap;
struct ScopeBuffer {
- ScopeBuffer() {}
- ScopeBuffer(DocIterator const & s,Buffer const * b)
+ ScopeBuffer() : buffer(0) {}
+ ScopeBuffer(DocIterator const & s, Buffer const * b)
: scope(s), buffer(b) {}
DocIterator scope;
Buffer const * buffer;
/// Container for all sort of Buffer dependant errors.
map<string, ErrorList> errorLists;
- /// timestamp and checksum used to test if the file has been externally
- /// modified. (Used to properly enable 'File->Revert to saved', bug 4114).
- time_t timestamp_;
+ /// checksum used to test if the file has been externally modified. Used to
+ /// double check whether the file had been externally modified when saving.
unsigned long checksum_;
///
/// A cache for the bibfiles (including bibfiles of loaded child
/// documents), needed for appropriate update of natbib labels.
- mutable support::FileNameList bibfiles_cache_;
+ mutable docstring_list bibfiles_cache_;
// FIXME The caching mechanism could be improved. At present, we have a
// cache for each Buffer, that caches all the bibliography info for that
mutable BiblioInfo bibinfo_;
/// whether the bibinfo cache is valid
mutable bool bibinfo_cache_valid_;
- /// whether the bibfile cache is valid
- mutable bool bibfile_cache_valid_;
/// Cache of timestamps of .bib files
map<FileName, time_t> bibfile_status_;
/// Indicates whether the bibinfo has changed since the last time
/// we ran updateBuffer(), i.e., whether citation labels may need
/// to be updated.
mutable bool cite_labels_valid_;
+ /// Do we have a bibliography environment?
+ mutable bool have_bibitems_;
/// These two hold the file name and format, written to by
/// Buffer::preview and read from by LFUN_BUFFER_VIEW_CACHE.
/// was missing).
bool preview_error_;
+ /// Cache the references associated to a label and their positions
+ /// in the buffer.
mutable RefCache ref_cache_;
+ /// Cache the label insets and their activity status.
+ mutable LabelCache label_cache_;
/// our Text that should be wrapped in an InsetText
InsetText * inset;
if (!cloned_buffer_ && parent_buffer && pb)
LYXERR0("Warning: a buffer should not have two parents!");
parent_buffer = pb;
- if (!cloned_buffer_ && parent_buffer) {
- parent_buffer->invalidateBibfileCache();
+ if (!cloned_buffer_ && parent_buffer)
parent_buffer->invalidateBibinfoCache();
- }
}
/// If non zero, this buffer is a clone of existing buffer \p cloned_buffer_
+ (with_blanks ? blank_count_ : 0);
}
+ // does the buffer contain tracked changes? (if so, we automatically
+ // display the review toolbar, for instance)
+ mutable bool tracked_changes_present_;
+
+ // Make sure the file monitor monitors the good file.
+ void refreshFileMonitor();
+
+ /// Notify or clear of external modification
+ void fileExternallyModified(bool exists);
+
+ /// has been externally modified? Can be reset by the user.
+ mutable bool externally_modified_;
+
private:
/// So we can force access via the accessors.
mutable Buffer const * parent_buffer;
int char_count_;
int blank_count_;
+ FileMonitorPtr file_monitor_;
};
Buffer const * cloned_buffer)
: owner_(owner), lyx_clean(true), bak_clean(true), unnamed(false),
internal_buffer(false), read_only(readonly_), filename(file),
- file_fully_loaded(false), ignore_parent(false), toc_backend(owner),
- macro_lock(false), timestamp_(0), checksum_(0), wa_(0), gui_(0),
- undo_(*owner), bibinfo_cache_valid_(false), bibfile_cache_valid_(false),
- cite_labels_valid_(false), inset(0), preview_loader_(0),
- cloned_buffer_(cloned_buffer), clone_list_(0),
- doing_export(false), parent_buffer(0),
+ file_fully_loaded(false), file_format(LYX_FORMAT), need_format_backup(false),
+ ignore_parent(false), toc_backend(owner), macro_lock(false),
+ checksum_(0), wa_(0), gui_(0), undo_(*owner), bibinfo_cache_valid_(false),
+ cite_labels_valid_(false), have_bibitems_(false), preview_error_(false),
+ inset(0), preview_loader_(0), cloned_buffer_(cloned_buffer),
+ clone_list_(0), doing_export(false),
+ tracked_changes_present_(0), externally_modified_(false), parent_buffer(0),
word_count_(0), char_count_(0), blank_count_(0)
{
+ refreshFileMonitor();
if (!cloned_buffer_) {
temppath = createBufferTmpDir();
lyxvc.setBuffer(owner_);
bibfiles_cache_ = cloned_buffer_->d->bibfiles_cache_;
bibinfo_ = cloned_buffer_->d->bibinfo_;
bibinfo_cache_valid_ = cloned_buffer_->d->bibinfo_cache_valid_;
- bibfile_cache_valid_ = cloned_buffer_->d->bibfile_cache_valid_;
bibfile_status_ = cloned_buffer_->d->bibfile_status_;
cite_labels_valid_ = cloned_buffer_->d->cite_labels_valid_;
+ have_bibitems_ = cloned_buffer_->d->have_bibitems_;
unnamed = cloned_buffer_->d->unnamed;
internal_buffer = cloned_buffer_->d->internal_buffer;
layout_position = cloned_buffer_->d->layout_position;
preview_file_ = cloned_buffer_->d->preview_file_;
preview_format_ = cloned_buffer_->d->preview_format_;
preview_error_ = cloned_buffer_->d->preview_error_;
+ tracked_changes_present_ = cloned_buffer_->d->tracked_changes_present_;
}
// ourselves as a child.
d->clone_list_->erase(this);
// loop over children
- Impl::BufferPositionMap::iterator it = d->children_positions.begin();
- Impl::BufferPositionMap::iterator end = d->children_positions.end();
- for (; it != end; ++it) {
- Buffer * child = const_cast<Buffer *>(it->first);
+ for (auto const & p : d->children_positions) {
+ Buffer * child = const_cast<Buffer *>(p.first);
if (d->clone_list_->erase(child))
delete child;
}
d->position_to_children.clear();
} else {
// loop over children
- Impl::BufferPositionMap::iterator it = d->children_positions.begin();
- Impl::BufferPositionMap::iterator end = d->children_positions.end();
- for (; it != end; ++it) {
- Buffer * child = const_cast<Buffer *>(it->first);
- if (theBufferList().isLoaded(child))
- theBufferList().releaseChild(this, child);
+ for (auto const & p : d->children_positions) {
+ Buffer * child = const_cast<Buffer *>(p.first);
+ if (theBufferList().isLoaded(child)) {
+ if (theBufferList().isOthersChild(this, child))
+ child->setParent(0);
+ else
+ theBufferList().release(child);
+ }
}
if (!isClean()) {
docstring msg = _("LyX attempted to close a document that had unsaved changes!\n");
- msg += emergencyWrite();
+ try {
+ msg += emergencyWrite();
+ } catch (...) {
+ msg += " " + _("Save failed! Document is lost.");
+ }
Alert::warning(_("Attempting to close changed document!"), msg);
}
d->position_to_children.clear();
if (!d->temppath.destroyDirectory()) {
- Alert::warning(_("Could not remove temporary directory"),
- bformat(_("Could not remove the temporary directory %1$s"),
+ LYXERR0(bformat(_("Could not remove the temporary directory %1$s"),
from_utf8(d->temppath.absFileName())));
}
removePreviews();
}
-Buffer * Buffer::cloneFromMaster() const
+Buffer * Buffer::cloneWithChildren() const
{
BufferMap bufmap;
cloned_buffers.push_back(new CloneList);
CloneList * clones = cloned_buffers.back();
- masterBuffer()->cloneWithChildren(bufmap, clones);
+ cloneWithChildren(bufmap, clones);
// make sure we got cloned
BufferMap::const_iterator bit = bufmap.find(this);
// math macro caches need to be rethought and simplified.
// I am not sure whether we should handle Buffer cloning here or in BufferList.
// Right now BufferList knows nothing about buffer clones.
- Impl::PositionScopeBufferMap::iterator it = d->position_to_children.begin();
- Impl::PositionScopeBufferMap::iterator end = d->position_to_children.end();
- for (; it != end; ++it) {
- DocIterator dit = it->first.clone(buffer_clone);
+ for (auto const & p : d->position_to_children) {
+ DocIterator dit = p.first.clone(buffer_clone);
dit.setBuffer(buffer_clone);
- Buffer * child = const_cast<Buffer *>(it->second.buffer);
+ Buffer * child = const_cast<Buffer *>(p.second.buffer);
child->cloneWithChildren(bufmap, clones);
BufferMap::iterator const bit = bufmap.find(child);
BufferParams & mparams = const_cast<Buffer *>(masterBuffer())->params();
// Copy child authors to the params. We need those pointers.
- AuthorList const & child_authors = params().authors();
- AuthorList::Authors::const_iterator it = child_authors.begin();
- for (; it != child_authors.end(); ++it)
- mparams.authors().record(*it);
+ for (Author const & a : params().authors())
+ mparams.authors().record(a);
return mparams;
}
double Buffer::fontScalingFactor() const
{
	return isExporting() ? 75.0 * params().html_math_img_scale
-		: 0.01 * lyxrc.dpi * lyxrc.zoom * lyxrc.preview_scale_factor * params().display_pixel_ratio;
+		// use currentZoom, which presumably reflects the zoom value
+		// in effect right now (e.g. temporary zoom) — confirm in LyXRC
+		: 0.01 * lyxrc.dpi * lyxrc.currentZoom * lyxrc.preview_scale_factor * params().display_pixel_ratio;
}
if (branch_suffix.empty())
return filename;
- string const name = filename.onlyFileNameWithoutExt()
- + to_utf8(branch_suffix);
+ string const name = addExtension(filename.onlyFileNameWithoutExt()
+ + to_utf8(branch_suffix), filename.extension());
FileName res(filename.onlyPath().absFileName() + "/" + name);
- res.changeExtension(filename.extension());
return res;
}
FileName const bname(
addName(path, onlyFileName(
changeExtension(filename,
- formats.extension(params().bufferFormat()) + ".out"))));
+ theFormats().extension(params().bufferFormat()) + ".out"))));
// Also consider the master buffer log file
FileName masterfname = fname;
{
bool const changed = fname != d->filename;
d->filename = fname;
+ d->refreshFileMonitor();
if (changed)
lyxvc().file_found_hook(fname);
setReadonly(d->filename.isReadOnly());
params().options.erase();
params().master.erase();
params().float_placement.erase();
+ params().float_alignment.erase();
params().paperwidth.erase();
params().paperheight.erase();
params().leftmargin.erase();
params().html_latex_end.clear();
params().html_math_img_scale = 1.0;
params().output_sync_macro.erase();
- params().setLocalLayout(string(), false);
- params().setLocalLayout(string(), true);
+ params().setLocalLayout(docstring(), false);
+ params().setLocalLayout(docstring(), true);
+ params().biblio_opts.erase();
+ params().biblatex_bibstyle.erase();
+ params().biblatex_citestyle.erase();
+ params().multibib.erase();
for (int i = 0; i < 4; ++i) {
params().user_defined_bullet(i) = ITEMIZE_DEFAULTS[i];
"%1$s %2$s\n"),
from_utf8(token),
lex.getDocString());
- errorList.push_back(ErrorItem(_("Document header error"),
- s, -1, 0, 0));
+ errorList.push_back(ErrorItem(_("Document header error"), s));
}
}
}
if (begin_header_line) {
docstring const s = _("\\begin_header is missing");
- errorList.push_back(ErrorItem(_("Document header error"),
- s, -1, 0, 0));
+ errorList.push_back(ErrorItem(_("Document header error"), s));
}
+ params().shell_escape = theSession().shellescapeFiles().find(absFileName());
+
params().makeDocumentClass();
return unknown_tokens;
if (!lex.checkFor("\\begin_document")) {
docstring const s = _("\\begin_document is missing");
- errorList.push_back(ErrorItem(_("Document header error"),
- s, -1, 0, 0));
+ errorList.push_back(ErrorItem(_("Document header error"), s));
}
readHeader(lex);
<< absFileName()
<< ") does not include "
"this document. Ignoring the master assignment.");
+ // If the master has just been created, un-hide it (#11162)
+ if (!master->fileName().exists())
+ lyx::dispatch(FuncRequest(LFUN_BUFFER_SWITCH,
+ master->absFileName()));
}
}
}
params().indiceslist().addDefault(B_("Index"));
// read main text
- d->old_position = originFilePath();
+ if (FileName::isAbsolute(params().origin))
+ d->old_position = params().origin;
+ else
+ d->old_position = filePath();
bool const res = text().read(lex, errorList, d->inset);
d->old_position.clear();
// inform parent buffer about local macros
if (parent()) {
Buffer const * pbuf = parent();
- UserMacroSet::const_iterator cit = usermacros.begin();
- UserMacroSet::const_iterator end = usermacros.end();
- for (; cit != end; ++cit)
- pbuf->usermacros.insert(*cit);
+ for (auto const & m : usermacros)
+ pbuf->usermacros.insert(m);
}
usermacros.clear();
updateMacros();
bool Buffer::importString(string const & format, docstring const & contents, ErrorList & errorList)
{
- Format const * fmt = formats.getFormat(format);
+ Format const * fmt = theFormats().getFormat(format);
if (!fmt)
return false;
// It is important to use the correct extension here, since some
// converters create a wrong output file otherwise (e.g. html2latex)
- TempFile const tempfile("Buffer_importStringXXXXXX." + fmt->extension());
- FileName const name(tempfile.name());
+ FileName const name = tempFileName("Buffer_importStringXXXXXX." + fmt->extension());
ofdocstream os(name.toFilesystemEncoding().c_str());
// Do not convert os implicitly to bool, since that is forbidden in C++11.
bool const success = !(os << contents).fail();
converted = importFile(format, name, errorList);
}
- if (name.exists())
- name.removeFile();
+ removeTempFile(name);
return converted;
}
if (!theConverters().isReachable(format, "lyx"))
return false;
- TempFile const tempfile("Buffer_importFileXXXXXX.lyx");
- FileName const lyx(tempfile.name());
- if (theConverters().convert(0, name, lyx, name, format, "lyx", errorList))
- return readFile(lyx) == ReadSuccess;
+ FileName const lyx = tempFileName("Buffer_importFileXXXXXX.lyx");
+ Converters::RetVal const retval =
+ theConverters().convert(0, name, lyx, name, format, "lyx", errorList);
+ if (retval == Converters::SUCCESS) {
+ bool const success = readFile(lyx) == ReadSuccess;
+ removeTempFile(lyx);
+ return success;
+ }
return false;
}
if (file_format != LYX_FORMAT) {
FileName tmpFile;
- ReadStatus const ret_clf = convertLyXFormat(fn, tmpFile, file_format);
+ ReadStatus ret_clf = convertLyXFormat(fn, tmpFile, file_format);
if (ret_clf != ReadSuccess)
return ret_clf;
- return readFile(tmpFile);
+ ret_clf = readFile(tmpFile);
+ if (ret_clf == ReadSuccess) {
+ d->file_format = file_format;
+ d->need_format_backup = true;
+ }
+ return ret_clf;
}
// FIXME: InsetInfo needs to know whether the file is under VCS
d->file_fully_loaded = true;
d->read_only = !d->filename.isWritable();
- params().compressed = formats.isZippedFile(d->filename);
+ params().compressed = theFormats().isZippedFile(d->filename);
saveCheckSum();
return ReadSuccess;
}
command << os::python()
<< ' ' << quoteName(lyx2lyx.toFilesystemEncoding())
<< " -t " << convert<string>(LYX_FORMAT)
- << " -o " << quoteName(tmpfile.toFilesystemEncoding())
+ << " -o " << quoteName(tmpfile.toSafeFilesystemEncoding())
<< ' ' << quoteName(fn.toSafeFilesystemEncoding());
string const command_str = command.str();
}
+// Compute a backup name for a file that was loaded from an older LyX
+// format, e.g. "doc-lyx21.lyx~" or "doc-lyxformat-NNN.lyx~". Returns an
+// empty FileName when no suitable (non-clashing) name exists or when the
+// file on disk is already identical to an existing backup.
+FileName Buffer::getBackupName() const {
+	// Map known file formats to the stable release series that produced
+	// them, so the backup suffix can name the version instead of the
+	// raw format number.
+	map<int, string> const file_formats = {
+		{544, "23"},
+		{508, "22"},
+		{474, "21"},
+		{413, "20"},
+		{345, "16"},
+		{276, "15"},
+		{245, "14"},
+		{221, "13"},
+		{220, "12"},
+		{218, "1163"},
+		{217, "116"},
+		{216, "115"},
+		{215, "11"},
+		{210, "010"},
+		{200, "006"}
+	};
+	FileName const & fn = fileName();
+	string const fname = fn.onlyFileNameWithoutExt();
+	string const fext = fn.extension() + "~";
+	// Backups go to the configured backup directory if one is set,
+	// otherwise next to the original file.
+	string const fpath = lyxrc.backupdir_path.empty() ?
+		fn.onlyPath().absFileName() :
+		lyxrc.backupdir_path;
+	string backup_suffix;
+	// If file format is from a stable series use version instead of file format
+	auto const it = file_formats.find(d->file_format);
+	if (it != file_formats.end())
+		backup_suffix = "-lyx" + it->second;
+	else
+		backup_suffix = "-lyxformat-" + convert<string>(d->file_format);
+	string const backname = fname + backup_suffix;
+	FileName backup(addName(fpath, addExtension(backname, fext)));
+
+	// limit recursion, just in case
+	int v = 1;
+	unsigned long orig_checksum = 0;
+	while (backup.exists() && v < 100) {
+		// lazily compute the original's checksum only once
+		if (orig_checksum == 0)
+			orig_checksum = fn.checksum();
+		unsigned long new_checksum = backup.checksum();
+		if (orig_checksum == new_checksum) {
+			LYXERR(Debug::FILES, "Not backing up " << fn <<
+			       " since " << backup << " has the same checksum.");
+			// a bit of a hack, but we have to check this anyway
+			// below, and setting this is simpler than introducing
+			// a special boolean for this purpose.
+			v = 1000;
+			break;
+		}
+		// name is taken: try "name-1", "name-2", ...
+		string const newbackname = backname + "-" + convert<string>(v);
+		backup.set(addName(fpath, addExtension(newbackname, fext)));
+		v++;
+	}
+	// v >= 100 means either an identical backup already exists (v == 1000)
+	// or we gave up looking for a free name; signal both with an empty name.
+	return v < 100 ? backup : FileName();
+}
+
+
// Should probably be moved to somewhere else: BufferView? GuiView?
bool Buffer::save() const
{
}
// ask if the disk file has been externally modified (use checksum method)
- if (fileName().exists() && isExternallyModified(checksum_method)) {
+ if (fileName().exists() && isChecksumModified()) {
docstring text =
bformat(_("Document %1$s has been externally modified. "
"Are you sure you want to overwrite this file?"), file);
// if the file does not yet exist, none of the backup activity
// that follows is necessary
- if (!fileName().exists()) {
+ if (!fileName().exists()) {
if (!writeFile(fileName()))
- return false;
- markClean();
- return true;
- }
+ return false;
+ markClean();
+ return true;
+ }
// we first write the file to a new name, then move it to its
// proper location once that has been done successfully. that
// way we preserve the original file if something goes wrong.
string const justname = fileName().onlyFileNameWithoutExt();
- boost::scoped_ptr<TempFile>
- tempfile(new TempFile(fileName().onlyPath(),
- justname + "-XXXXXX.lyx"));
+ auto tempfile = make_unique<TempFile>(fileName().onlyPath(),
+ justname + "-XXXXXX.lyx");
bool const symlink = fileName().isSymLink();
if (!symlink)
tempfile->setAutoRemove(false);
FileName savefile(tempfile->name());
LYXERR(Debug::FILES, "Saving to " << savefile.absFileName());
+ if (!savefile.clonePermissions(fileName()))
+ LYXERR0("Failed to clone the permission from " << fileName().absFileName() << " to " << savefile.absFileName());
+
if (!writeFile(savefile))
return false;
// we will set this to false if we fail
bool made_backup = true;
- FileName backupName(absFileName() + '~');
- if (lyxrc.make_backup) {
- if (!lyxrc.backupdir_path.empty()) {
- string const mangledName =
- subst(subst(backupName.absFileName(), '/', '!'), ':', '!');
- backupName = FileName(addName(lyxrc.backupdir_path,
- mangledName));
+ FileName backupName;
+ bool const needBackup = lyxrc.make_backup || d->need_format_backup;
+ if (needBackup) {
+ if (d->need_format_backup)
+ backupName = getBackupName();
+
+ // If we for some reason failed to find a backup name in case of
+ // a format change, this will still set one. It's the best we can
+ // do in this case.
+ if (backupName.empty()) {
+ backupName.set(fileName().absFileName() + "~");
+ if (!lyxrc.backupdir_path.empty()) {
+ string const mangledName =
+ subst(subst(backupName.absFileName(), '/', '!'), ':', '!');
+ backupName.set(addName(lyxrc.backupdir_path, mangledName));
+ }
}
LYXERR(Debug::FILES, "Backing up original file to " <<
"Please check whether the directory exists and is writable."),
from_utf8(backupName.absFileName())));
//LYXERR(Debug::DEBUG, "Fs error: " << fe.what());
+ } else if (d->need_format_backup) {
+ // the original file has been backed up successfully, so we
+ // will not need to do that again
+ d->need_format_backup = false;
}
}
// time stamp is invalidated by copying/moving
saveCheckSum();
markClean();
+ if (d->file_format != LYX_FORMAT)
+ // the file associated with this buffer is now in the current format
+ d->file_format = LYX_FORMAT;
return true;
}
// else we saved the file, but failed to move it to the right location.
- if (lyxrc.make_backup && made_backup && !symlink) {
- // the original file was moved to filename.lyx~, so it will look
+ if (needBackup && made_backup && !symlink) {
+ // the original file was moved to some new location, so it will look
// to the user as if it was deleted. (see bug #9234.) we could try
// to restore it, but that would basically mean trying to do again
// what we just failed to do. better to leave things as they are.
return user_message;
}
- user_message += " " + _("Save failed! Bummer. Document is lost.");
+ user_message += " " + _("Save failed! Document is lost.");
// Don't try again.
markClean();
return user_message;
// Important: Keep the version formatting in sync with lyx2lyx and
// tex2lyx (bug 7951)
ofs << "#LyX " << lyx_version_major << "." << lyx_version_minor
- << " created this file. For more info see http://www.lyx.org/\n"
+ << " created this file. For more info see https://www.lyx.org/\n"
<< "\\lyxformat " << LYX_FORMAT << "\n"
<< "\\begin_document\n";
/// For each author, set 'used' to true if there is a change
/// by this author in the document; otherwise set it to 'false'.
- AuthorList::Authors::const_iterator a_it = params().authors().begin();
- AuthorList::Authors::const_iterator a_end = params().authors().end();
- for (; a_it != a_end; ++a_it)
- a_it->setUsed(false);
+ for (Author const & a : params().authors())
+ a.setUsed(false);
ParIterator const end = const_cast<Buffer *>(this)->par_iterator_end();
ParIterator it = const_cast<Buffer *>(this)->par_iterator_begin();
}
-bool Buffer::makeLaTeXFile(FileName const & fname,
+Buffer::ExportStatus Buffer::makeLaTeXFile(FileName const & fname,
string const & original_path,
OutputParams const & runparams_in,
OutputWhat output) const
{
OutputParams runparams = runparams_in;
- // This is necessary for LuaTeX/XeTeX with tex fonts.
- // See FIXME in BufferParams::encoding()
- if (runparams.isFullUnicode())
- runparams.encoding = encodings.fromLyXName("utf8-plain");
-
string const encoding = runparams.encoding->iconvName();
LYXERR(Debug::LATEX, "makeLaTeXFile encoding: " << encoding << ", fname=" << fname.realPath());
try { ofs.reset(encoding); }
catch (iconv_codecvt_facet_exception const & e) {
lyxerr << "Caught iconv exception: " << e.what() << endl;
- Alert::error(_("Iconv software exception Detected"), bformat(_("Please "
- "verify that the support software for your encoding (%1$s) is "
- "properly installed"), from_ascii(encoding)));
- return false;
+ Alert::error(_("Iconv software exception Detected"),
+ bformat(_("Please verify that the `iconv' support software is"
+ " properly installed and supports the selected encoding"
+ " (%1$s), or change the encoding in"
+ " Document>Settings>Language."), from_ascii(encoding)));
+ return ExportError;
}
if (!openFileWrite(ofs, fname))
- return false;
+ return ExportError;
ErrorList & errorList = d->errorLists["Export"];
errorList.clear();
- bool failed_export = false;
- otexstream os(ofs, d->texrow);
+ ExportStatus status = ExportSuccess;
+ otexstream os(ofs);
// make sure we are ready to export
// this needs to be done before we validate
updateBuffer();
updateMacroInstances(OutputUpdate);
+ ExportStatus retval;
try {
- os.texrow().reset();
- writeLaTeXSource(os, original_path, runparams, output);
+ retval = writeLaTeXSource(os, original_path, runparams, output);
+ if (retval == ExportKilled)
+ return ExportKilled;
}
catch (EncodingException const & e) {
docstring const failed(1, e.failed_char);
ostringstream oss;
oss << "0x" << hex << e.failed_char << dec;
- docstring msg = bformat(_("Could not find LaTeX command for character '%1$s'"
- " (code point %2$s)"),
- failed, from_utf8(oss.str()));
- errorList.push_back(ErrorItem(msg, _("Some characters of your document are probably not "
- "representable in the chosen encoding.\n"
- "Changing the document encoding to utf8 could help."),
- e.par_id, e.pos, e.pos + 1));
- failed_export = true;
+ if (getParFromID(e.par_id).paragraph().layout().pass_thru) {
+ docstring msg = bformat(_("Uncodable character '%1$s'"
+ " (code point %2$s)"),
+ failed, from_utf8(oss.str()));
+ errorList.push_back(ErrorItem(msg, _("Some characters of your document are not "
+ "representable in specific verbatim contexts.\n"
+ "Changing the document encoding to utf8 could help."),
+ {e.par_id, e.pos}, {e.par_id, e.pos + 1}));
+ } else {
+ docstring msg = bformat(_("Could not find LaTeX command for character '%1$s'"
+ " (code point %2$s)"),
+ failed, from_utf8(oss.str()));
+ errorList.push_back(ErrorItem(msg, _("Some characters of your document are probably not "
+ "representable in the chosen encoding.\n"
+ "Changing the document encoding to utf8 could help."),
+ {e.par_id, e.pos}, {e.par_id, e.pos + 1}));
+ }
+ status = ExportError;
}
catch (iconv_codecvt_facet_exception const & e) {
errorList.push_back(ErrorItem(_("iconv conversion failed"),
- _(e.what()), -1, 0, 0));
- failed_export = true;
+ _(e.what())));
+ status = ExportError;
}
catch (exception const & e) {
errorList.push_back(ErrorItem(_("conversion failed"),
- _(e.what()), -1, 0, 0));
- failed_export = true;
+ _(e.what())));
+ lyxerr << e.what() << endl;
+ status = ExportError;
}
catch (...) {
lyxerr << "Caught some really weird exception..." << endl;
lyx_exit(1);
}
+ d->texrow = move(os.texrow());
+
ofs.close();
if (ofs.fail()) {
- failed_export = true;
+ status = ExportError;
lyxerr << "File '" << fname << "' was not closed properly." << endl;
}
errorList.clear();
else
errors("Export");
- return !failed_export;
+ return status;
}
-void Buffer::writeLaTeXSource(otexstream & os,
+Buffer::ExportStatus Buffer::writeLaTeXSource(otexstream & os,
string const & original_path,
OutputParams const & runparams_in,
OutputWhat output) const
OutputParams runparams = runparams_in;
- // This is necessary for LuaTeX/XeTeX with tex fonts.
- // See FIXME in BufferParams::encoding()
- if (runparams.isFullUnicode())
- runparams.encoding = encodings.fromLyXName("utf8-plain");
+ // Some macros rely on font encoding
+ runparams.main_fontenc = params().main_font_encoding();
// If we are compiling a file standalone, even if this is the
// child of some other buffer, let's cut the link here, so the
LaTeXFeatures features(*this, params(), runparams);
validate(features);
// This is only set once per document (in master)
- if (!runparams.is_child)
+ if (!runparams.is_child) {
runparams.use_polyglossia = features.usePolyglossia();
+ runparams.use_CJK = features.mustProvide("CJK");
+ }
LYXERR(Debug::LATEX, " Buffer validation done.");
bool const output_preamble =
// first paragraph of the document. (Asger)
if (output_preamble && runparams.nice) {
os << "%% LyX " << lyx_version << " created this file. "
- "For more info, see http://www.lyx.org/.\n"
+ "For more info, see https://www.lyx.org/.\n"
"%% Do not edit unless you really know what "
"you are doing.\n";
}
if (!uncodable_glyphs.empty()) {
frontend::Alert::warning(
_("Uncodable character in file path"),
- support::bformat(
+ bformat(
_("The path of your document\n"
"(%1$s)\n"
"contains glyphs that are unknown "
"file path name."),
inputpath, uncodable_glyphs));
} else {
- string docdir =
- support::latex_path(original_path);
+ string docdir = os::latex_path(original_path);
if (contains(docdir, '#')) {
docdir = subst(docdir, "#", "\\#");
os << "\\catcode`\\#=11"
os << "\\catcode`\\%=11"
"\\def\\%{%}\\catcode`\\%=14\n";
}
+ bool const detokenize = !isAscii(from_utf8(docdir));
+ bool const quote = contains(docdir, ' ');
os << "\\makeatletter\n"
- << "\\def\\input@path{{"
- << docdir << "}}\n"
+ << "\\def\\input@path{{";
+ if (detokenize)
+ os << "\\detokenize{";
+ if (quote)
+ os << "\"";
+ os << docdir;
+ if (quote)
+ os << "\"";
+ if (detokenize)
+ os << "}";
+ os << "}}\n"
<< "\\makeatother\n";
}
}
runparams.use_babel = params().writeLaTeX(os, features,
d->filename.onlyPath());
+ // Biblatex bibliographies are loaded here
+ if (params().useBiblatex()) {
+ vector<pair<docstring, string>> const bibfiles =
+ prepareBibFilePaths(runparams, getBibfiles(), true);
+ for (pair<docstring, string> const & file: bibfiles) {
+ os << "\\addbibresource";
+ if (!file.second.empty())
+ os << "[bibencoding=" << file.second << "]";
+ os << "{" << file.first << "}\n";
+ }
+ }
+
+ if (!runparams.dryrun && features.hasPolyglossiaExclusiveLanguages()
+ && !features.hasOnlyPolyglossiaLanguages()) {
+ docstring blangs;
+ docstring plangs;
+ vector<string> bll = features.getBabelExclusiveLanguages();
+ vector<string> pll = features.getPolyglossiaExclusiveLanguages();
+ if (!bll.empty()) {
+ docstring langs;
+ for (string const & sit : bll) {
+ if (!langs.empty())
+ langs += ", ";
+ langs += _(sit);
+ }
+ blangs = bll.size() > 1 ?
+ bformat(_("The languages %1$s are only supported by Babel."), langs)
+ : bformat(_("The language %1$s is only supported by Babel."), langs);
+ }
+ if (!pll.empty()) {
+ docstring langs;
+ for (string const & pit : pll) {
+ if (!langs.empty())
+ langs += ", ";
+ langs += _(pit);
+ }
+ plangs = pll.size() > 1 ?
+ bformat(_("The languages %1$s are only supported by Polyglossia."), langs)
+ : bformat(_("The language %1$s is only supported by Polyglossia."), langs);
+ if (!blangs.empty())
+ plangs += "\n";
+ }
+
+ frontend::Alert::warning(
+ _("Incompatible Languages!"),
+ bformat(
+ _("You cannot use the following languages "
+ "together in one LaTeX document because "
+ "they require conflicting language packages:\n"
+ "%1$s%2$s"),
+ plangs, blangs));
+ }
+
// Japanese might be required only in some children of a document,
// but once required, we must keep use_japanese true.
runparams.use_japanese |= features.isRequired("japanese");
// Restore the parenthood if needed
if (!runparams.is_child)
d->ignore_parent = false;
- return;
+ return ExportSuccess;
}
// make the body.
+ // mark the beginning of the body to separate it from InPreamble insets
+ os.texrow().start(TexRow::beginDocument());
os << "\\begin{document}\n";
+ // mark the start of a new paragraph by simulating a newline,
+ // so that os.afterParbreak() returns true at document start
+ os.lastChar('\n');
+
// output the parent macros
- MacroSet::iterator it = parentMacros.begin();
- MacroSet::iterator end = parentMacros.end();
- for (; it != end; ++it) {
- int num_lines = (*it)->write(os.os(), true);
+ for (auto const & mac : parentMacros) {
+ int num_lines = mac->write(os.os(), true);
os.texrow().newlines(num_lines);
}
} // output_preamble
- os.texrow().start(paragraphs().begin()->id(), 0);
-
LYXERR(Debug::INFO, "preamble finished, now the body.");
// the real stuff
- latexParagraphs(*this, text(), os, runparams);
+ try {
+ latexParagraphs(*this, text(), os, runparams);
+ }
+ catch (ConversionException const &) { return ExportKilled; }
// Restore the parenthood if needed
if (!runparams.is_child)
}
runparams_in.encoding = runparams.encoding;
- // Just to be sure. (Asger)
- os.texrow().newline();
-
- //for (int i = 0; i<d->texrow.rows(); i++) {
- // int id,pos;
- // if (d->texrow.getIdFromRow(i+1,id,pos) && id>0)
- // lyxerr << i+1 << ":" << id << ":" << getParFromID(id).paragraph().asString()<<"\n";
- //}
-
LYXERR(Debug::INFO, "Finished making LaTeX file.");
LYXERR(Debug::INFO, "Row count was " << os.texrow().rows() - 1 << '.');
+ return ExportSuccess;
}
-void Buffer::makeDocBookFile(FileName const & fname,
+Buffer::ExportStatus Buffer::makeDocBookFile(FileName const & fname,
OutputParams const & runparams,
OutputWhat output) const
{
ofdocstream ofs;
if (!openFileWrite(ofs, fname))
- return;
+ return ExportError;
// make sure we are ready to export
// this needs to be done before we validate
updateBuffer();
updateMacroInstances(OutputUpdate);
- writeDocBookSource(ofs, fname.absFileName(), runparams, output);
+ ExportStatus const retval =
+ writeDocBookSource(ofs, fname.absFileName(), runparams, output);
+ if (retval == ExportKilled)
+ return ExportKilled;
ofs.close();
if (ofs.fail())
lyxerr << "File '" << fname << "' was not closed properly." << endl;
+ return ExportSuccess;
}
-void Buffer::writeDocBookSource(odocstream & os, string const & fname,
+Buffer::ExportStatus Buffer::writeDocBookSource(odocstream & os, string const & fname,
OutputParams const & runparams,
OutputWhat output) const
{
if (! tclass.class_header().empty())
os << from_ascii(tclass.class_header());
else if (runparams.flavor == OutputParams::XML)
- os << "PUBLIC \"-//OASIS//DTD DocBook XML//EN\" "
- << "\"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd\"";
+ os << "PUBLIC \"-//OASIS//DTD DocBook XML V4.2//EN\" "
+ << "\"https://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd\"";
else
os << " PUBLIC \"-//OASIS//DTD DocBook V4.2//EN\"";
- docstring preamble = from_utf8(params().preamble);
+ docstring preamble = params().preamble;
if (runparams.flavor != OutputParams::XML ) {
preamble += "<!ENTITY % output.print.png \"IGNORE\">\n";
preamble += "<!ENTITY % output.print.pdf \"IGNORE\">\n";
os << "<!-- " << ((runparams.flavor == OutputParams::XML)? "XML" : "SGML")
<< " file was created by LyX " << lyx_version
- << "\n See http://www.lyx.org/ for more information -->\n";
+ << "\n See https://www.lyx.org/ for more information -->\n";
params().documentClass().counters().reset();
sgml::openTag(os, top);
os << '\n';
- docbookParagraphs(text(), *this, os, runparams);
+ try {
+ docbookParagraphs(text(), *this, os, runparams);
+ }
+ catch (ConversionException const &) { return ExportKilled; }
sgml::closeTag(os, top_element);
}
+ return ExportSuccess;
}
-void Buffer::makeLyXHTMLFile(FileName const & fname,
+Buffer::ExportStatus Buffer::makeLyXHTMLFile(FileName const & fname,
OutputParams const & runparams) const
{
LYXERR(Debug::LATEX, "makeLyXHTMLFile...");
ofdocstream ofs;
if (!openFileWrite(ofs, fname))
- return;
+ return ExportError;
// make sure we are ready to export
// this has to be done before we validate
updateBuffer(UpdateMaster, OutputUpdate);
updateMacroInstances(OutputUpdate);
- writeLyXHTMLSource(ofs, runparams, FullSource);
+ ExportStatus const retval = writeLyXHTMLSource(ofs, runparams, FullSource);
+ if (retval == ExportKilled)
+ return retval;
ofs.close();
if (ofs.fail())
lyxerr << "File '" << fname << "' was not closed properly." << endl;
+ return retval;
}
-void Buffer::writeLyXHTMLSource(odocstream & os,
+Buffer::ExportStatus Buffer::writeLyXHTMLSource(odocstream & os,
OutputParams const & runparams,
OutputWhat output) const
{
if (!styles.empty())
os << "\n<!-- Text Class Preamble -->\n" << styles << '\n';
- styles = from_utf8(features.getPreambleSnippets());
+ styles = features.getPreambleSnippets().str;
if (!styles.empty())
os << "\n<!-- Preamble Snippets -->\n" << styles << '\n';
// we will collect CSS information in a stream, and then output it
// either here, as part of the header, or else in a separate file.
odocstringstream css;
- styles = from_utf8(features.getCSSSnippets());
+ styles = features.getCSSSnippets();
if (!styles.empty())
css << "/* LyX Provided Styles */\n" << styles << '\n';
if (output_body) {
bool const output_body_tag = (output != IncludedFile);
if (output_body_tag)
- os << "<body>\n";
+ os << "<body dir=\"auto\">\n";
XHTMLStream xs(os);
if (output != IncludedFile)
// if we're an included file, the counters are in the master.
params().documentClass().counters().reset();
- xhtmlParagraphs(text(), *this, xs, runparams);
+ try {
+ xhtmlParagraphs(text(), *this, xs, runparams);
+ }
+ catch (ConversionException const &) { return ExportKilled; }
if (output_body_tag)
os << "</body>\n";
}
if (output_preamble)
os << "</html>\n";
+
+ return ExportSuccess;
}
runparams.flavor = OutputParams::LATEX;
runparams.nice = false;
runparams.linelen = lyxrc.plaintext_linelen;
- makeLaTeXFile(FileName(name), org_path, runparams);
+ ExportStatus const retval =
+ makeLaTeXFile(FileName(name), org_path, runparams);
+ if (retval != ExportSuccess) {
+ // error code on failure
+ return -1;
+ }
TeXErrors terr;
Chktex chktex(lyxrc.chktex_command, onlyFileName(name), filePath());
if (!features.runparams().is_child)
params().validate(features);
- for_each(paragraphs().begin(), paragraphs().end(),
- bind(&Paragraph::validate, _1, ref(features)));
+ for (Paragraph const & p : paragraphs())
+ p.validate(features);
if (lyxerr.debugging(Debug::LATEX)) {
features.showStruct();
}
list.clear();
- Toc & toc = d->toc_backend.toc("label");
- TocIterator toc_it = toc.begin();
- TocIterator end = toc.end();
- for (; toc_it != end; ++toc_it) {
- if (toc_it->depth() == 0)
- list.push_back(toc_it->str());
- }
-}
-
-
-void Buffer::updateBibfilesCache(UpdateScope scope) const
-{
- // FIXME This is probably unnecssary, given where we call this.
- // If this is a child document, use the parent's cache instead.
- if (parent() && scope != UpdateChildOnly) {
- masterBuffer()->updateBibfilesCache();
- return;
- }
-
- d->bibfiles_cache_.clear();
- for (InsetIterator it = inset_iterator_begin(inset()); it; ++it) {
- if (it->lyxCode() == BIBTEX_CODE) {
- InsetBibtex const & inset = static_cast<InsetBibtex const &>(*it);
- support::FileNameList const bibfiles = inset.getBibFiles();
- d->bibfiles_cache_.insert(d->bibfiles_cache_.end(),
- bibfiles.begin(),
- bibfiles.end());
- } else if (it->lyxCode() == INCLUDE_CODE) {
- InsetInclude & inset = static_cast<InsetInclude &>(*it);
- Buffer const * const incbuf = inset.getChildBuffer();
- if (!incbuf)
- continue;
- support::FileNameList const & bibfiles =
- incbuf->getBibfilesCache(UpdateChildOnly);
- if (!bibfiles.empty()) {
- d->bibfiles_cache_.insert(d->bibfiles_cache_.end(),
- bibfiles.begin(),
- bibfiles.end());
- }
- }
+ shared_ptr<Toc> toc = d->toc_backend.toc("label");
+ for (auto const & tocit : *toc) {
+ if (tocit.depth() == 0)
+ list.push_back(tocit.str());
}
- d->bibfile_cache_valid_ = true;
- d->bibinfo_cache_valid_ = false;
- d->cite_labels_valid_ = false;
}
{
d->bibinfo_cache_valid_ = false;
d->cite_labels_valid_ = false;
+ removeBiblioTempFiles();
// also invalidate the cache for the parent buffer
Buffer const * const pbuf = d->parent();
if (pbuf)
}
-void Buffer::invalidateBibfileCache() const
-{
- d->bibfile_cache_valid_ = false;
- d->bibinfo_cache_valid_ = false;
- d->cite_labels_valid_ = false;
- // also invalidate the cache for the parent buffer
- Buffer const * const pbuf = d->parent();
- if (pbuf)
- pbuf->invalidateBibfileCache();
-}
-
-
-support::FileNameList const & Buffer::getBibfilesCache(UpdateScope scope) const
+docstring_list const & Buffer::getBibfiles(UpdateScope scope) const
{
// FIXME This is probably unnecessary, given where we call this.
- // If this is a child document, use the master's cache instead.
+ // If this is a child document, use the master instead.
Buffer const * const pbuf = masterBuffer();
if (pbuf != this && scope != UpdateChildOnly)
- return pbuf->getBibfilesCache();
+ return pbuf->getBibfiles();
- if (!d->bibfile_cache_valid_)
- this->updateBibfilesCache(scope);
+ // In 2.3.x, we have:
+ //if (!d->bibfile_cache_valid_)
+ // this->updateBibfilesCache(scope);
+ // I think that is a leftover, but there have been so many back-
+ // and-forths with this, due to Windows issues, that I am not sure.
return d->bibfiles_cache_;
}
}
// Read-only access to this buffer's bibliography information cache.
BiblioInfo const & Buffer::bibInfo() const
{
	return d->bibinfo_;
}
+
+
+void Buffer::registerBibfiles(const docstring_list & bf) const
+{
+ // We register the bib files in the master buffer,
+ // if there is one, but also in every single buffer,
+ // in case a child is compiled alone.
+ Buffer const * const tmp = masterBuffer();
+ if (tmp != this)
+ tmp->registerBibfiles(bf);
+
+ for (auto const & p : bf) {
+ docstring_list::const_iterator temp =
+ find(d->bibfiles_cache_.begin(), d->bibfiles_cache_.end(), p);
+ if (temp == d->bibfiles_cache_.end())
+ d->bibfiles_cache_.push_back(p);
+ }
+}
+
+
+static map<docstring, FileName> bibfileCache;
+
+FileName Buffer::getBibfilePath(docstring const & bibid) const
+{
+ map<docstring, FileName>::const_iterator it =
+ bibfileCache.find(bibid);
+ if (it != bibfileCache.end()) {
+ // i.e., bibfileCache[bibid]
+ return it->second;
+ }
+
+ LYXERR(Debug::FILES, "Reading file location for " << bibid);
+ string texfile = changeExtension(to_utf8(bibid), "bib");
+ // note that, if the filename can be found directly from the path,
+ // findtexfile will just return a FileName object for that path.
+ FileName file(findtexfile(texfile, "bib"));
+ if (file.empty())
+ file = FileName(makeAbsPath(texfile, filePath()));
+ LYXERR(Debug::FILES, "Found at: " << file);
+
+ bibfileCache[bibid] = file;
+ return bibfileCache[bibid];
+}
+
+
void Buffer::checkIfBibInfoCacheIsValid() const
{
// use the master's cache
return;
}
+ // If we already know the cache is invalid, stop here.
+ // This is important in the case when the bibliography
+ // environment (rather than Bib[la]TeX) is used.
+ // In that case, the timestamp check below gives no
+ // sensible result. Rather than that, the cache will
+ // be invalidated explicitly via invalidateBibInfoCache()
+ // by the Bibitem inset.
+ // Same applies for bib encoding changes, which trigger
+ // invalidateBibInfoCache() by InsetBibtex.
+ if (!d->bibinfo_cache_valid_)
+ return;
+
+ if (d->have_bibitems_) {
+ // We have a bibliography environment.
+ // Invalidate the bibinfo cache unconditionally.
+ // Cite labels will get invalidated by the inset if needed.
+ d->bibinfo_cache_valid_ = false;
+ return;
+ }
+
+ // OK. This is with Bib(la)tex. We'll assume the cache
+ // is valid and change this if we find changes in the bibs.
+ d->bibinfo_cache_valid_ = true;
+ d->cite_labels_valid_ = true;
+
// compare the cached timestamps with the actual ones.
- FileNameList const & bibfiles_cache = getBibfilesCache();
- FileNameList::const_iterator ei = bibfiles_cache.begin();
- FileNameList::const_iterator en = bibfiles_cache.end();
- for (; ei != en; ++ ei) {
- time_t lastw = ei->lastModified();
- time_t prevw = d->bibfile_status_[*ei];
+ docstring_list const & bibfiles_cache = getBibfiles();
+ for (auto const & bf : bibfiles_cache) {
+ FileName const fn = getBibfilePath(bf);
+ time_t lastw = fn.lastModified();
+ time_t prevw = d->bibfile_status_[fn];
if (lastw != prevw) {
d->bibinfo_cache_valid_ = false;
d->cite_labels_valid_ = false;
- d->bibfile_status_[*ei] = lastw;
+ d->bibfile_status_[fn] = lastw;
}
}
}
-void Buffer::reloadBibInfoCache() const
// Drop all cached bib file locations, forcing getBibfilePath() to
// re-resolve them on next use.
void Buffer::clearBibFileCache() const
{
	bibfileCache.clear();
}
+
+
+void Buffer::reloadBibInfoCache(bool const force) const
{
+ // we should not need to do this for internal buffers
+ if (isInternal())
+ return;
+
// use the master's cache
Buffer const * const tmp = masterBuffer();
if (tmp != this) {
- tmp->reloadBibInfoCache();
+ tmp->reloadBibInfoCache(force);
return;
}
- checkIfBibInfoCacheIsValid();
- if (d->bibinfo_cache_valid_)
- return;
+ if (!force) {
+ checkIfBibInfoCacheIsValid();
+ if (d->bibinfo_cache_valid_)
+ return;
+ }
+ LYXERR(Debug::FILES, "Bibinfo cache was invalid.");
+ // re-read file locations when this info changes
+ // FIXME Is this sufficient? Or should we also force that
+ // in some other cases? If so, then it is easy enough to
+ // add the following line in some other places.
+ clearBibFileCache();
d->bibinfo_.clear();
- collectBibKeys();
+ FileNameList checkedFiles;
+ d->have_bibitems_ = false;
+ collectBibKeys(checkedFiles);
d->bibinfo_cache_valid_ = true;
}
-void Buffer::collectBibKeys() const
+void Buffer::collectBibKeys(FileNameList & checkedFiles) const
{
- for (InsetIterator it = inset_iterator_begin(inset()); it; ++it)
- it->collectBibKeys(it);
+ for (InsetIterator it = inset_iterator_begin(inset()); it; ++it) {
+ it->collectBibKeys(it, checkedFiles);
+ if (it->lyxCode() == BIBITEM_CODE) {
+ if (parent() != 0)
+ parent()->d->have_bibitems_ = true;
+ else
+ d->have_bibitems_ = true;
+ }
+ }
}
-void Buffer::addBiblioInfo(BiblioInfo const & bi) const
+void Buffer::addBiblioInfo(BiblioInfo const & bin) const
{
- Buffer const * tmp = masterBuffer();
- BiblioInfo & masterbi = (tmp == this) ?
- d->bibinfo_ : tmp->d->bibinfo_;
- masterbi.mergeBiblioInfo(bi);
+ // We add the biblio info to the master buffer,
+ // if there is one, but also to every single buffer,
+ // in case a child is compiled alone.
+ BiblioInfo & bi = d->bibinfo_;
+ bi.mergeBiblioInfo(bin);
+
+ if (parent() != 0) {
+ BiblioInfo & masterbi = parent()->d->bibinfo_;
+ masterbi.mergeBiblioInfo(bin);
+ }
}
-void Buffer::addBibTeXInfo(docstring const & key, BibTeXInfo const & bi) const
void Buffer::addBibTeXInfo(docstring const & key, BibTeXInfo const & bin) const
{
	// We add the bibtex info to the master buffer,
	// if there is one, but also to every single buffer,
	// in case a child is compiled alone.
	BiblioInfo & bi = d->bibinfo_;
	bi[key] = bin;

	// NOTE(review): the guard tests parent() but the target is
	// masterBuffer(), while the sibling addBiblioInfo() uses parent()
	// for both. With nested includes these can differ — confirm which
	// buffer is intended here.
	if (parent() != 0) {
		BiblioInfo & masterbi = masterBuffer()->d->bibinfo_;
		masterbi[key] = bin;
	}
}
}
// Mark the master document's citation labels as stale so they are
// regenerated on the next update.
void Buffer::invalidateCiteLabels() const
{
	masterBuffer()->d->cite_labels_valid_ = false;
}
+
bool Buffer::citeLabelsValid() const
{
return masterBuffer()->d->cite_labels_valid_;
switch (cmd.action()) {
case LFUN_BUFFER_TOGGLE_READ_ONLY:
- flag.setOnOff(isReadonly());
+ flag.setOnOff(hasReadonlyFlag());
break;
// FIXME: There is need for a command-line import.
enable = true;
break;
}
- string format = to_utf8(arg);
+ string format = (arg.empty() || arg == "default") ?
+ params().getDefaultOutputFormat() : to_utf8(arg);
size_t pos = format.find(' ');
if (pos != string::npos)
format = format.substr(0, pos);
- enable = params().isExportable(format);
+ enable = params().isExportable(format, false);
if (!enable)
flag.message(bformat(
_("Don't know how to export to format: %1$s"), arg));
break;
}
- case LFUN_BUFFER_CHKTEX:
- enable = params().isLatex() && !lyxrc.chktex_command.empty();
- break;
-
case LFUN_BUILD_PROGRAM:
- enable = params().isExportable("program");
+ enable = params().isExportable("program", false);
break;
case LFUN_BRANCH_ACTIVATE:
case LFUN_BUFFER_VIEW_CACHE:
(d->preview_file_).refresh();
- enable = (d->preview_file_).exists();
+ enable = (d->preview_file_).exists() && !(d->preview_file_).isFileEmpty();
+ break;
+
+ case LFUN_CHANGES_TRACK:
+ flag.setEnabled(true);
+ flag.setOnOff(params().track_changes);
+ break;
+
+ case LFUN_CHANGES_OUTPUT:
+ flag.setEnabled(true);
+ flag.setOnOff(params().output_changes);
+ break;
+
+ case LFUN_BUFFER_TOGGLE_COMPRESSION:
+ flag.setOnOff(params().compressed);
+ break;
+
+ case LFUN_BUFFER_TOGGLE_OUTPUT_SYNC:
+ flag.setOnOff(params().output_sync);
+ break;
+
+ case LFUN_BUFFER_ANONYMIZE:
break;
default:
string const argument = to_utf8(func.argument());
// We'll set this back to false if need be.
bool dispatched = true;
- undo().beginUndoGroup();
+ // This handles undo groups automagically
+ UndoGroupHelper ugh(this);
switch (func.action()) {
case LFUN_BUFFER_TOGGLE_READ_ONLY:
dr.setMessage(log);
}
else
- setReadonly(!isReadonly());
+ setReadonly(!hasReadonlyFlag());
break;
case LFUN_BUFFER_EXPORT: {
- ExportStatus const status = doExport(argument, false);
+ string const format = (argument.empty() || argument == "default") ?
+ params().getDefaultOutputFormat() : argument;
+ ExportStatus const status = doExport(format, false);
dr.setError(status != ExportSuccess);
if (status != ExportSuccess)
dr.setMessage(bformat(_("Error exporting to format: %1$s."),
- func.argument()));
+ from_utf8(format)));
break;
}
break;
}
- case LFUN_BUFFER_CHKTEX:
- runChktex();
- break;
-
case LFUN_BUFFER_EXPORT_CUSTOM: {
string format_name;
string command = split(argument, format_name, ' ');
- Format const * format = formats.getFormat(format_name);
+ Format const * format = theFormats().getFormat(format_name);
if (!format) {
lyxerr << "Format \"" << format_name
<< "\" not recognized!"
}
case LFUN_BRANCH_ADD: {
- docstring branch_name = func.argument();
- if (branch_name.empty()) {
+ docstring branchnames = func.argument();
+ if (branchnames.empty()) {
dispatched = false;
break;
}
BranchList & branch_list = params().branchlist();
vector<docstring> const branches =
- getVectorFromString(branch_name, branch_list.separator());
+ getVectorFromString(branchnames, branch_list.separator());
docstring msg;
- for (vector<docstring>::const_iterator it = branches.begin();
- it != branches.end(); ++it) {
- branch_name = *it;
+ for (docstring const & branch_name : branches) {
Branch * branch = branch_list.find(branch_name);
if (branch) {
LYXERR0("Branch " << branch_name << " already exists.");
}
case LFUN_BUFFER_VIEW_CACHE:
- if (!formats.view(*this, d->preview_file_,
+ if (!theFormats().view(*this, d->preview_file_,
d->preview_format_))
dr.setMessage(_("Error viewing the output file."));
break;
+ case LFUN_CHANGES_TRACK:
+ if (params().save_transient_properties)
+ undo().recordUndoBufferParams(CursorData());
+ params().track_changes = !params().track_changes;
+ if (!params().track_changes)
+ dr.forceChangesUpdate();
+ break;
+
+ case LFUN_CHANGES_OUTPUT:
+ if (params().save_transient_properties)
+ undo().recordUndoBufferParams(CursorData());
+ params().output_changes = !params().output_changes;
+ if (params().output_changes) {
+ bool dvipost = LaTeXFeatures::isAvailable("dvipost");
+ bool xcolorulem = LaTeXFeatures::isAvailable("ulem") &&
+ LaTeXFeatures::isAvailable("xcolor");
+
+ if (!dvipost && !xcolorulem) {
+ Alert::warning(_("Changes not shown in LaTeX output"),
+ _("Changes will not be highlighted in LaTeX output, "
+ "because neither dvipost nor xcolor/ulem are installed.\n"
+ "Please install these packages or redefine "
+ "\\lyxadded and \\lyxdeleted in the LaTeX preamble."));
+ } else if (!xcolorulem) {
+ Alert::warning(_("Changes not shown in LaTeX output"),
+ _("Changes will not be highlighted in LaTeX output "
+ "when using pdflatex, because xcolor and ulem are not installed.\n"
+ "Please install both packages or redefine "
+ "\\lyxadded and \\lyxdeleted in the LaTeX preamble."));
+ }
+ }
+ break;
+
+ case LFUN_BUFFER_TOGGLE_COMPRESSION:
+ // turn compression on/off
+ undo().recordUndoBufferParams(CursorData());
+ params().compressed = !params().compressed;
+ break;
+
+ case LFUN_BUFFER_TOGGLE_OUTPUT_SYNC:
+ undo().recordUndoBufferParams(CursorData());
+ params().output_sync = !params().output_sync;
+ break;
+
+ case LFUN_BUFFER_ANONYMIZE: {
+ undo().recordUndoFullBuffer(CursorData());
+ CursorData cur(doc_iterator_begin(this));
+ for ( ; cur ; cur.forwardPar())
+ cur.paragraph().anonymize();
+ dr.forceBufferUpdate();
+ dr.screenUpdate(Update::Force);
+ break;
+ }
+
default:
dispatched = false;
break;
}
dr.dispatched(dispatched);
- undo().endUndoGroup();
}
std::set<Language const *> Buffer::getLanguages() const
{
- std::set<Language const *> languages;
- getLanguages(languages);
- return languages;
+ std::set<Language const *> langs;
+ getLanguages(langs);
+ return langs;
}
-void Buffer::getLanguages(std::set<Language const *> & languages) const
+void Buffer::getLanguages(std::set<Language const *> & langs) const
{
ParConstIterator end = par_iterator_end();
// add the buffer language, even if it's not actively used
- languages.insert(language());
+ langs.insert(language());
// iterate over the paragraphs
for (ParConstIterator it = par_iterator_begin(); it != end; ++it)
- it->getLanguages(languages);
+ it->getLanguages(langs);
// also children
ListOfBuffers clist = getDescendents();
- ListOfBuffers::const_iterator cit = clist.begin();
- ListOfBuffers::const_iterator const cen = clist.end();
- for (; cit != cen; ++cit)
- (*cit)->getLanguages(languages);
+ for (auto const & cit : clist)
+ cit->getLanguages(langs);
}
DocIterator Buffer::getParFromID(int const id) const
{
Buffer * buf = const_cast<Buffer *>(this);
- if (id < 0) {
- // John says this is called with id == -1 from undo
- lyxerr << "getParFromID(), id: " << id << endl;
+ if (id < 0)
+ // This means non-existent
return doc_iterator_end(buf);
- }
for (DocIterator it = doc_iterator_begin(buf); !it.atEnd(); it.forwardPar())
if (it.paragraph().id() == id)
}
-bool Buffer::isExternallyModified(CheckMethod method) const
// Return true if the on-disk file's checksum differs from the one
// recorded at the last load/save (see saveCheckSum()).
bool Buffer::isChecksumModified() const
{
	LASSERT(d->filename.exists(), return false);
	return d->checksum_ != d->filename.checksum();
}
void Buffer::saveCheckSum() const
{
	// Record the current on-disk checksum so that external
	// modifications can be detected later (isChecksumModified()).
	FileName const & file = d->filename;
	file.refresh();
	if (file.exists())
		d->checksum_ = file.checksum();
	else
		d->checksum_ = 0; // in the case of save to a new file.
}
// autosave
d->bak_clean = true;
d->undo_.markDirty();
+ clearExternalModification();
}
}
d->bak_clean = false;
- DepClean::iterator it = d->dep_clean.begin();
- DepClean::const_iterator const end = d->dep_clean.end();
-
- for (; it != end; ++it)
- it->second = false;
+ for (auto & depit : d->dep_clean)
+ depit.second = false;
}
}
-string Buffer::originFilePath() const
+DocFileName Buffer::getReferencedFileName(string const & fn) const
{
- if (FileName::isAbsolute(params().origin))
- return params().origin;
+ DocFileName result;
+ if (FileName::isAbsolute(fn) || !FileName::isAbsolute(params().origin))
+ result.set(fn, filePath());
+ else {
+ // filePath() ends with a path separator
+ FileName const test(filePath() + fn);
+ if (test.exists())
+ result.set(fn, filePath());
+ else
+ result.set(fn, params().origin);
+ }
+
+ return result;
+}
+
+
+string const Buffer::prepareFileNameForLaTeX(string const & name,
+ string const & ext, bool nice) const
+{
+ string const fname = makeAbsPath(name, filePath()).absFileName();
+ if (FileName::isAbsolute(name) || !FileName(fname + ext).isReadableFile())
+ return name;
+ if (!nice)
+ return fname;
+
+ // FIXME UNICODE
+ return to_utf8(makeRelPath(from_utf8(fname),
+ from_utf8(masterBuffer()->filePath())));
+}
+
+
+vector<pair<docstring, string>> const Buffer::prepareBibFilePaths(OutputParams const & runparams,
+ docstring_list const & bibfilelist,
+ bool const add_extension) const
+{
+ // If we are processing the LaTeX file in a temp directory then
+ // copy the .bib databases to this temp directory, mangling their
+ // names in the process. Store this mangled name in the list of
+ // all databases.
+ // (We need to do all this because BibTeX *really*, *really*
+ // can't handle "files with spaces" and Windows users tend to
+ // use such filenames.)
+ // Otherwise, store the (maybe absolute) path to the original,
+ // unmangled database name.
+
+ vector<pair<docstring, string>> res;
+
+ // determine the export format
+ string const tex_format = flavor2format(runparams.flavor);
+
+ // check for spaces in paths
+ bool found_space = false;
+
+ for (auto const & bit : bibfilelist) {
+ string utf8input = to_utf8(bit);
+ string database =
+ prepareFileNameForLaTeX(utf8input, ".bib", runparams.nice);
+ FileName try_in_file =
+ makeAbsPath(database + ".bib", filePath());
+ bool not_from_texmf = try_in_file.isReadableFile();
+ // If the file has not been found, try with the real file name
+ // (it might come from a child in a sub-directory)
+ if (!not_from_texmf) {
+ try_in_file = getBibfilePath(bit);
+ if (try_in_file.isReadableFile()) {
+ // Check if the file is in texmf
+ FileName kpsefile(findtexfile(changeExtension(utf8input, "bib"), "bib", true));
+ not_from_texmf = kpsefile.empty()
+ || kpsefile.absFileName() != try_in_file.absFileName();
+ if (not_from_texmf)
+ // If this exists, make path relative to the master
+ // FIXME Unicode
+ database =
+ removeExtension(prepareFileNameForLaTeX(
+ to_utf8(makeRelPath(from_utf8(try_in_file.absFileName()),
+ from_utf8(filePath()))),
+ ".bib", runparams.nice));
+ }
+ }
+
+ if (!runparams.inComment && !runparams.dryrun && !runparams.nice &&
+ not_from_texmf) {
+ // mangledFileName() needs the extension
+ DocFileName const in_file = DocFileName(try_in_file);
+ database = removeExtension(in_file.mangledFileName());
+ FileName const out_file = makeAbsPath(database + ".bib",
+ masterBuffer()->temppath());
+ bool const success = in_file.copyTo(out_file);
+ if (!success) {
+ LYXERR0("Failed to copy '" << in_file
+ << "' to '" << out_file << "'");
+ }
+ } else if (!runparams.inComment && runparams.nice && not_from_texmf) {
+ runparams.exportdata->addExternalFile(tex_format, try_in_file, database + ".bib");
+ if (!isValidLaTeXFileName(database)) {
+ frontend::Alert::warning(_("Invalid filename"),
+ _("The following filename will cause troubles "
+ "when running the exported file through LaTeX: ") +
+ from_utf8(database));
+ }
+ if (!isValidDVIFileName(database)) {
+ frontend::Alert::warning(_("Problematic filename for DVI"),
+ _("The following filename can cause troubles "
+ "when running the exported file through LaTeX "
+ "and opening the resulting DVI: ") +
+ from_utf8(database), true);
+ }
+ }
+
+ if (add_extension)
+ database += ".bib";
+
+ // FIXME UNICODE
+ docstring const path = from_utf8(latex_path(database));
+
+ if (contains(path, ' '))
+ found_space = true;
+ string enc;
+ if (params().useBiblatex() && !params().bibFileEncoding(utf8input).empty())
+ enc = params().bibFileEncoding(utf8input);
+
+ bool recorded = false;
+ for (pair<docstring, string> pe : res) {
+ if (pe.first == path) {
+ recorded = true;
+ break;
+ }
+
+ }
+ if (!recorded)
+ res.push_back(make_pair(path, enc));
+ }
+
+ // Check if there are spaces in the path and warn BibTeX users, if so.
+ // (biber can cope with such paths)
+ if (!prefixIs(runparams.bibtex_command, "biber")) {
+ // Post this warning only once.
+ static bool warned_about_spaces = false;
+ if (!warned_about_spaces &&
+ runparams.nice && found_space) {
+ warned_about_spaces = true;
+ Alert::warning(_("Export Warning!"),
+ _("There are spaces in the paths to your BibTeX databases.\n"
+ "BibTeX will be unable to find them."));
+ }
+ }
- return filePath();
+ return res;
}
+
// Accessor for the buffer's stored layout position.
string Buffer::layoutPos() const
{
	return d->layout_position;
}
-bool Buffer::isReadonly() const
// Return only the explicit read-only flag; see also isReadonly(),
// which additionally accounts for external modifications.
bool Buffer::hasReadonlyFlag() const
{
	return d->read_only;
}
// A buffer is effectively read-only if the read-only flag is set or
// an external modification notification is pending.
bool Buffer::isReadonly() const
{
	return hasReadonlyFlag() || notifiesExternalModification();
}
+
+
void Buffer::setParent(Buffer const * buffer)
{
// Avoids recursive include.
void Buffer::collectChildren(ListOfBuffers & clist, bool grand_children) const
{
// loop over children
- Impl::BufferPositionMap::iterator it = d->children_positions.begin();
- Impl::BufferPositionMap::iterator end = d->children_positions.end();
- for (; it != end; ++it) {
- Buffer * child = const_cast<Buffer *>(it->first);
+ for (auto const & p : d->children_positions) {
+ Buffer * child = const_cast<Buffer *>(p.first);
// No duplicates
ListOfBuffers::const_iterator bit = find(clist.begin(), clist.end(), child);
if (bit != clist.end())
while (it.pit() <= lastpit) {
Paragraph & par = it.paragraph();
+ // FIXME Can this be done with the new-style iterators?
// iterate over the insets of the current paragraph
- InsetList const & insets = par.insetList();
- InsetList::const_iterator iit = insets.begin();
- InsetList::const_iterator end = insets.end();
- for (; iit != end; ++iit) {
- it.pos() = iit->pos;
+ for (auto const & insit : par.insetList()) {
+ it.pos() = insit.pos;
// is it a nested text inset?
- if (iit->inset->asInsetText()) {
+ if (insit.inset->asInsetText()) {
// Inset needs its own scope?
- InsetText const * itext = iit->inset->asInsetText();
+ InsetText const * itext = insit.inset->asInsetText();
bool newScope = itext->isMacroScope();
// scope which ends just behind the inset
++insetScope.pos();
// collect macros in inset
- it.push_back(CursorSlice(*iit->inset));
+ it.push_back(CursorSlice(*insit.inset));
updateMacros(it, newScope ? insetScope : scope);
it.pop_back();
continue;
}
- if (iit->inset->asInsetTabular()) {
- CursorSlice slice(*iit->inset);
+ if (insit.inset->asInsetTabular()) {
+ CursorSlice slice(*insit.inset);
size_t const numcells = slice.nargs();
for (; slice.idx() < numcells; slice.forwardIdx()) {
it.push_back(slice);
}
// is it an external file?
- if (iit->inset->lyxCode() == INCLUDE_CODE) {
+ if (insit.inset->lyxCode() == INCLUDE_CODE) {
// get buffer of external file
- InsetInclude const & inset =
- static_cast<InsetInclude const &>(*iit->inset);
+ InsetInclude const & incinset =
+ static_cast<InsetInclude const &>(*insit.inset);
macro_lock = true;
- Buffer * child = inset.getChildBuffer();
+ Buffer * child = incinset.getChildBuffer();
macro_lock = false;
if (!child)
continue;
continue;
}
- InsetMath * im = iit->inset->asInsetMath();
+ InsetMath * im = insit.inset->asInsetMath();
if (doing_export && im) {
InsetMathHull * hull = im->asHullInset();
if (hull)
hull->recordLocation(it);
}
- if (iit->inset->lyxCode() != MATHMACRO_CODE)
+ if (insit.inset->lyxCode() != MATHMACRO_CODE)
continue;
// get macro data
- MathMacroTemplate & macroTemplate =
- *iit->inset->asInsetMath()->asMacroTemplate();
+ InsetMathMacroTemplate & macroTemplate =
+ *insit.inset->asInsetMath()->asMacroTemplate();
MacroContext mc(owner_, it);
macroTemplate.updateToContext(mc);
MacroContext mc = MacroContext(this, it);
for (DocIterator::idx_type i = 0; i < n; ++i) {
MathData & data = minset->cell(i);
- data.updateMacros(0, mc, utype);
+ data.updateMacros(0, mc, utype, 0);
}
}
}
d->macro_lock = true;
// loop over macro names
- Impl::NamePositionScopeMacroMap::iterator nameIt = d->macros.begin();
- Impl::NamePositionScopeMacroMap::iterator nameEnd = d->macros.end();
- for (; nameIt != nameEnd; ++nameIt)
- macros.insert(nameIt->first);
+ for (auto const & nameit : d->macros)
+ macros.insert(nameit.first);
// loop over children
- Impl::BufferPositionMap::iterator it = d->children_positions.begin();
- Impl::BufferPositionMap::iterator end = d->children_positions.end();
- for (; it != end; ++it)
- it->first->listMacroNames(macros);
+ for (auto const & p : d->children_positions) {
+ Buffer * child = const_cast<Buffer *>(p.first);
+ // The buffer might have been closed (see #10766).
+ if (theBufferList().isLoaded(child))
+ child->listMacroNames(macros);
+ }
// call parent
Buffer const * const pbuf = d->parent();
pbuf->listMacroNames(names);
// resolve macros
- MacroNameSet::iterator it = names.begin();
- MacroNameSet::iterator end = names.end();
- for (; it != end; ++it) {
+ for (auto const & mit : names) {
// defined?
- MacroData const * data =
- pbuf->getMacro(*it, *this, false);
+ MacroData const * data = pbuf->getMacro(mit, *this, false);
if (data) {
macros.insert(data);
- // we cannot access the original MathMacroTemplate anymore
+ // we cannot access the original InsetMathMacroTemplate anymore
// here to calls validate method. So we do its work here manually.
// FIXME: somehow make the template accessible here.
if (data->optionals() > 0)
RefCache::iterator it = d->ref_cache_.find(label);
if (it != d->ref_cache_.end())
- return it->second.second;
+ return it->second;
- static InsetLabel const * dummy_il = 0;
- static References const dummy_refs;
+ static References const dummy_refs = References();
it = d->ref_cache_.insert(
- make_pair(label, make_pair(dummy_il, dummy_refs))).first;
- return it->second.second;
+ make_pair(label, dummy_refs)).first;
+ return it->second;
}
}
-void Buffer::setInsetLabel(docstring const & label, InsetLabel const * il)
+// Register a label inset under the given string in the master buffer's
+// label cache, remembering whether the label is active (i.e., not
+// deleted -- see LabelInfo::active).
+void Buffer::setInsetLabel(docstring const & label, InsetLabel const * il,
+		bool const active)
{
-	masterBuffer()->d->ref_cache_[label].first = il;
+	LabelInfo linfo;
+	linfo.label = label;
+	linfo.inset = il;
+	linfo.active = active;
+	// The cache lives in the master buffer, so labels defined in
+	// children are visible document-wide.
+	masterBuffer()->d->label_cache_.push_back(linfo);
}
-InsetLabel const * Buffer::insetLabel(docstring const & label) const
+// Return the inset of the (first) label with the given string, or
+// nullptr if there is none. If \p active is true, inactive (deleted)
+// labels are skipped.
+InsetLabel const * Buffer::insetLabel(docstring const & label,
+		bool const active) const
{
-	return masterBuffer()->d->ref_cache_[label].first;
+	// the label cache is kept in the master buffer (see setInsetLabel)
+	for (auto const & rc : masterBuffer()->d->label_cache_) {
+		if (rc.label == label && (rc.active || !active))
+			return rc.inset;
+	}
+	return nullptr;
+}
+
+
+// Whether an active (non-deleted) label with this string exists.
+bool Buffer::activeLabel(docstring const & label) const
+{
+	// no need for an explicit if/return-bool: just test the lookup
+	return insetLabel(label, true) != nullptr;
}
void Buffer::clearReferenceCache() const
{
-	if (!d->parent())
+	// Both caches are stored only in the master buffer, so a child
+	// (a buffer with a parent) has nothing to clear.
+	if (!d->parent()) {
		d->ref_cache_.clear();
+		d->label_cache_.clear();
+	}
}
reloadBibInfoCache();
// Check if the label 'from' appears more than once
- BiblioInfo const & keys = masterBibInfo();
- BiblioInfo::const_iterator bit = keys.begin();
- BiblioInfo::const_iterator bend = keys.end();
vector<docstring> labels;
-
- for (; bit != bend; ++bit)
- // FIXME UNICODE
- labels.push_back(bit->first);
+ for (auto const & bibit : masterBibInfo())
+ labels.push_back(bibit.first);
if (count(labels.begin(), labels.end(), from) > 1)
return;
string const paramName = "key";
- for (InsetIterator it = inset_iterator_begin(inset()); it; ++it) {
+ InsetIterator it = inset_iterator_begin(inset());
+ for (; it; ++it) {
if (it->lyxCode() != CITE_CODE)
continue;
InsetCommand * inset = it->asInsetCommand();
}
// returns NULL if id-to-row conversion is unsupported
-auto_ptr<TexRow> Buffer::getSourceCode(odocstream & os, string const & format,
- pit_type par_begin, pit_type par_end,
- OutputWhat output, bool master) const
+unique_ptr<TexRow> Buffer::getSourceCode(odocstream & os, string const & format,
+ pit_type par_begin, pit_type par_end,
+ OutputWhat output, bool master) const
{
- auto_ptr<TexRow> texrow(NULL);
+ unique_ptr<TexRow> texrow;
OutputParams runparams(¶ms().encoding());
runparams.nice = true;
runparams.flavor = params().getOutputFlavor(format);
// No side effect of file copying and image conversion
runparams.dryrun = true;
+ // Some macros rely on font encoding
+ runparams.main_fontenc = params().main_font_encoding();
+
if (output == CurrentParagraph) {
runparams.par_begin = par_begin;
runparams.par_end = par_end;
// in order to know if we should output polyglossia
// macros (instead of babel macros)
LaTeXFeatures features(*this, params(), runparams);
- params().validate(features);
+ validate(features);
runparams.use_polyglossia = features.usePolyglossia();
- texrow.reset(new TexRow());
- texrow->reset();
- texrow->newline();
- texrow->newline();
// latex or literate
- otexstream ots(os, *texrow);
-
+ otexstream ots(os);
+ // output above
+ ots.texrow().newlines(2);
// the real stuff
latexParagraphs(*this, text(), ots, runparams);
+ texrow = ots.releaseTexRow();
// Restore the parenthood
if (!master)
writeDocBookSource(os, absFileName(), runparams, output);
} else {
// latex or literate
- texrow.reset(new TexRow());
- texrow->reset();
- texrow->newline();
- texrow->newline();
- otexstream ots(os, *texrow);
+ otexstream ots(os);
+ // output above
+ ots.texrow().newlines(2);
if (master)
runparams.is_child = true;
+ updateBuffer();
writeLaTeXSource(ots, string(), runparams, output);
+ texrow = ots.releaseTexRow();
}
}
return texrow;
///
virtual shared_ptr<ForkedProcess> clone() const
{
-		return shared_ptr<ForkedProcess>(new AutoSaveBuffer(*this));
+		// make_shared: one allocation for object and control block
+		return make_shared<AutoSaveBuffer>(*this);
}
///
int start()
return pid;
}
-} // namespace anon
+} // namespace
FileName Buffer::getEmergencyFileName() const
}
-void Buffer::moveAutosaveFile(support::FileName const & oldauto) const
+void Buffer::moveAutosaveFile(FileName const & oldauto) const
{
FileName const newauto = getAutosaveFileName();
oldauto.refresh();
bool Buffer::autoSave() const
{
Buffer const * buf = d->cloned_buffer_ ? d->cloned_buffer_ : this;
- if (buf->d->bak_clean || isReadonly())
+ if (buf->d->bak_clean || hasReadonlyFlag())
return true;
message(_("Autosaving current document..."));
{
d->doing_export = e;
ListOfBuffers clist = getDescendents();
- ListOfBuffers::const_iterator cit = clist.begin();
- ListOfBuffers::const_iterator const cen = clist.end();
- for (; cit != cen; ++cit)
- (*cit)->d->doing_export = e;
+ for (auto const & bit : clist)
+ bit->d->doing_export = e;
}
if (pos != string::npos) {
dest_filename = target.substr(pos + 1, target.length() - pos - 1);
format = target.substr(0, pos);
+ if (format == "default")
+ format = params().getDefaultOutputFormat();
runparams.export_folder = FileName(dest_filename).onlyPath().realPath();
FileName(dest_filename).onlyPath().createPath();
LYXERR(Debug::FILES, "format=" << format << ", dest_filename=" << dest_filename << ", export_folder=" << runparams.export_folder);
// Get shortest path to format
converters.buildGraph();
Graph::EdgePath path;
- for (vector<string>::const_iterator it = backs.begin();
- it != backs.end(); ++it) {
- Graph::EdgePath p = converters.getPath(*it, format);
+ for (string const & sit : backs) {
+ Graph::EdgePath p = converters.getPath(sit, format);
if (!p.empty() && (path.empty() || p.size() < path.size())) {
- backend_format = *it;
+ backend_format = sit;
path = p;
}
}
if (!put_in_tempdir) {
// Only show this alert if this is an export to a non-temporary
// file (not for previewing).
- Alert::error(_("Couldn't export file"), bformat(
- _("No information for exporting the format %1$s."),
- formats.prettyName(format)));
+		docstring s = bformat(_("No information for exporting the format %1$s."),
+			theFormats().prettyName(format));
+		if (format == "pdf4")
+			s += "\n"
+			  + bformat(_("Hint: use non-TeX fonts or set input encoding "
+				  // note: previous literal already ends in a space, so
+				  // this one must not start with one (double space)
+				  "to '%1$s' or '%2$s'"),
+				from_utf8(encodings.fromLyXName("utf8")->guiName()),
+				from_utf8(encodings.fromLyXName("ascii")->guiName()));
+		Alert::error(_("Couldn't export file"), s);
}
return ExportNoPathToFormat;
}
runparams.flavor = converters.getFlavor(path, this);
- Graph::EdgePath::const_iterator it = path.begin();
- Graph::EdgePath::const_iterator en = path.end();
- for (; it != en; ++it)
- if (theConverters().get(*it).nice()) {
+ runparams.hyperref_driver = converters.getHyperrefDriver(path);
+ for (auto const & edge : path)
+ if (theConverters().get(edge).nice()) {
need_nice_file = true;
break;
}
string filename = latexName(false);
filename = addName(temppath(), filename);
filename = changeExtension(filename,
- formats.extension(backend_format));
+ theFormats().extension(backend_format));
LYXERR(Debug::FILES, "filename=" << filename);
// Plain text backend
if (backend_format == "text") {
runparams.flavor = OutputParams::TEXT;
- writePlaintextFile(*this, FileName(filename), runparams);
+ try {
+ writePlaintextFile(*this, FileName(filename), runparams);
+ }
+ catch (ConversionException const &) { return ExportCancel; }
}
// HTML backend
else if (backend_format == "xhtml") {
runparams.flavor = OutputParams::HTML;
setMathFlavor(runparams);
- makeLyXHTMLFile(FileName(filename), runparams);
+ if (makeLyXHTMLFile(FileName(filename), runparams) == ExportKilled)
+ return ExportKilled;
} else if (backend_format == "lyx")
writeFile(FileName(filename));
// Docbook backend
else if (params().isDocBook()) {
runparams.nice = !put_in_tempdir;
- makeDocBookFile(FileName(filename), runparams);
+ if (makeDocBookFile(FileName(filename), runparams) == ExportKilled)
+ return ExportKilled;
}
// LaTeX backend
else if (backend_format == format || need_nice_file) {
runparams.nice = true;
- bool const success = makeLaTeXFile(FileName(filename), string(), runparams);
+ ExportStatus const retval =
+ makeLaTeXFile(FileName(filename), string(), runparams);
+ if (retval == ExportKilled)
+ return ExportKilled;
if (d->cloned_buffer_)
d->cloned_buffer_->d->errorLists["Export"] = d->errorLists["Export"];
- if (!success)
- return ExportError;
+ if (retval != ExportSuccess)
+ return retval;
} else if (!lyxrc.tex_allows_spaces
&& contains(filePath(), ' ')) {
Alert::error(_("File name error"),
- _("The directory path to the document cannot contain spaces."));
+ bformat(_("The directory path to the document\n%1$s\n"
+ "contains spaces, but your TeX installation does "
+ "not allow them. You should save the file to a directory "
+ "whose name does not contain spaces."), from_utf8(filePath())));
return ExportTexPathHasSpaces;
} else {
runparams.nice = false;
- bool const success = makeLaTeXFile(
- FileName(filename), filePath(), runparams);
+ ExportStatus const retval =
+ makeLaTeXFile(FileName(filename), filePath(), runparams);
+ if (retval == ExportKilled)
+ return ExportKilled;
if (d->cloned_buffer_)
d->cloned_buffer_->d->errorLists["Export"] = d->errorLists["Export"];
- if (!success)
+ if (retval != ExportSuccess)
return ExportError;
}
string const error_type = (format == "program")
? "Build" : params().bufferFormat();
ErrorList & error_list = d->errorLists[error_type];
- string const ext = formats.extension(format);
+ string const ext = theFormats().extension(format);
FileName const tmp_result_file(changeExtension(filename, ext));
- bool const success = converters.convert(this, FileName(filename),
- tmp_result_file, FileName(absFileName()), backend_format, format,
- error_list);
+ Converters::RetVal const retval =
+ converters.convert(this, FileName(filename), tmp_result_file,
+ FileName(absFileName()), backend_format, format, error_list);
+ if (retval == Converters::KILLED)
+ return ExportCancel;
+ bool success = (retval == Converters::SUCCESS);
// Emit the signal to show the error list or copy it back to the
// cloned Buffer so that it can be emitted afterwards.
errors(error_type);
// also to the children, in case of master-buffer-view
ListOfBuffers clist = getDescendents();
- ListOfBuffers::const_iterator cit = clist.begin();
- ListOfBuffers::const_iterator const cen = clist.end();
- for (; cit != cen; ++cit) {
+ for (auto const & bit : clist) {
if (runparams.silent)
- (*cit)->d->errorLists[error_type].clear();
+ bit->d->errorLists[error_type].clear();
else if (d->cloned_buffer_) {
// Enable reverse search by copying back the
// texrow object to the cloned buffer.
// FIXME: this is not thread safe.
- (*cit)->d->cloned_buffer_->d->texrow = (*cit)->d->texrow;
- (*cit)->d->cloned_buffer_->d->errorLists[error_type] =
- (*cit)->d->errorLists[error_type];
+ bit->d->cloned_buffer_->d->texrow = bit->d->texrow;
+ bit->d->cloned_buffer_->d->errorLists[error_type] =
+ bit->d->errorLists[error_type];
} else
- (*cit)->errors(error_type, true);
+ bit->errors(error_type, true);
}
}
// FIXME: There is a possibility of concurrent access to texrow
// here from the main GUI thread that should be securized.
d->cloned_buffer_->d->texrow = d->texrow;
-	string const error_type = params().bufferFormat();
-	d->cloned_buffer_->d->errorLists[error_type] = d->errorLists[error_type];
+	// Renamed to err_type to avoid shadowing the earlier error_type;
+	// both sides of the assignment must use the renamed key, otherwise
+	// the list is copied under the wrong map entry.
+	string const err_type = params().bufferFormat();
+	d->cloned_buffer_->d->errorLists[err_type] = d->errorLists[err_type];
}
: force_overwrite == ALL_FILES;
CopyStatus status = use_force ? FORCE : SUCCESS;
- vector<ExportedFile>::const_iterator it = files.begin();
- vector<ExportedFile>::const_iterator const en = files.end();
- for (; it != en && status != CANCEL; ++it) {
- string const fmt = formats.getFormatFromFile(it->sourceName);
- string fixedName = it->exportName;
+ for (ExportedFile const & exp : files) {
+ if (status == CANCEL) {
+ message(_("Document export cancelled."));
+ return ExportCancel;
+ }
+ string const fmt = theFormats().getFormatFromFile(exp.sourceName);
+ string fixedName = exp.exportName;
if (!runparams.export_folder.empty()) {
// Relative pathnames starting with ../ will be sanitized
// if exporting to a different folder
}
FileName fixedFileName = makeAbsPath(fixedName, dest);
fixedFileName.onlyPath().createPath();
- status = copyFile(fmt, it->sourceName,
+ status = copyFile(fmt, exp.sourceName,
fixedFileName,
- it->exportName, status == FORCE,
+ exp.exportName, status == FORCE,
runparams.export_folder.empty());
}
- if (status == CANCEL) {
- message(_("Document export cancelled."));
- return ExportCancel;
- }
if (tmp_result_file.exists()) {
// Finally copy the main file
} else {
message(bformat(_("Document exported as %1$s "
"to file `%2$s'"),
- formats.prettyName(format),
+ theFormats().prettyName(format),
makeDisplayPath(result_file)));
}
} else {
// This must be a dummy converter like fax (bug 1888)
message(bformat(_("Document exported as %1$s"),
- formats.prettyName(format)));
+ theFormats().prettyName(format)));
}
return success ? ExportSuccess : ExportConverterError;
ExportStatus const status = doExport(format, true, false, result_file);
FileName const previewFile(result_file);
- LATTEST (isClone());
- d->cloned_buffer_->d->preview_file_ = previewFile;
- d->cloned_buffer_->d->preview_format_ = format;
- d->cloned_buffer_->d->preview_error_ = (status != ExportSuccess);
+ Impl * theimpl = isClone() ? d->cloned_buffer_->d : d;
+ theimpl->preview_file_ = previewFile;
+ theimpl->preview_format_ = format;
+ theimpl->preview_error_ = (status != ExportSuccess);
if (status != ExportSuccess)
return status;
- if (previewFile.exists()) {
- if (!formats.view(*this, previewFile, format))
- return PreviewError;
- else
- return PreviewSuccess;
- }
- else {
- // Successful export but no output file?
- // Probably a bug in error detection.
- LATTEST (status != ExportSuccess);
- return status;
- }
+ if (previewFile.exists())
+ return theFormats().view(*this, previewFile, format) ?
+ PreviewSuccess : PreviewError;
+
+ // Successful export but no output file?
+ // Probably a bug in error detection.
+ LATTEST(status != ExportSuccess);
+ return status;
}
ReadStatus const ret_llf = loadThisLyXFile(emergencyFile);
bool const success = (ret_llf == ReadSuccess);
if (success) {
- if (isReadonly()) {
+ if (hasReadonlyFlag()) {
Alert::warning(_("File is read-only"),
bformat(_("An emergency file is successfully loaded, "
"but the original file %1$s is marked read-only. "
_("&Remove"), _("&Keep"));
if (del_emerg == 0)
emergencyFile.removeFile();
+ else {
+ // See bug #11464
+ FileName newname;
+ string const ename = emergencyFile.absFileName();
+ bool noname = true;
+ // Surely we can find one in 100 tries?
+ for (int i = 1; i < 100; ++i) {
+ newname.set(ename + to_string(i) + ".lyx");
+ if (!newname.exists()) {
+ noname = false;
+ break;
+ }
+ }
+ if (!noname) {
+ // renameTo returns true on success. So inverting that
+ // will give us true if we fail.
+ noname = !emergencyFile.renameTo(newname);
+ }
+ if (noname) {
+ Alert::warning(_("Can't rename emergency file!"),
+ _("LyX was unable to rename the emergency file. "
+ "You should do so manually. Otherwise, you will be "
+ "asked about it again the next time you try to load "
+ "this file, and may over-write your own work."));
+ }
+ }
return ReadOriginal;
}
ReadStatus const ret_llf = loadThisLyXFile(autosaveFile);
// the file is not saved if we load the autosave file.
if (ret_llf == ReadSuccess) {
- if (isReadonly()) {
+ if (hasReadonlyFlag()) {
Alert::warning(_("File is read-only"),
bformat(_("A backup file is successfully loaded, "
"but the original file %1$s is marked read-only. "
void Buffer::bufferErrors(TeXErrors const & terr, ErrorList & errorList) const
{
+	// Map each TeX error (a row number in the generated .tex file) back
+	// to a position in the document, using this buffer's TexRow first
+	// and then the TexRows of the included children.
-	TeXErrors::Errors::const_iterator it = terr.begin();
-	TeXErrors::Errors::const_iterator end = terr.end();
-	ListOfBuffers clist = getDescendents();
-	ListOfBuffers::const_iterator cen = clist.end();
-
-	for (; it != end; ++it) {
-	int id_start = -1;
-	int pos_start = -1;
-	int errorRow = it->error_in_line;
+	for (auto const & err : terr) {
+	TexRow::TextEntry start = TexRow::text_none, end = TexRow::text_none;
+	int errorRow = err.error_in_line;
Buffer const * buf = 0;
Impl const * p = d;
-	if (it->child_name.empty())
-	p->texrow.getIdFromRow(errorRow, id_start, pos_start);
+	if (err.child_name.empty())
+	tie(start, end) = p->texrow.getEntriesFromRow(errorRow);
else {
// The error occurred in a child
-	ListOfBuffers::const_iterator cit = clist.begin();
-	for (; cit != cen; ++cit) {
+	for (Buffer const * child : getDescendents()) {
string const child_name =
-	DocFileName(changeExtension(
-	(*cit)->absFileName(), "tex")).
-	mangledFileName();
-	if (it->child_name != child_name)
+	DocFileName(changeExtension(child->absFileName(), "tex")).
+	mangledFileName();
+	if (err.child_name != child_name)
continue;
-	(*cit)->d->texrow.getIdFromRow(errorRow,
-	id_start, pos_start);
-	if (id_start != -1) {
+	tie(start, end) = child->d->texrow.getEntriesFromRow(errorRow);
+	if (!TexRow::isNone(start)) {
buf = d->cloned_buffer_
-	? (*cit)->d->cloned_buffer_->d->owner_
-	: (*cit)->d->owner_;
-	p = (*cit)->d;
+	? child->d->cloned_buffer_->d->owner_
+	: child->d->owner_;
+	p = child->d;
break;
}
}
}
-	int id_end = -1;
-	int pos_end = -1;
-	bool found;
-	do {
-	++errorRow;
-	found = p->texrow.getIdFromRow(errorRow, id_end, pos_end);
-	} while (found && id_start == id_end && pos_start == pos_end);
-
-	if (id_start != id_end) {
-	// Next registered position is outside the inset where
-	// the error occurred, so signal end-of-paragraph
-	pos_end = 0;
-	}
-
-	errorList.push_back(ErrorItem(it->error_desc,
-	it->error_text, id_start, pos_start, pos_end, buf));
+	// buf stays null when the error is in this buffer, not a child
+	errorList.push_back(ErrorItem(err.error_desc, err.error_text,
+	start, end, buf));
}
}
-void Buffer::setBuffersForInsets() const
-{
- inset().setBuffer(const_cast<Buffer &>(*this));
-}
-
-
void Buffer::updateBuffer(UpdateScope scope, UpdateType utype) const
{
LBUFERR(!text().paragraphs().empty());
Buffer const * const master = masterBuffer();
DocumentClass const & textclass = master->params().documentClass();
- // do this only if we are the top-level Buffer
- if (master == this) {
+ docstring_list old_bibfiles;
+ // Do this only if we are the top-level Buffer. We also need to account
+ // for the case of a previewed child with ignored parent here.
+ if (master == this && !d->ignore_parent) {
textclass.counters().reset(from_ascii("bibitem"));
reloadBibInfoCache();
+ // we will re-read this cache as we go through, but we need
+ // to know whether it's changed to know whether we need to
+ // update the bibinfo cache.
+ old_bibfiles = d->bibfiles_cache_;
+ d->bibfiles_cache_.clear();
}
// keep the buffers to be children in this set. If the call from the
// not updated during the updateBuffer call and TocModel::toc_ is invalid
// (bug 5699). The same happens if the master buffer is open in a different
// window. This test catches both possibilities.
- // See: http://marc.info/?l=lyx-devel&m=138590578911716&w=2
+ // See: https://marc.info/?l=lyx-devel&m=138590578911716&w=2
// There remains a problem here: If there is another child open in yet a third
// window, that TOC is not updated. So some more general solution is needed at
// some point.
// update all caches
clearReferenceCache();
updateMacros();
+ setChangesPresent(false);
Buffer & cbuf = const_cast<Buffer &>(*this);
ParIterator parit = cbuf.par_iterator_begin();
updateBuffer(parit, utype);
+ // If this document has siblings, then update the TocBackend later. The
+ // reason is to ensure that later siblings are up to date when e.g. the
+ // broken or not status of references is computed. The update is called
+ // in InsetInclude::addToToc.
if (master != this)
- // TocBackend update will be done later.
return;
- d->bibinfo_cache_valid_ = true;
+ // if the bibfiles changed, the cache of bibinfo is invalid
+ docstring_list new_bibfiles = d->bibfiles_cache_;
+ // this is a trick to determine whether the two vectors have
+ // the same elements.
+ sort(new_bibfiles.begin(), new_bibfiles.end());
+ sort(old_bibfiles.begin(), old_bibfiles.end());
+ if (old_bibfiles != new_bibfiles) {
+ LYXERR(Debug::FILES, "Reloading bibinfo cache.");
+ invalidateBibinfoCache();
+ reloadBibInfoCache();
+ // We relied upon the bibinfo cache when recalculating labels. But that
+ // cache was invalid, although we didn't find that out until now. So we
+ // have to do it all again.
+ // That said, the only thing we really need to do is update the citation
+ // labels. Nothing else will have changed. So we could create a new
+ // UpdateType that would signal that fact, if we needed to do so.
+ parit = cbuf.par_iterator_begin();
+ // we will be re-doing the counters and references and such.
+ textclass.counters().reset();
+ clearReferenceCache();
+ // we should not need to do this again?
+ // updateMacros();
+ setChangesPresent(false);
+ updateBuffer(parit, utype);
+ // this will already have been done by reloadBibInfoCache();
+ // d->bibinfo_cache_valid_ = true;
+ }
+ else {
+ LYXERR(Debug::FILES, "Bibfiles unchanged.");
+ // this is also set to true on the other path, by reloadBibInfoCache.
+ d->bibinfo_cache_valid_ = true;
+ }
d->cite_labels_valid_ = true;
- cbuf.tocBackend().update(utype == OutputUpdate);
+ /// FIXME: Perf
+ cbuf.tocBackend().update(true, utype);
if (scope == UpdateMaster)
cbuf.structureChanged();
}
if (!it[i].inset().inMathed())
depth += it[i].paragraph().getDepth() + 1;
// remove 1 since the outer inset does not count
+ // we should have at least one non-math inset, so
+	// depth should never be 0, but maybe it is worth
+ // marking this, just in case.
+ LATTEST(depth > 0);
+ // coverity[INTEGER_OVERFLOW]
return depth - 1;
}
--prev_it.top().pit();
Paragraph const & prev_par = *prev_it;
if (prev_par.getDepth() <= cur_depth)
- return prev_par.layout().labeltype != LABEL_ENUMERATE;
+ return prev_par.layout().name() != par.layout().name();
}
// start of nested inset: reset
return true;
docstring itemlabel;
switch (par.itemdepth) {
case 0:
+ // • U+2022 BULLET
itemlabel = char_type(0x2022);
break;
case 1:
+ // – U+2013 EN DASH
itemlabel = char_type(0x2013);
break;
case 2:
+ // ∗ U+2217 ASTERISK OPERATOR
itemlabel = char_type(0x2217);
break;
case 3:
- itemlabel = char_type(0x2219); // or 0x00b7
+ // · U+00B7 MIDDLE DOT
+ itemlabel = char_type(0x00b7);
break;
}
par.params().labelString(itemlabel);
switch (par.itemdepth) {
case 2:
enumcounter += 'i';
+ // fall through
case 1:
enumcounter += 'i';
+ // fall through
case 0:
enumcounter += 'i';
break;
break;
}
- // Maybe we have to reset the enumeration counter.
- if (needEnumCounterReset(it))
- counters.reset(enumcounter);
+ if (needEnumCounterReset(it)) {
+ // Increase the master counter?
+ if (layout.stepmastercounter)
+ counters.stepMaster(enumcounter, utype);
+ // Maybe we have to reset the enumeration counter.
+ if (!layout.resumecounter)
+ counters.reset(enumcounter);
+ }
counters.step(enumcounter, utype);
string const & lang = par.getParLanguage(bp)->code();
// to resolve macros in it.
parit.text()->setMacrocontextPosition(parit);
+ // Reset bibitem counter in master (#8499)
+ Buffer const * const master = masterBuffer();
+ if (master == this && !d->ignore_parent)
+ master->params().documentClass().counters().reset(from_ascii("bibitem"));
+
depth_type maxdepth = 0;
pit_type const lastpit = parit.lastpit();
for ( ; parit.pit() <= lastpit ; ++parit.pit()) {
* non-const. This would however be costly in
* terms of code duplication.
*/
- const_cast<Buffer *>(this)->undo().recordUndo(CursorData(parit));
+ CursorData(parit).recordUndo();
parit->params().depth(maxdepth);
}
maxdepth = parit->getMaxDepthAfter();
// set the counter for this paragraph
d->setLabel(parit, utype);
+ // update change-tracking flag
+ parit->addChangesToBuffer(*this);
+
// now the insets
- InsetList::const_iterator iit = parit->insetList().begin();
- InsetList::const_iterator end = parit->insetList().end();
- for (; iit != end; ++iit) {
- parit.pos() = iit->pos;
- iit->inset->updateBuffer(parit, utype);
+ for (auto const & insit : parit->insetList()) {
+ parit.pos() = insit.pos;
+ insit.inset->updateBuffer(parit, utype);
}
}
}
Buffer::ReadStatus Buffer::reload()
{
setBusy(true);
- // c.f. bug http://www.lyx.org/trac/ticket/6587
+ // c.f. bug https://www.lyx.org/trac/ticket/6587
removeAutosaveFile();
// e.g., read-only status could have changed due to version control
d->filename.refresh();
void Buffer::checkChildBuffers()
{
- Impl::BufferPositionMap::iterator it = d->children_positions.begin();
- Impl::BufferPositionMap::iterator const en = d->children_positions.end();
- for (; it != en; ++it) {
- DocIterator dit = it->second;
- Buffer * cbuf = const_cast<Buffer *>(it->first);
+ for (auto const & bit : d->children_positions) {
+ DocIterator dit = bit.second;
+ Buffer * cbuf = const_cast<Buffer *>(bit.first);
if (!cbuf || !theBufferList().isLoaded(cbuf))
continue;
Inset * inset = dit.nextInset();
string Buffer::includedFilePath(string const & name, string const & ext) const
{
+ if (d->old_position.empty() ||
+ equivalent(FileName(d->old_position), FileName(filePath())))
+ return name;
+
bool isabsolute = FileName::isAbsolute(name);
- // old_position already contains a trailing path separator
- string const absname = isabsolute ? name : d->old_position + name;
+ // both old_position and filePath() end with a path separator
+ string absname = isabsolute ? name : d->old_position + name;
- if (d->old_position.empty()
- || equivalent(FileName(d->old_position), FileName(filePath()))
- || !FileName(addExtension(absname, ext)).exists())
+ // if old_position is set to origin, we need to do the equivalent of
+ // getReferencedFileName() (see readDocument())
+ if (!isabsolute && d->old_position == params().origin) {
+ FileName const test(addExtension(filePath() + name, ext));
+ if (test.exists())
+ absname = filePath() + name;
+ }
+
+ if (!FileName(addExtension(absname, ext)).exists())
return name;
if (isabsolute)
from_utf8(filePath())));
}
+
+// Cache whether the document contains tracked changes.
+void Buffer::setChangesPresent(bool b) const
+{
+	d->tracked_changes_present_ = b;
+}
+
+
+// Whether tracked changes were found (see updateChangesPresent()).
+bool Buffer::areChangesPresent() const
+{
+	return d->tracked_changes_present_;
+}
+
+
+// Recompute the tracked-changes flag by scanning all paragraphs.
+void Buffer::updateChangesPresent() const
+{
+	LYXERR(Debug::CHANGES, "Buffer::updateChangesPresent");
+	setChangesPresent(false);
+	ParConstIterator it = par_iterator_begin();
+	ParConstIterator const end = par_iterator_end();
+	// each paragraph reports its changes into the buffer; the loop
+	// stops early as soon as areChangesPresent() becomes true
+	for (; !areChangesPresent() && it != end; ++it)
+		it->addChangesToBuffer(*this);
+}
+
+
+// (Re)establish the monitor watching the buffer's file on disk.
+void Buffer::Impl::refreshFileMonitor()
+{
+	// Reuse the existing monitor if it already watches the right file.
+	if (file_monitor_ && file_monitor_->filename() == filename.absFileName()) {
+		file_monitor_->refresh();
+		return;
+	}
+
+	// The previous file monitor is invalid
+	// This also destroys the previous file monitor and all its connections
+	file_monitor_ = FileSystemWatcher::monitor(filename);
+	// file_monitor_ will be destroyed with *this, so it is not going to call a
+	// destroyed object method.
+	file_monitor_->connect([this](bool exists) {
+		fileExternallyModified(exists);
+	});
+}
+
+
+// Slot invoked by the file monitor when the file changed on disk.
+// \param exists whether the file still exists after the change.
+void Buffer::Impl::fileExternallyModified(bool const exists)
+{
+	// ignore notifications after our own saving operations
+	if (checksum_ == filename.checksum()) {
+		LYXERR(Debug::FILES, "External modification but "
+			"checksum unchanged: " << filename);
+		return;
+	}
+	// If the file has been deleted, only mark the file as dirty since it is
+	// pointless to prompt for reloading. If later a file is moved into this
+	// location, then the externally modified warning will appear then.
+	if (exists)
+		externally_modified_ = true;
+	// Update external modification notification.
+	// Dirty buffers must be visible at all times.
+	if (wa_ && wa_->unhide(owner_))
+		wa_->updateTitles();
+	else
+		// Unable to unhide the buffer (e.g. no GUI or not current View)
+		lyx_clean = true;
+}
+
+
+// Whether an external modification of the file on disk has been
+// detected and not yet cleared (see clearExternalModification()).
+bool Buffer::notifiesExternalModification() const
+{
+	return d->externally_modified_;
+}
+
+
+// Reset the external-modification flag and refresh window titles so
+// the notification disappears from the UI.
+void Buffer::clearExternalModification() const
+{
+	d->externally_modified_ = false;
+	if (d->wa_)
+		d->wa_->updateTitles();
+}
+
+
} // namespace lyx