#include "PDFOptions.h"
#include "SpellChecker.h"
#include "sgml.h"
+#include "texstream.h"
#include "TexRow.h"
#include "Text.h"
#include "TextClass.h"
#include "mathed/MathMacroTemplate.h"
#include "mathed/MathSupport.h"
+#include "graphics/GraphicsCache.h"
#include "graphics/PreviewLoader.h"
#include "frontends/alert.h"
#include "support/debug.h"
#include "support/docstring_list.h"
#include "support/ExceptionMessage.h"
+#include "support/FileMonitor.h"
#include "support/FileName.h"
#include "support/FileNameList.h"
#include "support/filetools.h"
#include "support/types.h"
#include "support/bind.h"
-#include "support/shared_ptr.h"
#include <algorithm>
#include <fstream>
#include <iomanip>
#include <map>
+#include <memory>
#include <set>
#include <sstream>
#include <vector>
*/
bool file_fully_loaded;
+ /// original format of loaded file
+ int file_format;
+
+ /// if the file was originally loaded from an older format, do
+ /// we need to back it up still?
+ bool need_format_backup;
+
/// Ignore the parent (e.g. when exporting a child standalone)?
bool ignore_parent;
/// positions of child buffers in the buffer
typedef map<Buffer const * const, DocIterator> BufferPositionMap;
struct ScopeBuffer {
- ScopeBuffer() {}
- ScopeBuffer(DocIterator const & s,Buffer const * b)
+ ScopeBuffer() : buffer(0) {}
+ ScopeBuffer(DocIterator const & s, Buffer const * b)
: scope(s), buffer(b) {}
DocIterator scope;
Buffer const * buffer;
/// Container for all sort of Buffer dependant errors.
map<string, ErrorList> errorLists;
- /// timestamp and checksum used to test if the file has been externally
- /// modified. (Used to properly enable 'File->Revert to saved', bug 4114).
- time_t timestamp_;
+ /// checksum used to test if the file has been externally modified. Used to
+ /// double check whether the file had been externally modified when saving.
unsigned long checksum_;
///
/// A cache for the bibfiles (including bibfiles of loaded child
/// documents), needed for appropriate update of natbib labels.
- mutable support::FileNameList bibfiles_cache_;
+ mutable support::FileNamePairList bibfiles_cache_;
// FIXME The caching mechanism could be improved. At present, we have a
// cache for each Buffer, that caches all the bibliography info for that
+ (with_blanks ? blank_count_ : 0);
}
+ // does the buffer contain tracked changes? (if so, we automatically
+ // display the review toolbar, for instance)
+ mutable bool tracked_changes_present_;
+
+ // Make sure the file monitor monitors the good file.
+ void refreshFileMonitor();
+
+ /// has it been notified of an external modification?
+ bool isExternallyModified() const { return externally_modified_; }
+
+ /// Notify or clear of external modification
+ void fileExternallyModified(bool modified) const;
+
+ /// Block notifications of external modifications
+ FileMonitorBlocker blockFileMonitor() { return file_monitor_->block(10); }
+
private:
/// So we can force access via the accessors.
mutable Buffer const * parent_buffer;
int char_count_;
int blank_count_;
+ /// has been externally modified? Can be reset by the user.
+ mutable bool externally_modified_;
+
+ FileMonitorPtr file_monitor_;
};
Buffer const * cloned_buffer)
: owner_(owner), lyx_clean(true), bak_clean(true), unnamed(false),
internal_buffer(false), read_only(readonly_), filename(file),
- file_fully_loaded(false), ignore_parent(false), toc_backend(owner),
- macro_lock(false), timestamp_(0), checksum_(0), wa_(0), gui_(0),
- undo_(*owner), bibinfo_cache_valid_(false), bibfile_cache_valid_(false),
- cite_labels_valid_(false), inset(0), preview_loader_(0),
- cloned_buffer_(cloned_buffer), clone_list_(0),
- doing_export(false), parent_buffer(0),
- word_count_(0), char_count_(0), blank_count_(0)
-{
+ file_fully_loaded(false), file_format(LYX_FORMAT), need_format_backup(false),
+ ignore_parent(false), toc_backend(owner), macro_lock(false),
+ checksum_(0), wa_(0), gui_(0), undo_(*owner), bibinfo_cache_valid_(false),
+ bibfile_cache_valid_(false), cite_labels_valid_(false), preview_error_(false),
+ inset(0), preview_loader_(0), cloned_buffer_(cloned_buffer),
+ clone_list_(0), doing_export(false),
+ tracked_changes_present_(0), parent_buffer(0),
+ word_count_(0), char_count_(0), blank_count_(0),
+ externally_modified_(false)
+{
+ refreshFileMonitor();
if (!cloned_buffer_) {
temppath = createBufferTmpDir();
lyxvc.setBuffer(owner_);
preview_file_ = cloned_buffer_->d->preview_file_;
preview_format_ = cloned_buffer_->d->preview_format_;
preview_error_ = cloned_buffer_->d->preview_error_;
+ tracked_changes_present_ = cloned_buffer_->d->tracked_changes_present_;
}
Impl::BufferPositionMap::iterator end = d->children_positions.end();
for (; it != end; ++it) {
Buffer * child = const_cast<Buffer *>(it->first);
- if (theBufferList().isLoaded(child))
- theBufferList().releaseChild(this, child);
+ if (theBufferList().isLoaded(child)) {
+ if (theBufferList().isOthersChild(this, child))
+ child->setParent(0);
+ else
+ theBufferList().release(child);
+ }
}
if (!isClean()) {
FileName const bname(
addName(path, onlyFileName(
changeExtension(filename,
- formats.extension(params().bufferFormat()) + ".out"))));
+ theFormats().extension(params().bufferFormat()) + ".out"))));
// Also consider the master buffer log file
FileName masterfname = fname;
{
bool const changed = fname != d->filename;
d->filename = fname;
+ d->refreshFileMonitor();
if (changed)
lyxvc().file_found_hook(fname);
setReadonly(d->filename.isReadOnly());
params().html_latex_end.clear();
params().html_math_img_scale = 1.0;
params().output_sync_macro.erase();
- params().setLocalLayout(string(), false);
- params().setLocalLayout(string(), true);
+ params().setLocalLayout(docstring(), false);
+ params().setLocalLayout(docstring(), true);
+ params().biblio_opts.erase();
+ params().biblatex_bibstyle.erase();
+ params().biblatex_citestyle.erase();
+ params().multibib.erase();
for (int i = 0; i < 4; ++i) {
params().user_defined_bullet(i) = ITEMIZE_DEFAULTS[i];
"%1$s %2$s\n"),
from_utf8(token),
lex.getDocString());
- errorList.push_back(ErrorItem(_("Document header error"),
- s, -1, 0, 0));
+ errorList.push_back(ErrorItem(_("Document header error"), s));
}
}
}
if (begin_header_line) {
docstring const s = _("\\begin_header is missing");
- errorList.push_back(ErrorItem(_("Document header error"),
- s, -1, 0, 0));
+ errorList.push_back(ErrorItem(_("Document header error"), s));
}
params().makeDocumentClass();
if (!lex.checkFor("\\begin_document")) {
docstring const s = _("\\begin_document is missing");
- errorList.push_back(ErrorItem(_("Document header error"),
- s, -1, 0, 0));
+ errorList.push_back(ErrorItem(_("Document header error"), s));
}
readHeader(lex);
params().indiceslist().addDefault(B_("Index"));
// read main text
- d->old_position = originFilePath();
+ if (FileName::isAbsolute(params().origin))
+ d->old_position = params().origin;
+ else
+ d->old_position = filePath();
bool const res = text().read(lex, errorList, d->inset);
d->old_position.clear();
bool Buffer::importString(string const & format, docstring const & contents, ErrorList & errorList)
{
- Format const * fmt = formats.getFormat(format);
+ Format const * fmt = theFormats().getFormat(format);
if (!fmt)
return false;
// It is important to use the correct extension here, since some
if (file_format != LYX_FORMAT) {
FileName tmpFile;
- ReadStatus const ret_clf = convertLyXFormat(fn, tmpFile, file_format);
+ ReadStatus ret_clf = convertLyXFormat(fn, tmpFile, file_format);
if (ret_clf != ReadSuccess)
return ret_clf;
- return readFile(tmpFile);
+ ret_clf = readFile(tmpFile);
+ if (ret_clf == ReadSuccess) {
+ d->file_format = file_format;
+ d->need_format_backup = true;
+ }
+ return ret_clf;
}
// FIXME: InsetInfo needs to know whether the file is under VCS
d->file_fully_loaded = true;
d->read_only = !d->filename.isWritable();
- params().compressed = formats.isZippedFile(d->filename);
+ params().compressed = theFormats().isZippedFile(d->filename);
saveCheckSum();
return ReadSuccess;
}
}
+// Build a backup name for a file that was loaded from an older LyX file
+// format: "<name>-lyxformat-<N>.<ext>~", placed in lyxrc.backupdir_path
+// when that is set, otherwise next to the original file. If a backup of
+// that name already exists, a numeric suffix "-<v>" is tried (at most 100
+// attempts); if an existing backup already has the same checksum as the
+// original, no new backup is needed. Returns an empty FileName when no
+// backup should (or can) be made.
+FileName Buffer::getBackupName() const {
+	FileName const & fn = fileName();
+	string const fname = fn.onlyFileNameWithoutExt();
+	string const fext  = fn.extension() + "~";
+	string const fpath = lyxrc.backupdir_path.empty() ?
+		fn.onlyPath().absFileName() :
+		lyxrc.backupdir_path;
+	// encode the on-disk format in the backup name
+	string const fform = convert<string>(d->file_format);
+	string const backname = fname + "-lyxformat-" + fform;
+	FileName backup(addName(fpath, addExtension(backname, fext)));
+
+	// limit recursion, just in case
+	int v = 1;
+	unsigned long orig_checksum = 0;
+	while (backup.exists() && v < 100) {
+		if (orig_checksum == 0)
+			orig_checksum = fn.checksum();
+		unsigned long new_checksum = backup.checksum();
+		if (orig_checksum == new_checksum) {
+			// fixed: the message previously read "...Xsince Yhas..."
+			// because the literals lacked surrounding spaces
+			LYXERR(Debug::FILES, "Not backing up " << fn <<
+			       " since " << backup << " has the same checksum.");
+			// a bit of a hack, but we have to check this anyway
+			// below, and setting this is simpler than introducing
+			// a special boolean for this purpose.
+			v = 1000;
+			break;
+		}
+		string const newbackname = backname + "-" + convert<string>(v);
+		backup.set(addName(fpath, addExtension(newbackname, fext)));
+		v++;
+	}
+	// v == 1000 means an identical backup already exists: return empty,
+	// i.e. "nothing to do"; v >= 100 without a match means we gave up.
+	return v < 100 ? backup : FileName();
+}
+
+
// Should probably be moved to somewhere else: BufferView? GuiView?
bool Buffer::save() const
{
+ FileMonitorBlocker block = d->blockFileMonitor();
docstring const file = makeDisplayPath(absFileName(), 20);
d->filename.refresh();
}
// ask if the disk file has been externally modified (use checksum method)
- if (fileName().exists() && isExternallyModified(checksum_method)) {
+ if (fileName().exists() && isChecksumModified()) {
docstring text =
bformat(_("Document %1$s has been externally modified. "
"Are you sure you want to overwrite this file?"), file);
// if the file does not yet exist, none of the backup activity
// that follows is necessary
- if (!fileName().exists()) {
+ if (!fileName().exists()) {
if (!writeFile(fileName()))
- return false;
- markClean();
- return true;
- }
+ return false;
+ markClean();
+ return true;
+ }
// we first write the file to a new name, then move it to its
// proper location once that has been done successfully. that
// way we preserve the original file if something goes wrong.
string const justname = fileName().onlyFileNameWithoutExt();
- boost::scoped_ptr<TempFile>
- tempfile(new TempFile(fileName().onlyPath(),
- justname + "-XXXXXX.lyx"));
+ auto tempfile = make_unique<TempFile>(fileName().onlyPath(),
+ justname + "-XXXXXX.lyx");
bool const symlink = fileName().isSymLink();
if (!symlink)
tempfile->setAutoRemove(false);
// we will set this to false if we fail
bool made_backup = true;
- FileName backupName(absFileName() + '~');
- if (lyxrc.make_backup) {
- if (!lyxrc.backupdir_path.empty()) {
- string const mangledName =
- subst(subst(backupName.absFileName(), '/', '!'), ':', '!');
- backupName = FileName(addName(lyxrc.backupdir_path,
- mangledName));
+ FileName backupName;
+ bool const needBackup = lyxrc.make_backup || d->need_format_backup;
+ if (needBackup) {
+ if (d->need_format_backup)
+ backupName = getBackupName();
+
+ // If we for some reason failed to find a backup name in case of
+ // a format change, this will still set one. It's the best we can
+ // do in this case.
+ if (backupName.empty()) {
+ backupName.set(fileName().absFileName() + "~");
+ if (!lyxrc.backupdir_path.empty()) {
+ string const mangledName =
+ subst(subst(backupName.absFileName(), '/', '!'), ':', '!');
+ backupName.set(addName(lyxrc.backupdir_path, mangledName));
+ }
}
LYXERR(Debug::FILES, "Backing up original file to " <<
"Please check whether the directory exists and is writable."),
from_utf8(backupName.absFileName())));
//LYXERR(Debug::DEBUG, "Fs error: " << fe.what());
+ } else if (d->need_format_backup) {
+ // the original file has been backed up successfully, so we
+ // will not need to do that again
+ d->need_format_backup = false;
}
}
// time stamp is invalidated by copying/moving
saveCheckSum();
markClean();
+ if (d->file_format != LYX_FORMAT)
+ // the file associated with this buffer is now in the current format
+ d->file_format = LYX_FORMAT;
return true;
}
// else we saved the file, but failed to move it to the right location.
- if (lyxrc.make_backup && made_backup && !symlink) {
- // the original file was moved to filename.lyx~, so it will look
+ if (needBackup && made_backup && !symlink) {
+ // the original file was moved to some new location, so it will look
// to the user as if it was deleted. (see bug #9234.) we could try
// to restore it, but that would basically mean trying to do again
// what we just failed to do. better to leave things as they are.
{
OutputParams runparams = runparams_in;
- // XeTeX with TeX fonts is only safe with ASCII encoding,
- // See #9740 and FIXME in BufferParams::encoding()
- if (!params().useNonTeXFonts && (runparams.flavor == OutputParams::XETEX))
+ // XeTeX with TeX fonts is only safe with ASCII encoding (see also #9740),
+ // Check here, because the "flavor" is not known in BufferParams::encoding()
+ // (power users can override this safety measure selecting "utf8-plain").
+ if (!params().useNonTeXFonts && (runparams.flavor == OutputParams::XETEX)
+ && (runparams.encoding->name() != "utf8-plain"))
runparams.encoding = encodings.fromLyXName("ascii");
string const encoding = runparams.encoding->iconvName();
ErrorList & errorList = d->errorLists["Export"];
errorList.clear();
bool failed_export = false;
- otexstream os(ofs, d->texrow);
+ otexstream os(ofs);
// make sure we are ready to export
// this needs to be done before we validate
updateMacroInstances(OutputUpdate);
try {
- os.texrow().reset();
writeLaTeXSource(os, original_path, runparams, output);
}
catch (EncodingException const & e) {
errorList.push_back(ErrorItem(msg, _("Some characters of your document are probably not "
"representable in the chosen encoding.\n"
"Changing the document encoding to utf8 could help."),
- e.par_id, e.pos, e.pos + 1));
+ {e.par_id, e.pos}, {e.par_id, e.pos + 1}));
failed_export = true;
}
catch (iconv_codecvt_facet_exception const & e) {
errorList.push_back(ErrorItem(_("iconv conversion failed"),
- _(e.what()), -1, 0, 0));
+ _(e.what())));
failed_export = true;
}
catch (exception const & e) {
errorList.push_back(ErrorItem(_("conversion failed"),
- _(e.what()), -1, 0, 0));
+ _(e.what())));
+ lyxerr << e.what() << endl;
failed_export = true;
}
catch (...) {
lyx_exit(1);
}
+ d->texrow = move(os.texrow());
+
ofs.close();
if (ofs.fail()) {
failed_export = true;
OutputParams runparams = runparams_in;
// XeTeX with TeX fonts is only safe with ASCII encoding,
- // See #9740 and FIXME in BufferParams::encoding()
- // FIXME: when only the current paragraph is shown, this seems to be ignored:
- // characters encodable in the current encoding are not converted to ASCII-representation.
- if (!params().useNonTeXFonts && (runparams.flavor == OutputParams::XETEX))
+ // Check here, because the "flavor" is not known in BufferParams::encoding()
+ // (power users can override this safety measure selecting "utf8-plain").
+ if (!params().useNonTeXFonts && (runparams.flavor == OutputParams::XETEX)
+ && (runparams.encoding->name() != "utf8-plain"))
runparams.encoding = encodings.fromLyXName("ascii");
+ // FIXME: when only the current paragraph is shown, this is ignored
+ // (or not reached) and characters encodable in the current
+ // encoding are not converted to ASCII-representation.
// If we are compiling a file standalone, even if this is the
// child of some other buffer, let's cut the link here, so the
runparams.use_babel = params().writeLaTeX(os, features,
d->filename.onlyPath());
+ // Biblatex bibliographies are loaded here
+ if (params().useBiblatex()) {
+ vector<docstring> const bibfiles =
+ prepareBibFilePaths(runparams, getBibfilesCache(), true);
+ for (docstring const & file: bibfiles)
+ os << "\\addbibresource{" << file << "}\n";
+ }
+
+ if (!runparams.dryrun && features.hasPolyglossiaExclusiveLanguages()
+ && !features.hasOnlyPolyglossiaLanguages()) {
+ docstring blangs;
+ docstring plangs;
+ vector<string> bll = features.getBabelExclusiveLanguages();
+ vector<string> pll = features.getPolyglossiaExclusiveLanguages();
+ if (!bll.empty()) {
+ docstring langs;
+ for (vector<string>::const_iterator it = bll.begin(); it != bll.end(); ++it) {
+ if (!langs.empty())
+ langs += ", ";
+ langs += _(*it);
+ }
+ blangs = bll.size() > 1 ?
+ support::bformat(_("The languages %1$s are only supported by Babel."), langs)
+ : support::bformat(_("The language %1$s is only supported by Babel."), langs);
+ }
+ if (!pll.empty()) {
+ docstring langs;
+ for (vector<string>::const_iterator it = pll.begin(); it != pll.end(); ++it) {
+ if (!langs.empty())
+ langs += ", ";
+ langs += _(*it);
+ }
+ plangs = pll.size() > 1 ?
+ support::bformat(_("The languages %1$s are only supported by Polyglossia."), langs)
+ : support::bformat(_("The language %1$s is only supported by Polyglossia."), langs);
+ if (!blangs.empty())
+ plangs += "\n";
+ }
+
+ frontend::Alert::warning(
+ _("Incompatible Languages!"),
+ support::bformat(
+ _("You cannot use the following languages "
+ "together in one LaTeX document because "
+ "they require conflicting language packages:\n"
+ "%1$s%2$s"),
+ plangs, blangs));
+ }
+
// Japanese might be required only in some children of a document,
// but once required, we must keep use_japanese true.
runparams.use_japanese |= features.isRequired("japanese");
}
// make the body.
+ // mark the beginning of the body to separate it from InPreamble insets
+ os.texrow().start(TexRow::beginDocument());
os << "\\begin{document}\n";
+ // mark the start of a new paragraph by simulating a newline,
+ // so that os.afterParbreak() returns true at document start
+ os.lastChar('\n');
+
// output the parent macros
MacroSet::iterator it = parentMacros.begin();
MacroSet::iterator end = parentMacros.end();
} // output_preamble
- os.texrow().start(paragraphs().begin()->id(), 0);
-
LYXERR(Debug::INFO, "preamble finished, now the body.");
// the real stuff
}
runparams_in.encoding = runparams.encoding;
- os.texrow().finalize();
-
LYXERR(Debug::INFO, "Finished making LaTeX file.");
LYXERR(Debug::INFO, "Row count was " << os.texrow().rows() - 1 << '.');
}
LaTeXFeatures features(*this, params(), runparams);
validate(features);
- d->texrow.reset(false);
+ d->texrow.reset();
DocumentClass const & tclass = params().documentClass();
string const & top_element = tclass.latexname();
if (! tclass.class_header().empty())
os << from_ascii(tclass.class_header());
else if (runparams.flavor == OutputParams::XML)
- os << "PUBLIC \"-//OASIS//DTD DocBook XML//EN\" "
+ os << "PUBLIC \"-//OASIS//DTD DocBook XML V4.2//EN\" "
<< "\"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd\"";
else
os << " PUBLIC \"-//OASIS//DTD DocBook V4.2//EN\"";
- docstring preamble = from_utf8(params().preamble);
+ docstring preamble = params().preamble;
if (runparams.flavor != OutputParams::XML ) {
preamble += "<!ENTITY % output.print.png \"IGNORE\">\n";
preamble += "<!ENTITY % output.print.pdf \"IGNORE\">\n";
if (!styles.empty())
os << "\n<!-- Text Class Preamble -->\n" << styles << '\n';
- styles = from_utf8(features.getPreambleSnippets());
+ styles = features.getPreambleSnippets().str;
if (!styles.empty())
os << "\n<!-- Preamble Snippets -->\n" << styles << '\n';
// we will collect CSS information in a stream, and then output it
// either here, as part of the header, or else in a separate file.
odocstringstream css;
- styles = from_utf8(features.getCSSSnippets());
+ styles = features.getCSSSnippets();
if (!styles.empty())
css << "/* LyX Provided Styles */\n" << styles << '\n';
if (output_body) {
bool const output_body_tag = (output != IncludedFile);
if (output_body_tag)
- os << "<body>\n";
+ os << "<body dir=\"auto\">\n";
XHTMLStream xs(os);
if (output != IncludedFile)
// if we're an included file, the counters are in the master.
if (!features.runparams().is_child)
params().validate(features);
- for_each(paragraphs().begin(), paragraphs().end(),
- bind(&Paragraph::validate, _1, ref(features)));
+ for (Paragraph const & p : paragraphs())
+ p.validate(features);
if (lyxerr.debugging(Debug::LATEX)) {
features.showStruct();
list.clear();
shared_ptr<Toc> toc = d->toc_backend.toc("label");
- TocIterator toc_it = toc->begin();
- TocIterator end = toc->end();
+ Toc::const_iterator toc_it = toc->begin();
+ Toc::const_iterator end = toc->end();
for (; toc_it != end; ++toc_it) {
if (toc_it->depth() == 0)
list.push_back(toc_it->str());
for (InsetIterator it = inset_iterator_begin(inset()); it; ++it) {
if (it->lyxCode() == BIBTEX_CODE) {
InsetBibtex const & inset = static_cast<InsetBibtex const &>(*it);
- support::FileNameList const bibfiles = inset.getBibFiles();
+ support::FileNamePairList const bibfiles = inset.getBibFiles();
d->bibfiles_cache_.insert(d->bibfiles_cache_.end(),
bibfiles.begin(),
bibfiles.end());
Buffer const * const incbuf = inset.getChildBuffer();
if (!incbuf)
continue;
- support::FileNameList const & bibfiles =
+ support::FileNamePairList const & bibfiles =
incbuf->getBibfilesCache(UpdateChildOnly);
if (!bibfiles.empty()) {
d->bibfiles_cache_.insert(d->bibfiles_cache_.end(),
}
-support::FileNameList const & Buffer::getBibfilesCache(UpdateScope scope) const
+support::FileNamePairList const & Buffer::getBibfilesCache(UpdateScope scope) const
{
// FIXME This is probably unnecessary, given where we call this.
// If this is a child document, use the master's cache instead.
}
// compare the cached timestamps with the actual ones.
- FileNameList const & bibfiles_cache = getBibfilesCache();
- FileNameList::const_iterator ei = bibfiles_cache.begin();
- FileNameList::const_iterator en = bibfiles_cache.end();
+ FileNamePairList const & bibfiles_cache = getBibfilesCache();
+ FileNamePairList::const_iterator ei = bibfiles_cache.begin();
+ FileNamePairList::const_iterator en = bibfiles_cache.end();
for (; ei != en; ++ ei) {
- time_t lastw = ei->lastModified();
- time_t prevw = d->bibfile_status_[*ei];
+ FileName const fn = ei->second;
+ time_t lastw = fn.lastModified();
+ time_t prevw = d->bibfile_status_[fn];
if (lastw != prevw) {
d->bibinfo_cache_valid_ = false;
d->cite_labels_valid_ = false;
- d->bibfile_status_[*ei] = lastw;
+ d->bibfile_status_[fn] = lastw;
}
}
}
switch (cmd.action()) {
case LFUN_BUFFER_TOGGLE_READ_ONLY:
- flag.setOnOff(isReadonly());
+ flag.setOnOff(hasReadonlyFlag());
break;
// FIXME: There is need for a command-line import.
enable = true;
break;
}
- string format = to_utf8(arg);
+ string format = (arg.empty() || arg == "default") ?
+ params().getDefaultOutputFormat() : to_utf8(arg);
size_t pos = format.find(' ');
if (pos != string::npos)
format = format.substr(0, pos);
- enable = params().isExportable(format);
+ enable = params().isExportable(format, false);
if (!enable)
flag.message(bformat(
_("Don't know how to export to format: %1$s"), arg));
break;
case LFUN_BUILD_PROGRAM:
- enable = params().isExportable("program");
+ enable = params().isExportable("program", false);
break;
case LFUN_BRANCH_ACTIVATE:
dr.setMessage(log);
}
else
- setReadonly(!isReadonly());
+ setReadonly(!hasReadonlyFlag());
break;
case LFUN_BUFFER_EXPORT: {
- ExportStatus const status = doExport(argument, false);
+ string const format = (argument.empty() || argument == "default") ?
+ params().getDefaultOutputFormat() : argument;
+ ExportStatus const status = doExport(format, false);
dr.setError(status != ExportSuccess);
if (status != ExportSuccess)
dr.setMessage(bformat(_("Error exporting to format: %1$s."),
- func.argument()));
+ from_utf8(format)));
break;
}
case LFUN_BUFFER_EXPORT_CUSTOM: {
string format_name;
string command = split(argument, format_name, ' ');
- Format const * format = formats.getFormat(format_name);
+ Format const * format = theFormats().getFormat(format_name);
if (!format) {
lyxerr << "Format \"" << format_name
<< "\" not recognized!"
}
case LFUN_BUFFER_VIEW_CACHE:
- if (!formats.view(*this, d->preview_file_,
+ if (!theFormats().view(*this, d->preview_file_,
d->preview_format_))
dr.setMessage(_("Error viewing the output file."));
break;
case LFUN_CHANGES_TRACK:
- undo().recordUndoBufferParams(CursorData());
+ if (params().save_transient_properties)
+ undo().recordUndoBufferParams(CursorData());
params().track_changes = !params().track_changes;
+ if (!params().track_changes)
+ dr.forceChangesUpdate();
break;
case LFUN_CHANGES_OUTPUT:
- undo().recordUndoBufferParams(CursorData());
+ if (params().save_transient_properties)
+ undo().recordUndoBufferParams(CursorData());
params().output_changes = !params().output_changes;
if (params().output_changes) {
bool dvipost = LaTeXFeatures::isAvailable("dvipost");
DocIterator Buffer::getParFromID(int const id) const
{
Buffer * buf = const_cast<Buffer *>(this);
- if (id < 0) {
- // John says this is called with id == -1 from undo
- lyxerr << "getParFromID(), id: " << id << endl;
+ if (id < 0)
+ // This means non-existent
return doc_iterator_end(buf);
- }
for (DocIterator it = doc_iterator_begin(buf); !it.atEnd(); it.forwardPar())
if (it.paragraph().id() == id)
}
-bool Buffer::isExternallyModified(CheckMethod method) const
+// Return whether the on-disk file's checksum differs from the checksum
+// recorded by saveCheckSum() — i.e. whether the file was modified behind
+// our back. (The former timestamp-based check has been dropped; the
+// checksum comparison is now the sole criterion.)
+bool Buffer::isChecksumModified() const
 {
 	LASSERT(d->filename.exists(), return false);
-	// if method == timestamp, check timestamp before checksum
-	return (method == checksum_method
-		|| d->timestamp_ != d->filename.lastModified())
-		&& d->checksum_ != d->filename.checksum();
+	return d->checksum_ != d->filename.checksum();
 }
// Record the current on-disk checksum of the buffer's file, so that later
// calls to isChecksumModified() can detect external modification.
void Buffer::saveCheckSum() const
{
	FileName const & file = d->filename;
-
	file.refresh();
-	if (file.exists()) {
-		d->timestamp_ = file.lastModified();
-		d->checksum_ = file.checksum();
-	} else {
-		// in the case of save to a new file.
-		d->timestamp_ = 0;
-		d->checksum_ = 0;
-	}
+	// 0 marks "no file yet" (e.g. first save to a new file name)
+	d->checksum_ = file.exists() ? file.checksum()
+		: 0; // in the case of save to a new file.
}
// autosave
d->bak_clean = true;
d->undo_.markDirty();
+ clearExternalModification();
}
}
-string Buffer::originFilePath() const
+// Resolve a (possibly relative) file name referenced by the document.
+// Relative names are anchored at the buffer's current path when the file
+// exists there; otherwise we fall back to the origin path recorded in the
+// document parameters (the path the document was originally saved from).
+// Absolute names, or documents without an absolute origin, use filePath().
+DocFileName Buffer::getReferencedFileName(string const & fn) const
 {
-	if (FileName::isAbsolute(params().origin))
-		return params().origin;
+	DocFileName result;
+	if (FileName::isAbsolute(fn) || !FileName::isAbsolute(params().origin))
+		result.set(fn, filePath());
+	else {
+		// filePath() ends with a path separator
+		FileName const test(filePath() + fn);
+		if (test.exists())
+			result.set(fn, filePath());
+		else
+			result.set(fn, params().origin);
+	}
+
+	return result;
+}
+
+
+// Return the form of 'name' suitable for inclusion in LaTeX output.
+// If 'name' is absolute, or no readable file "<name><ext>" exists relative
+// to the buffer, it is returned unchanged. Otherwise: for non-nice (temp
+// directory) export the absolute path is returned; for nice export a path
+// relative to the master buffer's directory is returned.
+string const Buffer::prepareFileNameForLaTeX(string const & name,
+					     string const & ext, bool nice) const
+{
+	string const fname = makeAbsPath(name, filePath()).absFileName();
+	if (FileName::isAbsolute(name) || !FileName(fname + ext).isReadableFile())
+		return name;
+	if (!nice)
+		return fname;
+
+	// FIXME UNICODE
+	return to_utf8(makeRelPath(from_utf8(fname),
+		from_utf8(masterBuffer()->filePath())));
+}
+
+
+// Return the list of bibliography database paths to be written into the
+// LaTeX output (e.g. as \addbibresource / \bibliography arguments).
+// @param runparams      export parameters (nice vs. temp-dir export, etc.)
+// @param bibfilelist    pairs of (name as entered, resolved FileName)
+// @param add_extension  append ".bib" to each returned entry
+// Duplicate paths are dropped. For BibTeX (not biber) a one-time warning
+// is posted if any returned path contains spaces.
+vector<docstring> const Buffer::prepareBibFilePaths(OutputParams const & runparams,
+	FileNamePairList const bibfilelist,
+	bool const add_extension) const
+{
+	// If we are processing the LaTeX file in a temp directory then
+	// copy the .bib databases to this temp directory, mangling their
+	// names in the process. Store this mangled name in the list of
+	// all databases.
+	// (We need to do all this because BibTeX *really*, *really*
+	// can't handle "files with spaces" and Windows users tend to
+	// use such filenames.)
+	// Otherwise, store the (maybe absolute) path to the original,
+	// unmangled database name.
+
+	vector<docstring> res;
+
+	// determine the export format
+	string const tex_format = flavor2format(runparams.flavor);
+
+	// check for spaces in paths
+	bool found_space = false;
+
+	FileNamePairList::const_iterator it = bibfilelist.begin();
+	FileNamePairList::const_iterator en = bibfilelist.end();
+	for (; it != en; ++it) {
+		string utf8input = to_utf8(it->first);
+		string database =
+			prepareFileNameForLaTeX(utf8input, ".bib", runparams.nice);
+		FileName const try_in_file =
+			makeAbsPath(database + ".bib", filePath());
+		// readable next to the document => not provided by TeX's tree
+		bool const not_from_texmf = try_in_file.isReadableFile();
+
+		if (!runparams.inComment && !runparams.dryrun && !runparams.nice &&
+		    not_from_texmf) {
+			// mangledFileName() needs the extension
+			DocFileName const in_file = DocFileName(try_in_file);
+			database = removeExtension(in_file.mangledFileName());
+			FileName const out_file = makeAbsPath(database + ".bib",
+					masterBuffer()->temppath());
+			bool const success = in_file.copyTo(out_file);
+			if (!success) {
+				LYXERR0("Failed to copy '" << in_file
+				       << "' to '" << out_file << "'");
+			}
+		} else if (!runparams.inComment && runparams.nice && not_from_texmf) {
+			runparams.exportdata->addExternalFile(tex_format, try_in_file, database + ".bib");
+			if (!isValidLaTeXFileName(database)) {
+				frontend::Alert::warning(_("Invalid filename"),
+					_("The following filename will cause troubles "
+					      "when running the exported file through LaTeX: ") +
+					     from_utf8(database));
+			}
+			if (!isValidDVIFileName(database)) {
+				frontend::Alert::warning(_("Problematic filename for DVI"),
+					 _("The following filename can cause troubles "
+					       "when running the exported file through LaTeX "
+					       "and opening the resulting DVI: ") +
+					     from_utf8(database), true);
+			}
+		}
+
+		if (add_extension)
+			database += ".bib";
-	return filePath();
+		// FIXME UNICODE
+		docstring const path = from_utf8(latex_path(database));
+
+		if (contains(path, ' '))
+			found_space = true;
+
+		// avoid listing the same database twice
+		if (find(res.begin(), res.end(), path) == res.end())
+			res.push_back(path);
+	}
+
+	// Check if there are spaces in the path and warn BibTeX users, if so.
+	// (biber can cope with such paths)
+	if (!prefixIs(runparams.bibtex_command, "biber")) {
+		// Post this warning only once.
+		static bool warned_about_spaces = false;
+		if (!warned_about_spaces &&
+		    runparams.nice && found_space) {
+			warned_about_spaces = true;
+			Alert::warning(_("Export Warning!"),
+				       _("There are spaces in the paths to your BibTeX databases.\n"
+					      "BibTeX will be unable to find them."));
+		}
+	}
+
+	return res;
}
+
// Accessor for the stored layout position string (d->layout_position);
// presumably the location of a document-local layout file — verify at callers.
string Buffer::layoutPos() const
{
	return d->layout_position;
}
-bool Buffer::isReadonly() const
+// Return the plain read-only flag (d->read_only) — renamed from
+// isReadonly(), which now also accounts for external modifications.
+bool Buffer::hasReadonlyFlag() const
 {
 	return d->read_only;
 }
+// A buffer is treated as read-only when either the read-only flag is set
+// or a pending external-modification notification makes editing unsafe.
+bool Buffer::isReadonly() const
+{
+	return hasReadonlyFlag() || notifiesExternalModification();
+}
+
+
void Buffer::setParent(Buffer const * buffer)
{
// Avoids recursive include.
MacroContext mc = MacroContext(this, it);
for (DocIterator::idx_type i = 0; i < n; ++i) {
MathData & data = minset->cell(i);
- data.updateMacros(0, mc, utype);
+ data.updateMacros(0, mc, utype, 0);
}
}
}
}
// returns NULL if id-to-row conversion is unsupported
-auto_ptr<TexRow> Buffer::getSourceCode(odocstream & os, string const & format,
- pit_type par_begin, pit_type par_end,
- OutputWhat output, bool master) const
+unique_ptr<TexRow> Buffer::getSourceCode(odocstream & os, string const & format,
+ pit_type par_begin, pit_type par_end,
+ OutputWhat output, bool master) const
{
- auto_ptr<TexRow> texrow(NULL);
+ unique_ptr<TexRow> texrow;
OutputParams runparams(¶ms().encoding());
runparams.nice = true;
runparams.flavor = params().getOutputFlavor(format);
// in order to know if we should output polyglossia
// macros (instead of babel macros)
LaTeXFeatures features(*this, params(), runparams);
- params().validate(features);
+ validate(features);
runparams.use_polyglossia = features.usePolyglossia();
- texrow.reset(new TexRow());
- texrow->newline();
- texrow->newline();
// latex or literate
- otexstream ots(os, *texrow);
-
+ otexstream ots(os);
+ // output above
+ ots.texrow().newlines(2);
// the real stuff
latexParagraphs(*this, text(), ots, runparams);
- texrow->finalize();
+ texrow = ots.releaseTexRow();
// Restore the parenthood
if (!master)
writeDocBookSource(os, absFileName(), runparams, output);
} else {
// latex or literate
- texrow.reset(new TexRow());
- texrow->newline();
- texrow->newline();
- otexstream ots(os, *texrow);
+ otexstream ots(os);
+ // output above
+ ots.texrow().newlines(2);
if (master)
runparams.is_child = true;
writeLaTeXSource(ots, string(), runparams, output);
- texrow->finalize();
+ texrow = ots.releaseTexRow();
}
}
return texrow;
///
	virtual shared_ptr<ForkedProcess> clone() const
	{
-		return shared_ptr<ForkedProcess>(new AutoSaveBuffer(*this));
+		// make_shared: single allocation, no naked new
+		return make_shared<AutoSaveBuffer>(*this);
	}
///
int start()
bool Buffer::autoSave() const
{
Buffer const * buf = d->cloned_buffer_ ? d->cloned_buffer_ : this;
- if (buf->d->bak_clean || isReadonly())
+ if (buf->d->bak_clean || hasReadonlyFlag())
return true;
message(_("Autosaving current document..."));
if (pos != string::npos) {
dest_filename = target.substr(pos + 1, target.length() - pos - 1);
format = target.substr(0, pos);
+ if (format == "default")
+ format = params().getDefaultOutputFormat();
runparams.export_folder = FileName(dest_filename).onlyPath().realPath();
FileName(dest_filename).onlyPath().createPath();
LYXERR(Debug::FILES, "format=" << format << ", dest_filename=" << dest_filename << ", export_folder=" << runparams.export_folder);
// file (not for previewing).
Alert::error(_("Couldn't export file"), bformat(
_("No information for exporting the format %1$s."),
- formats.prettyName(format)));
+ theFormats().prettyName(format)));
}
return ExportNoPathToFormat;
}
string filename = latexName(false);
filename = addName(temppath(), filename);
filename = changeExtension(filename,
- formats.extension(backend_format));
+ theFormats().extension(backend_format));
LYXERR(Debug::FILES, "filename=" << filename);
// Plain text backend
string const error_type = (format == "program")
? "Build" : params().bufferFormat();
ErrorList & error_list = d->errorLists[error_type];
- string const ext = formats.extension(format);
+ string const ext = theFormats().extension(format);
FileName const tmp_result_file(changeExtension(filename, ext));
bool const success = converters.convert(this, FileName(filename),
tmp_result_file, FileName(absFileName()), backend_format, format,
vector<ExportedFile>::const_iterator it = files.begin();
vector<ExportedFile>::const_iterator const en = files.end();
for (; it != en && status != CANCEL; ++it) {
- string const fmt = formats.getFormatFromFile(it->sourceName);
+ string const fmt = theFormats().getFormatFromFile(it->sourceName);
string fixedName = it->exportName;
if (!runparams.export_folder.empty()) {
// Relative pathnames starting with ../ will be sanitized
} else {
message(bformat(_("Document exported as %1$s "
"to file `%2$s'"),
- formats.prettyName(format),
+ theFormats().prettyName(format),
makeDisplayPath(result_file)));
}
} else {
// This must be a dummy converter like fax (bug 1888)
message(bformat(_("Document exported as %1$s"),
- formats.prettyName(format)));
+ theFormats().prettyName(format)));
}
return success ? ExportSuccess : ExportConverterError;
ExportStatus const status = doExport(format, true, false, result_file);
FileName const previewFile(result_file);
- LATTEST (isClone());
- d->cloned_buffer_->d->preview_file_ = previewFile;
- d->cloned_buffer_->d->preview_format_ = format;
- d->cloned_buffer_->d->preview_error_ = (status != ExportSuccess);
+ Impl * theimpl = isClone() ? d->cloned_buffer_->d : d;
+ theimpl->preview_file_ = previewFile;
+ theimpl->preview_format_ = format;
+ theimpl->preview_error_ = (status != ExportSuccess);
if (status != ExportSuccess)
return status;
- if (previewFile.exists()) {
- if (!formats.view(*this, previewFile, format))
- return PreviewError;
- else
- return PreviewSuccess;
- }
- else {
- // Successful export but no output file?
- // Probably a bug in error detection.
- LATTEST (status != ExportSuccess);
- return status;
- }
+ if (previewFile.exists())
+ return theFormats().view(*this, previewFile, format) ?
+ PreviewSuccess : PreviewError;
+
+ // Successful export but no output file?
+ // Probably a bug in error detection.
+ LATTEST(status != ExportSuccess);
+ return status;
}
ReadStatus const ret_llf = loadThisLyXFile(emergencyFile);
bool const success = (ret_llf == ReadSuccess);
if (success) {
- if (isReadonly()) {
+ if (hasReadonlyFlag()) {
Alert::warning(_("File is read-only"),
bformat(_("An emergency file is successfully loaded, "
"but the original file %1$s is marked read-only. "
ReadStatus const ret_llf = loadThisLyXFile(autosaveFile);
// the file is not saved if we load the autosave file.
if (ret_llf == ReadSuccess) {
- if (isReadonly()) {
+ if (hasReadonlyFlag()) {
Alert::warning(_("File is read-only"),
bformat(_("A backup file is successfully loaded, "
"but the original file %1$s is marked read-only. "
void Buffer::bufferErrors(TeXErrors const & terr, ErrorList & errorList) const
{
-	TeXErrors::Errors::const_iterator it = terr.begin();
-	TeXErrors::Errors::const_iterator end = terr.end();
-	ListOfBuffers clist = getDescendents();
-	ListOfBuffers::const_iterator cen = clist.end();
-
-	for (; it != end; ++it) {
-		int id_start = -1;
-		int pos_start = -1;
-		int errorRow = it->error_in_line;
+	// Translate each TeX error line number into a start/end text position
+	// via the owning (child) buffer's TexRow, and record it as an ErrorItem.
+	for (auto const & err : terr) {
+		TexRow::TextEntry start = TexRow::text_none, end = TexRow::text_none;
+		int errorRow = err.error_in_line;
		Buffer const * buf = 0;
		Impl const * p = d;
-		if (it->child_name.empty())
-			p->texrow.getIdFromRow(errorRow, id_start, pos_start);
+		if (err.child_name.empty())
+			tie(start, end) = p->texrow.getEntriesFromRow(errorRow);
		else {
			// The error occurred in a child
+			// Find the child whose mangled .tex name matches, and look
+			// the row up in that child's TexRow instead.
			for (Buffer const * child : getDescendents()) {
				string const child_name =
-					DocFileName(changeExtension(
-						(*cit)->absFileName(), "tex")).
-					mangledFileName();
-				if (it->child_name != child_name)
+					DocFileName(changeExtension(child->absFileName(), "tex")).
+					mangledFileName();
+				if (err.child_name != child_name)
					continue;
-				(*cit)->d->texrow.getIdFromRow(errorRow,
-					id_start, pos_start);
-				if (id_start != -1) {
+				tie(start, end) = child->d->texrow.getEntriesFromRow(errorRow);
+				if (!TexRow::isNone(start)) {
					buf = d->cloned_buffer_
-						? (*cit)->d->cloned_buffer_->d->owner_
-						: (*cit)->d->owner_;
-					p = (*cit)->d;
+						? child->d->cloned_buffer_->d->owner_
+						: child->d->owner_;
+					p = child->d;
					break;
				}
			}
		}
-		int id_end = -1;
-		int pos_end = -1;
-		bool found;
-		do {
-			++errorRow;
-			found = p->texrow.getIdFromRow(errorRow, id_end, pos_end);
-		} while (found && id_start == id_end && pos_start == pos_end);
-
-		if (id_start != id_end) {
-			// Next registered position is outside the inset where
-			// the error occurred, so signal end-of-paragraph
-			pos_end = 0;
-		}
-
-		errorList.push_back(ErrorItem(it->error_desc,
-			it->error_text, id_start, pos_start, pos_end, buf));
+		errorList.push_back(ErrorItem(err.error_desc, err.error_text,
+		                              start, end, buf));
	}
}
// update all caches
clearReferenceCache();
updateMacros();
+ setChangesPresent(false);
Buffer & cbuf = const_cast<Buffer &>(*this);
ParIterator parit = cbuf.par_iterator_begin();
updateBuffer(parit, utype);
+ /// FIXME: Perf
+ /// Update the tocBackend for any buffer. The outliner uses the master's,
+ /// and the navigation menu uses the child's.
+ cbuf.tocBackend().update(true, utype);
+
if (master != this)
- // TocBackend update will be done later.
return;
d->bibinfo_cache_valid_ = true;
d->cite_labels_valid_ = true;
- /// FIXME: Perf
- cbuf.tocBackend().update(true, utype);
if (scope == UpdateMaster)
cbuf.structureChanged();
}
if (!it[i].inset().inMathed())
depth += it[i].paragraph().getDepth() + 1;
// remove 1 since the outer inset does not count
+ // we should have at least one non-math inset, so
+	// depth should never be 0. but maybe it is worth
+ // marking this, just in case.
+ LATTEST(depth > 0);
+ // coverity[INTEGER_OVERFLOW]
return depth - 1;
}
--prev_it.top().pit();
Paragraph const & prev_par = *prev_it;
if (prev_par.getDepth() <= cur_depth)
- return prev_par.layout().labeltype != LABEL_ENUMERATE;
+ return prev_par.layout().name() != par.layout().name();
}
// start of nested inset: reset
return true;
break;
}
+ // Increase the master counter?
+ if (layout.stepmastercounter && needEnumCounterReset(it))
+ counters.stepMaster(enumcounter, utype);
+
// Maybe we have to reset the enumeration counter.
- if (needEnumCounterReset(it))
+ if (!layout.resumecounter && needEnumCounterReset(it))
counters.reset(enumcounter);
counters.step(enumcounter, utype);
// set the counter for this paragraph
d->setLabel(parit, utype);
+ // update change-tracking flag
+ parit->addChangesToBuffer(*this);
+
// now the insets
InsetList::const_iterator iit = parit->insetList().begin();
InsetList::const_iterator end = parit->insetList().end();
string Buffer::includedFilePath(string const & name, string const & ext) const
{
+ if (d->old_position.empty() ||
+ equivalent(FileName(d->old_position), FileName(filePath())))
+ return name;
+
bool isabsolute = FileName::isAbsolute(name);
- // old_position already contains a trailing path separator
- string const absname = isabsolute ? name : d->old_position + name;
+ // both old_position and filePath() end with a path separator
+ string absname = isabsolute ? name : d->old_position + name;
- if (d->old_position.empty()
- || equivalent(FileName(d->old_position), FileName(filePath()))
- || !FileName(addExtension(absname, ext)).exists())
+ // if old_position is set to origin, we need to do the equivalent of
+ // getReferencedFileName() (see readDocument())
+ if (!isabsolute && d->old_position == params().origin) {
+ FileName const test(addExtension(filePath() + name, ext));
+ if (test.exists())
+ absname = filePath() + name;
+ }
+
+ if (!FileName(addExtension(absname, ext)).exists())
return name;
if (isabsolute)
from_utf8(filePath())));
}
+
+// Set the cached "tracked changes present" flag. Const: only mutates
+// cached state behind the d-pointer, not the document content.
+void Buffer::setChangesPresent(bool b) const
+{
+	d->tracked_changes_present_ = b;
+}
+
+
+// Whether tracked changes have been recorded in this buffer.
+// The flag is maintained via setChangesPresent().
+bool Buffer::areChangesPresent() const
+{
+	return d->tracked_changes_present_;
+}
+
+
+// Recompute the tracked-changes flag from scratch: clear it, then let
+// each paragraph re-register its changes. The scan stops early as soon
+// as one paragraph flips the flag back to true.
+void Buffer::updateChangesPresent() const
+{
+	LYXERR(Debug::CHANGES, "Buffer::updateChangesPresent");
+	setChangesPresent(false);
+	ParConstIterator it = par_iterator_begin();
+	ParConstIterator const end = par_iterator_end();
+	for (; !areChangesPresent() && it != end; ++it)
+		it->addChangesToBuffer(*this);
+}
+
+
+// Make sure file_monitor_ watches the buffer's current file name.
+void Buffer::Impl::refreshFileMonitor()
+{
+	// Reuse the existing monitor if it already watches the right file
+	if (file_monitor_ && file_monitor_->filename() == filename.absFileName())
+		return file_monitor_->refresh();
+
+	// The previous file monitor is invalid
+	// This also destroys the previous file monitor and all its connections
+	file_monitor_ = FileSystemWatcher::monitor(filename);
+	// Start from a clean state for the newly watched file
+	fileExternallyModified(false);
+	// file_monitor_ will be destroyed with *this, so it is not going to call a
+	// destroyed object method.
+	file_monitor_->connect([this](){ fileExternallyModified(true); });
+}
+
+
+// Record whether the file was modified on disk behind our back.
+void Buffer::Impl::fileExternallyModified(bool modified) const
+{
+	if (modified)
+		// A modified file on disk means neither the in-memory state nor
+		// the autosave backup can be considered up to date any more.
+		lyx_clean = bak_clean = false;
+	externally_modified_ = modified;
+	if (wa_)
+		// Refresh window titles so the new state becomes visible.
+		wa_->updateTitles();
+}
+
+
+// True while an external modification of the file on disk is flagged
+// (delegates to Impl).
+bool Buffer::notifiesExternalModification() const
+{
+	return d->isExternallyModified();
+}
+
+
+// Dismiss a flagged external modification of the file on disk.
+void Buffer::clearExternalModification() const
+{
+	d->fileExternallyModified(false);
+}
+
+
} // namespace lyx