* This file is part of LyX, the document processor.
* Licence details can be found in the file COPYING.
*
- * \author Lars Gullik Bjønnes
+ * \author Lars Gullik Bjønnes
* \author Stefan Schimanski
*
* Full author contact details are available in file CREDITS.
#include "Buffer.h"
#include "Author.h"
+#include "LayoutFile.h"
#include "BiblioInfo.h"
#include "BranchList.h"
#include "buffer_funcs.h"
#include "Converter.h"
#include "Counters.h"
#include "DocIterator.h"
-#include "EmbeddedFiles.h"
#include "Encoding.h"
#include "ErrorList.h"
#include "Exporter.h"
#include "ParagraphParameters.h"
#include "ParIterator.h"
#include "PDFOptions.h"
-#include "Session.h"
#include "sgml.h"
#include "TexRow.h"
#include "TexStream.h"
-#include "TextClassList.h"
#include "Text.h"
+#include "TextClass.h"
#include "TocBackend.h"
#include "Undo.h"
#include "VCBackend.h"
#include "version.h"
+#include "WordList.h"
#include "insets/InsetBibitem.h"
#include "insets/InsetBibtex.h"
#include "graphics/Previews.h"
+#include "support/lassert.h"
#include "support/convert.h"
#include "support/debug.h"
-#include "support/FileFilterList.h"
+#include "support/ExceptionMessage.h"
+#include "support/FileName.h"
#include "support/FileNameList.h"
#include "support/filetools.h"
#include "support/ForkedCalls.h"
#include "support/gzstream.h"
#include "support/lstrings.h"
#include "support/lyxalgo.h"
-#include "support/lyxlib.h"
#include "support/os.h"
+#include "support/Package.h"
#include "support/Path.h"
#include "support/textutils.h"
#include "support/types.h"
-#include "support/FileZipListDir.h"
-
-#if !defined (HAVE_FORK)
-# define fork() -1
-#endif
#include <boost/bind.hpp>
#include <boost/shared_ptr.hpp>
#include <algorithm>
+#include <fstream>
#include <iomanip>
-#include <stack>
+#include <map>
+#include <set>
#include <sstream>
-#include <fstream>
+#include <stack>
+#include <vector>
-using std::endl;
-using std::for_each;
-using std::make_pair;
-
-using std::ios;
-using std::map;
-using std::ostream;
-using std::ostringstream;
-using std::ofstream;
-using std::ifstream;
-using std::pair;
-using std::stack;
-using std::vector;
-using std::string;
-using std::time_t;
+using namespace std;
+using namespace lyx::support;
namespace lyx {
-using support::addName;
-using support::bformat;
-using support::changeExtension;
-using support::cmd_ret;
-using support::createBufferTmpDir;
-using support::FileName;
-using support::FileNameList;
-using support::libFileSearch;
-using support::latex_path;
-using support::ltrim;
-using support::makeAbsPath;
-using support::makeDisplayPath;
-using support::makeLatexName;
-using support::onlyFilename;
-using support::onlyPath;
-using support::quoteName;
-using support::removeAutosaveFile;
-using support::rename;
-using support::runCommand;
-using support::split;
-using support::subst;
-using support::tempName;
-using support::trim;
-using support::suffixIs;
-
namespace Alert = frontend::Alert;
namespace os = support::os;
namespace {
-int const LYX_FORMAT = 307; // JSpitzm: support for \slash
+// Do not remove the comment below, so we get merge conflict in
+// independent branches. Instead add your own.
+int const LYX_FORMAT = 348; // uwestoehr: add support for \*phantom
-} // namespace anon
+typedef map<string, bool> DepClean;
+typedef map<docstring, pair<InsetLabel const *, Buffer::References> > RefCache;
+} // namespace anon
-typedef std::map<string, bool> DepClean;
+class BufferSet : public std::set<Buffer const *> {};
class Buffer::Impl
{
wa_->closeAll();
delete wa_;
}
+ delete inset;
}
-
+
BufferParams params;
LyXVC lyxvc;
- string temppath;
+ FileName temppath;
mutable TexRow texrow;
Buffer const * parent_buffer;
*/
bool file_fully_loaded;
- /// our Text that should be wrapped in an InsetText
- InsetText inset;
-
///
mutable TocBackend toc_backend;
- /// macro table
- typedef std::map<unsigned int, MacroData, std::greater<int> > PositionToMacroMap;
- typedef std::map<docstring, PositionToMacroMap> NameToPositionMacroMap;
- NameToPositionMacroMap macros;
+ /// macro tables
+ typedef pair<DocIterator, MacroData> ScopeMacro;
+ typedef map<DocIterator, ScopeMacro> PositionScopeMacroMap;
+ typedef map<docstring, PositionScopeMacroMap> NamePositionScopeMacroMap;
+ /// map from the macro name to the position map,
+ /// which maps the macro definition position to the scope and the MacroData.
+ NamePositionScopeMacroMap macros;
+ bool macro_lock;
+
+ /// positions of child buffers in the buffer
+ typedef map<Buffer const * const, DocIterator> BufferPositionMap;
+ typedef pair<DocIterator, Buffer const *> ScopeBuffer;
+ typedef map<DocIterator, ScopeBuffer> PositionScopeBufferMap;
+ /// position of children buffers in this buffer
+ BufferPositionMap children_positions;
+ /// map from children inclusion positions to their scope and their buffer
+ PositionScopeBufferMap position_to_children;
/// Container for all sort of Buffer dependant errors.
map<string, ErrorList> errorLists;
- /// all embedded files of this buffer
- EmbeddedFiles embedded_files;
-
/// timestamp and checksum used to test if the file has been externally
/// modified. (Used to properly enable 'File->Revert to saved', bug 4114).
time_t timestamp_;
/// A cache for the bibfiles (including bibfiles of loaded child
/// documents), needed for appropriate update of natbib labels.
- mutable FileNameList bibfilesCache_;
+ mutable support::FileNameList bibfilesCache_;
+
+ // FIXME The caching mechanism could be improved. At present, we have a
+ // cache for each Buffer, that caches all the bibliography info for that
+ // Buffer. A more efficient solution would be to have a global cache per
+ // file, and then to construct the Buffer's bibinfo from that.
+ /// A cache for bibliography info
+ mutable BiblioInfo bibinfo_;
+ /// whether the bibinfo cache is valid
+ bool bibinfoCacheValid_;
+ /// Cache of timestamps of .bib files
+ map<FileName, time_t> bibfileStatus_;
+
+ mutable RefCache ref_cache_;
+
+ /// our Text that should be wrapped in an InsetText
+ InsetText * inset;
};
+/// Creates the per buffer temporary directory
+/// \returns the absolute name of the newly created directory.
+/// \throws ExceptionMessage (WarningException) when the directory
+/// cannot be created, e.g. because the disk is full.
+static FileName createBufferTmpDir()
+{
+	// Monotonically increasing suffix; together with the process-wide
+	// temp dir this makes each buffer's directory name unique.
+	static int count;
+	// We are in our own directory. Why bother to mangle name?
+	// In fact I wrote this code to circumvent a problematic behaviour
+	// (bug?) of EMX mkstemp().
+	FileName tmpfl(package().temp_dir().absFilename() + "/lyx_tmpbuf" +
+		convert<string>(count++));
+
+	// NOTE(review): 0777 is presumably masked by the process umask —
+	// confirm in FileName::createDirectory.
+	if (!tmpfl.createDirectory(0777)) {
+		throw ExceptionMessage(WarningException, _("Disk Error: "), bformat(
+			_("LyX could not create the temporary directory '%1$s' (Disk is full maybe?)"),
+			from_utf8(tmpfl.absFilename())));
+	}
+	return tmpfl;
+}
+
+
Buffer::Impl::Impl(Buffer & parent, FileName const & file, bool readonly_)
	: parent_buffer(0), lyx_clean(true), bak_clean(true), unnamed(false),
	read_only(readonly_), filename(file), file_fully_loaded(false),
-	inset(params), toc_backend(&parent), embedded_files(&parent),
-	timestamp_(0), checksum_(0), wa_(0), undo_(parent)
+	toc_backend(&parent), macro_lock(false), timestamp_(0),
+	checksum_(0), wa_(0), undo_(parent), bibinfoCacheValid_(false)
{
-	inset.setAutoBreakRows(true);
-	lyxvc.setBuffer(&parent);
+	// The text inset is now heap-allocated in Buffer's constructor,
+	// because it needs a reference to the fully constructed Buffer.
	temppath = createBufferTmpDir();
-
-	// FIXME: And now do something if temppath == string(), because we
-	// assume from now on that temppath points to a valid temp dir.
-	// See http://www.mail-archive.com/lyx-devel@lists.lyx.org/msg67406.html
-
+	// createBufferTmpDir() throws on failure, so the old FIXME about
+	// an empty temppath no longer applies here.
+	lyxvc.setBuffer(&parent);
	if (use_gui)
		wa_ = new frontend::WorkAreaManager;
}
: d(new Impl(*this, FileName(file), readonly)), gui_(0)
{
LYXERR(Debug::INFO, "Buffer::Buffer()");
+
+ d->inset = new InsetText(*this);
+ d->inset->setAutoBreakRows(true);
+ d->inset->getText(0)->setMacrocontextPosition(par_iterator_begin());
}
// GuiView already destroyed
gui_ = 0;
- Buffer const * master = masterBuffer();
- if (master != this && use_gui)
- // We are closing buf which was a child document so we
- // must update the labels and section numbering of its master
- // Buffer.
- updateLabels(*master);
+ if (d->unnamed && d->filename.extension() == "internal") {
+ // No need to do additional cleanups for internal buffer.
+ delete d;
+ return;
+ }
- resetChildDocuments(false);
+ // loop over children
+ Impl::BufferPositionMap::iterator it = d->children_positions.begin();
+ Impl::BufferPositionMap::iterator end = d->children_positions.end();
+ for (; it != end; ++it) {
+ Buffer * child = const_cast<Buffer *>(it->first);
+ // The child buffer might have been closed already.
+ if (theBufferList().isLoaded(child))
+ theBufferList().releaseChild(this, child);
+ }
- if (!temppath().empty() && !FileName(temppath()).destroyDirectory()) {
+ // clear references to children in macro tables
+ d->children_positions.clear();
+ d->position_to_children.clear();
+
+ if (!d->temppath.destroyDirectory()) {
Alert::warning(_("Could not remove temporary directory"),
bformat(_("Could not remove the temporary directory %1$s"),
- from_utf8(temppath())));
+ from_utf8(d->temppath.absFilename())));
}
// Remove any previewed LaTeX snippets associated with this buffer.
- graphics::Previews::get().removeLoader(*this);
+ thePreviews().removeLoader(*this);
delete d;
}
frontend::WorkAreaManager & Buffer::workAreaManager() const
{
-	BOOST_ASSERT(d->wa_);
+	// wa_ exists only in GUI mode (allocated in Impl's ctor when use_gui).
+	LASSERT(d->wa_, /**/);
	return *d->wa_;
}
Text & Buffer::text() const
{
-	return const_cast<Text &>(d->inset.text_);
+	// The top-level text now lives in the heap-allocated inset, and
+	// InsetText::text() gives access, so no const_cast is needed.
+	return d->inset->text();
}
Inset & Buffer::inset() const
{
-	return const_cast<InsetText &>(d->inset);
+	// inset is a pointer member now, so no const_cast is required.
+	return *d->inset;
}
}
-string const & Buffer::temppath() const
+// temppath is stored as a FileName now, so the string form must be
+// built on the fly; hence the return by value instead of by reference.
+string const Buffer::temppath() const
{
-	return d->temppath;
+	return d->temppath.absFilename();
}
-TexRow const & Buffer::texrow() const
+// Mutable accessor; a const overload is also provided.
+TexRow & Buffer::texrow()
{
	return d->texrow;
}
-TocBackend & Buffer::tocBackend() const
-{
-	return d->toc_backend;
-}
-
-
-EmbeddedFiles & Buffer::embeddedFiles()
+// Const counterpart of the mutable texrow() accessor above.
+// (The EmbeddedFiles accessors are gone along with the dropped
+// embedded_files member.)
+TexRow const & Buffer::texrow() const
{
-	return d->embedded_files;
+	return d->texrow;
}
-EmbeddedFiles const & Buffer::embeddedFiles() const
+TocBackend & Buffer::tocBackend() const
{
-	return d->embedded_files;
+	return d->toc_backend;
}
params().branchlist().clear();
params().preamble.erase();
params().options.erase();
+ params().master.erase();
params().float_placement.erase();
params().paperwidth.erase();
params().paperheight.erase();
params().headheight.erase();
params().headsep.erase();
params().footskip.erase();
+ params().columnsep.erase();
+ params().fontsCJK.erase();
params().listings_params.clear();
params().clearLayoutModules();
+ params().clearRemovedModules();
params().pdfoptions().clear();
-
+
for (int i = 0; i < 4; ++i) {
params().user_defined_bullet(i) = ITEMIZE_DEFAULTS[i];
params().temp_bullet(i) = ITEMIZE_DEFAULTS[i];
ErrorList & errorList = d->errorLists["Parse"];
while (lex.isOK()) {
- lex.next();
- string const token = lex.getString();
+ string token;
+ lex >> token;
if (token.empty())
continue;
s, -1, 0, 0));
}
+ params().makeDocumentClass();
+
return unknown_tokens;
}
// Uwe C. Schroeder
// changed to be public and have one parameter
-// Returns false if "\end_document" is not read (Asger)
+// Returns true if "\end_document" is not read (Asger)
bool Buffer::readDocument(Lexer & lex)
{
ErrorList & errorList = d->errorLists["Parse"];
errorList.clear();
- lex.next();
- string const token = lex.getString();
- if (token != "\\begin_document") {
+ if (!lex.checkFor("\\begin_document")) {
docstring const s = _("\\begin_document is missing");
errorList.push_back(ErrorItem(_("Document header error"),
s, -1, 0, 0));
}
// we are reading in a brand new document
- BOOST_ASSERT(paragraphs().empty());
+ LASSERT(paragraphs().empty(), /**/);
readHeader(lex);
- TextClass const & baseClass = textclasslist[params().getBaseClass()];
- if (!baseClass.load(filePath())) {
- string theclass = baseClass.name();
- Alert::error(_("Can't load document class"), bformat(
- _("Using the default document class, because the "
- "class %1$s could not be loaded."), from_utf8(theclass)));
- params().setBaseClass(defaultTextclass());
- }
if (params().outputChanges) {
bool dvipost = LaTeXFeatures::isAvailable("dvipost");
- bool xcolorsoul = LaTeXFeatures::isAvailable("soul") &&
+ bool xcolorulem = LaTeXFeatures::isAvailable("ulem") &&
LaTeXFeatures::isAvailable("xcolor");
- if (!dvipost && !xcolorsoul) {
+ if (!dvipost && !xcolorulem) {
Alert::warning(_("Changes not shown in LaTeX output"),
_("Changes will not be highlighted in LaTeX output, "
- "because neither dvipost nor xcolor/soul are installed.\n"
+ "because neither dvipost nor xcolor/ulem are installed.\n"
"Please install these packages or redefine "
"\\lyxadded and \\lyxdeleted in the LaTeX preamble."));
- } else if (!xcolorsoul) {
+ } else if (!xcolorulem) {
Alert::warning(_("Changes not shown in LaTeX output"),
_("Changes will not be highlighted in LaTeX output "
- "when using pdflatex, because xcolor and soul are not installed.\n"
+ "when using pdflatex, because xcolor and ulem are not installed.\n"
"Please install both packages or redefine "
"\\lyxadded and \\lyxdeleted in the LaTeX preamble."));
}
}
+ if (!params().master.empty()) {
+ FileName const master_file = makeAbsPath(params().master,
+ onlyPath(absFileName()));
+ if (isLyXFilename(master_file.absFilename())) {
+ Buffer * master =
+ checkAndLoadLyXFile(master_file, true);
+ if (master) {
+ // set master as master buffer, but only
+ // if we are a real child
+ if (master->isChild(this))
+ setParent(master);
+ // if the master is not fully loaded
+ // it is probably just loading this
+ // child. No warning needed then.
+ else if (master->isFullyLoaded())
+ LYXERR0("The master '"
+ << params().master
+ << "' assigned to this document '"
+ << absFileName()
+ << "' does not include "
+ "this document. Ignoring the master assignment.");
+ }
+ }
+ }
+
// read main text
- bool const res = text().read(*this, lex, errorList);
- for_each(text().paragraphs().begin(),
- text().paragraphs().end(),
- bind(&Paragraph::setInsetOwner, _1, &inset()));
+ bool const res = text().read(*this, lex, errorList, d->inset);
+ updateMacros();
+ updateMacroInstances();
return res;
}
if (*cit == '\n') {
if (autobreakrows && (!par.empty() || par.allowEmpty())) {
breakParagraph(params(), pars, pit, pos,
- par.layout()->isEnvironment());
+ par.layout().isEnvironment());
++pit;
pos = 0;
space_inserted = true;
++pos;
space_inserted = true;
} else {
- const pos_type n = 8 - pos % 8;
- for (pos_type i = 0; i < n; ++i) {
- par.insertChar(pos, ' ', font, params().trackChanges);
- ++pos;
- }
+ par.insertChar(pos, *cit, font, params().trackChanges);
+ ++pos;
space_inserted = true;
}
} else if (!isPrintable(*cit)) {
}
-bool Buffer::readString(std::string const & s)
+bool Buffer::readString(string const & s)
{
params().compressed = false;
// remove dummy empty par
paragraphs().clear();
- Lexer lex(0, 0);
- std::istringstream is(s);
+ Lexer lex;
+ istringstream is(s);
lex.setStream(is);
- FileName const name(tempName());
+ FileName const name = FileName::tempName("Buffer_readString");
switch (readFile(lex, name, true)) {
case failure:
return false;
case wrongversion: {
// We need to call lyx2lyx, so write the input to a file
- std::ofstream os(name.toFilesystemEncoding().c_str());
+ ofstream os(name.toFilesystemEncoding().c_str());
os << s;
os.close();
return readFile(name);
bool Buffer::readFile(FileName const & filename)
{
FileName fname(filename);
- // Check if the file is compressed.
- string format = filename.guessFormatFromContents();
- if (format == "zip") {
- // decompress to a temp directory
- LYXERR(Debug::FILES, filename << " is in zip format. Unzip to " << temppath());
- ::unzipToDir(filename.toFilesystemEncoding(), temppath());
- //
- FileName lyxfile(addName(temppath(), "content.lyx"));
- // if both manifest.txt and file.lyx exist, this is am embedded file
- if (lyxfile.exists()) {
- params().embedded = true;
- fname = lyxfile;
- }
- }
- // The embedded lyx file can also be compressed, for backward compatibility
- format = fname.guessFormatFromContents();
- if (format == "gzip" || format == "zip" || format == "compress")
- params().compressed = true;
+
+ params().compressed = fname.isZippedFile();
// remove dummy empty par
paragraphs().clear();
- Lexer lex(0, 0);
+ Lexer lex;
lex.setFile(fname);
if (readFile(lex, fname) != success)
return false;
Buffer::ReadStatus Buffer::readFile(Lexer & lex, FileName const & filename,
bool fromstring)
{
- BOOST_ASSERT(!filename.empty());
-
- if (!lex.isOK()) {
- Alert::error(_("Document could not be read"),
- bformat(_("%1$s could not be read."), from_utf8(filename.absFilename())));
- return failure;
- }
-
- lex.next();
- string const token = lex.getString();
-
- if (!lex) {
- Alert::error(_("Document could not be read"),
- bformat(_("%1$s could not be read."), from_utf8(filename.absFilename())));
- return failure;
- }
-
- // the first token _must_ be...
- if (token != "\\lyxformat") {
- lyxerr << "Token: " << token << endl;
+ LASSERT(!filename.empty(), /**/);
+ // the first (non-comment) token _must_ be...
+ if (!lex.checkFor("\\lyxformat")) {
Alert::error(_("Document format failure"),
- bformat(_("%1$s is not a LyX document."),
+ bformat(_("%1$s is not a readable LyX document."),
from_utf8(filename.absFilename())));
return failure;
}
- lex.next();
- string tmp_format = lex.getString();
+ string tmp_format;
+ lex >> tmp_format;
//lyxerr << "LyX Format: `" << tmp_format << '\'' << endl;
// if present remove ".," from string.
- string::size_type dot = tmp_format.find_first_of(".,");
+ size_t dot = tmp_format.find_first_of(".,");
//lyxerr << " dot found at " << dot << endl;
if (dot != string::npos)
tmp_format.erase(dot, 1);
// lyx2lyx would fail
return wrongversion;
- FileName const tmpfile(tempName());
+ FileName const tmpfile = FileName::tempName("Buffer_readFile");
if (tmpfile.empty()) {
Alert::error(_("Conversion failed"),
bformat(_("%1$s is from a different"
" version of LyX, but a temporary"
" file for converting it could"
- " not be created."),
+ " not be created."),
from_utf8(filename.absFilename())));
return failure;
}
bformat(_("%1$s is from a different"
" version of LyX, but the"
" conversion script lyx2lyx"
- " could not be found."),
+ " could not be found."),
from_utf8(filename.absFilename())));
return failure;
}
Alert::error(_("Conversion script failed"),
bformat(_("%1$s is from a different version"
" of LyX, but the lyx2lyx script"
- " failed to convert it."),
+ " failed to convert it."),
from_utf8(filename.absFilename())));
return failure;
} else {
backupName = FileName(addName(lyxrc.backupdir_path,
mangledName));
}
- if (fileName().copyTo(backupName, true)) {
+ if (fileName().copyTo(backupName)) {
madeBackup = true;
} else {
Alert::error(_("Backup failure"),
} else {
// Saving failed, so backup is not backup
if (madeBackup)
- rename(backupName, d->filename);
+ backupName.moveTo(d->filename);
return false;
}
}
bool retval = false;
- FileName content;
- if (params().embedded)
- // first write the .lyx file to the temporary directory
- content = FileName(addName(temppath(), "content.lyx"));
- else
- content = fname;
-
docstring const str = bformat(_("Saving document %1$s..."),
- makeDisplayPath(content.absFilename()));
+ makeDisplayPath(fname.absFilename()));
message(str);
if (params().compressed) {
- gz::ogzstream ofs(content.toFilesystemEncoding().c_str(), ios::out|ios::trunc);
+ gz::ogzstream ofs(fname.toFilesystemEncoding().c_str(), ios::out|ios::trunc);
retval = ofs && write(ofs);
} else {
- ofstream ofs(content.toFilesystemEncoding().c_str(), ios::out|ios::trunc);
+ ofstream ofs(fname.toFilesystemEncoding().c_str(), ios::out|ios::trunc);
retval = ofs && write(ofs);
}
if (!retval) {
- message(str + _(" could not write file!."));
+ message(str + _(" could not write file!"));
return false;
}
removeAutosaveFile(d->filename.absFilename());
+
saveCheckSum(d->filename);
message(str + _(" done."));
- if (!params().embedded)
- return true;
-
- message(str + _(" writing embedded files!."));
- // if embedding is enabled, write file.lyx and all the embedded files
- // to the zip file fname.
- if (!d->embedded_files.writeFile(fname)) {
- message(str + _(" could not write embedded files!."));
- return false;
- }
- message(str + _(" error while writing embedded files."));
return true;
}
{
#ifdef HAVE_LOCALE
// Use the standard "C" locale for file output.
- ofs.imbue(std::locale::classic());
+ ofs.imbue(locale::classic());
#endif
// The top of the file should not be written by params().
<< "\\lyxformat " << LYX_FORMAT << "\n"
<< "\\begin_document\n";
-
/// For each author, set 'used' to true if there is a change
/// by this author in the document; otherwise set it to 'false'.
AuthorList::Authors::const_iterator a_it = params().authors().begin();
for (; a_it != a_end; ++a_it)
a_it->second.setUsed(false);
- ParIterator const end = par_iterator_end();
- ParIterator it = par_iterator_begin();
+ ParIterator const end = const_cast<Buffer *>(this)->par_iterator_end();
+ ParIterator it = const_cast<Buffer *>(this)->par_iterator_begin();
for ( ; it != end; ++it)
it->checkAuthors(params().authors());
string const encoding = runparams.encoding->iconvName();
LYXERR(Debug::LATEX, "makeLaTeXFile encoding: " << encoding << "...");
- odocfstream ofs;
+ ofdocstream ofs;
try { ofs.reset(encoding); }
catch (iconv_codecvt_facet_exception & e) {
lyxerr << "Caught iconv exception: " << e.what() << endl;
return false;
//TexStream ts(ofs.rdbuf(), &texrow());
-
+ ErrorList & errorList = d->errorLists["Export"];
+ errorList.clear();
bool failed_export = false;
try {
d->texrow.reset();
writeLaTeXSource(ofs, original_path,
runparams, output_preamble, output_body);
}
+ catch (EncodingException & e) {
+ odocstringstream ods;
+ ods.put(e.failed_char);
+ ostringstream oss;
+ oss << "0x" << hex << e.failed_char << dec;
+ docstring msg = bformat(_("Could not find LaTeX command for character '%1$s'"
+ " (code point %2$s)"),
+ ods.str(), from_utf8(oss.str()));
+ errorList.push_back(ErrorItem(msg, _("Some characters of your document are probably not "
+ "representable in the chosen encoding.\n"
+ "Changing the document encoding to utf8 could help."),
+ e.par_id, e.pos, e.pos + 1));
+ failed_export = true;
+ }
catch (iconv_codecvt_facet_exception & e) {
- lyxerr << "Caught iconv exception: " << e.what() << endl;
+ errorList.push_back(ErrorItem(_("iconv conversion failed"),
+ _(e.what()), -1, 0, 0));
failed_export = true;
}
- catch (std::exception const & e) {
- lyxerr << "Caught \"normal\" exception: " << e.what() << endl;
+ catch (exception const & e) {
+ errorList.push_back(ErrorItem(_("conversion failed"),
+ _(e.what()), -1, 0, 0));
failed_export = true;
}
catch (...) {
lyxerr << "Caught some really weird exception..." << endl;
- LyX::cref().emergencyCleanup();
- abort();
+ lyx_exit(1);
}
ofs.close();
lyxerr << "File '" << fname << "' was not closed properly." << endl;
}
- if (failed_export) {
- Alert::error(_("Encoding error"),
- _("Some characters of your document are probably not "
- "representable in the chosen encoding.\n"
- "Changing the document encoding to utf8 could help."));
- return false;
- }
- return true;
+ errors("Export");
+ return !failed_export;
}
OutputParams const & runparams_in,
bool const output_preamble, bool const output_body) const
{
+ // The child documents, if any, shall be already loaded at this point.
+
OutputParams runparams = runparams_in;
+ // Classify the unicode characters appearing in math insets
+ Encodings::initUnicodeMath(*this);
+
// validate the buffer.
LYXERR(Debug::LATEX, " Validating buffer...");
LaTeXFeatures features(*this, params(), runparams);
d->texrow.newline();
}
LYXERR(Debug::INFO, "lyx document header finished");
+
+ // Don't move this behind the parent_buffer=0 code below,
+ // because then the macros will not get the right "redefinition"
+ // flag as they don't see the parent macros which are output before.
+ updateMacros();
+
+ // fold macros if possible, still with parent buffer as the
+ // macros will be put in the prefix anyway.
+ updateMacroInstances();
+
// There are a few differences between nice LaTeX and usual files:
// usual is \batchmode and has a
// special input@path to allow the including of figures
if (output_preamble) {
if (!runparams.nice) {
// code for usual, NOT nice-latex-file
- os << "\\nonstopmode\n";
+ os << "\\batchmode\n"; // changed
+ // from \nonstopmode
d->texrow.newline();
}
if (!original_path.empty()) {
d->texrow.newline();
}
+ // get parent macros (if this buffer has a parent) which will be
+ // written at the document begin further down.
+ MacroSet parentMacros;
+ listParentMacros(parentMacros, features);
+
// Write the preamble
runparams.use_babel = params().writeLaTeX(os, features, d->texrow);
+ runparams.use_japanese = features.isRequired("japanese");
+
if (!output_body)
return;
// make the body.
os << "\\begin{document}\n";
d->texrow.newline();
+
+ // output the parent macros
+ MacroSet::iterator it = parentMacros.begin();
+ MacroSet::iterator end = parentMacros.end();
+ for (; it != end; ++it)
+ (*it)->write(os, true);
} // output_preamble
d->texrow.start(paragraphs().begin()->id(), 0);
-
+
LYXERR(Debug::INFO, "preamble finished, now the body.");
// if we are doing a real file with body, even if this is the
d->parent_buffer = 0;
}
- loadChildDocuments();
-
// the real stuff
- latexParagraphs(*this, paragraphs(), os, d->texrow, runparams);
+ latexParagraphs(*this, text(), os, d->texrow, runparams);
// Restore the parenthood if needed
if (output_preamble)
bool Buffer::isLatex() const
{
-	return params().getTextClass().outputType() == LATEX;
+	// DocumentClass replaces the former per-buffer TextClass accessor.
+	return params().documentClass().outputType() == LATEX;
}
bool Buffer::isLiterate() const
{
-	return params().getTextClass().outputType() == LITERATE;
+	// DocumentClass replaces the former per-buffer TextClass accessor.
+	return params().documentClass().outputType() == LITERATE;
}
bool Buffer::isDocBook() const
{
-	return params().getTextClass().outputType() == DOCBOOK;
+	// DocumentClass replaces the former per-buffer TextClass accessor.
+	return params().documentClass().outputType() == DOCBOOK;
}
{
LYXERR(Debug::LATEX, "makeDocBookFile...");
- //ofstream ofs;
- odocfstream ofs;
+ ofdocstream ofs;
if (!openFileWrite(ofs, fname))
return;
d->texrow.reset();
- TextClass const & tclass = params().getTextClass();
+ DocumentClass const & tclass = params().documentClass();
string const top_element = tclass.latexname();
if (!only_body) {
if (runparams.flavor == OutputParams::XML)
top += params().language->code();
else
- top += params().language->code().substr(0,2);
+ top += params().language->code().substr(0, 2);
top += '"';
if (!params().options.empty()) {
<< " file was created by LyX " << lyx_version
<< "\n See http://www.lyx.org/ for more information -->\n";
- params().getTextClass().counters().reset();
+ params().documentClass().counters().reset();
- loadChildDocuments();
+ updateMacros();
sgml::openTag(os, top);
os << '\n';
string const name = addName(path.absFilename(), latexName());
string const org_path = filePath();
- support::PathChanger p(path); // path to LaTeX file
+ PathChanger p(path); // path to LaTeX file
message(_("Running chktex..."));
// Generate the LaTeX file if neccessary
void Buffer::validate(LaTeXFeatures & features) const
{
- TextClass const & tclass = params().getTextClass();
-
- if (params().outputChanges) {
- bool dvipost = LaTeXFeatures::isAvailable("dvipost");
- bool xcolorsoul = LaTeXFeatures::isAvailable("soul") &&
- LaTeXFeatures::isAvailable("xcolor");
-
- if (features.runparams().flavor == OutputParams::LATEX) {
- if (dvipost) {
- features.require("ct-dvipost");
- features.require("dvipost");
- } else if (xcolorsoul) {
- features.require("ct-xcolor-soul");
- features.require("soul");
- features.require("xcolor");
- } else {
- features.require("ct-none");
- }
- } else if (features.runparams().flavor == OutputParams::PDFLATEX ) {
- if (xcolorsoul) {
- features.require("ct-xcolor-soul");
- features.require("soul");
- features.require("xcolor");
- features.require("pdfcolmk"); // improves color handling in PDF output
- } else {
- features.require("ct-none");
- }
- }
- }
-
- // AMS Style is at document level
- if (params().use_amsmath == BufferParams::package_on
- || tclass.provides("amsmath"))
- features.require("amsmath");
- if (params().use_esint == BufferParams::package_on)
- features.require("esint");
+ params().validate(features);
- loadChildDocuments();
+ updateMacros();
for_each(paragraphs().begin(), paragraphs().end(),
boost::bind(&Paragraph::validate, _1, boost::ref(features)));
- // the bullet shapes are buffer level not paragraph level
- // so they are tested here
- for (int i = 0; i < 4; ++i) {
- if (params().user_defined_bullet(i) != ITEMIZE_DEFAULTS[i]) {
- int const font = params().user_defined_bullet(i).getFont();
- if (font == 0) {
- int const c = params()
- .user_defined_bullet(i)
- .getCharacter();
- if (c == 16
- || c == 17
- || c == 25
- || c == 26
- || c == 31) {
- features.require("latexsym");
- }
- } else if (font == 1) {
- features.require("amssymb");
- } else if ((font >= 2 && font <= 5)) {
- features.require("pifont");
- }
- }
- }
-
if (lyxerr.debugging(Debug::LATEX)) {
features.showStruct();
}
void Buffer::getLabelList(vector<docstring> & list) const
{
- /// if this is a child document and the parent is already loaded
- /// Use the parent's list instead [ale990407]
- Buffer const * tmp = masterBuffer();
- if (!tmp) {
- lyxerr << "masterBuffer() failed!" << endl;
- BOOST_ASSERT(tmp);
- }
- if (tmp != this) {
- tmp->getLabelList(list);
+ // If this is a child document, use the parent's list instead.
+ if (d->parent_buffer) {
+ d->parent_buffer->getLabelList(list);
return;
}
- loadChildDocuments();
-
- for (InsetIterator it = inset_iterator_begin(inset()); it; ++it)
- it.nextInset()->getLabelList(*this, list);
+ list.clear();
+ Toc & toc = d->toc_backend.toc("label");
+ TocIterator toc_it = toc.begin();
+ TocIterator end = toc.end();
+ for (; toc_it != end; ++toc_it) {
+ if (toc_it->depth() == 0)
+ list.push_back(toc_it->str());
+ }
}
-void Buffer::updateBibfilesCache() const
+void Buffer::updateBibfilesCache(UpdateScope scope) const
{
- // if this is a child document and the parent is already loaded
- // update the parent's cache instead
- Buffer const * tmp = masterBuffer();
- BOOST_ASSERT(tmp);
- if (tmp != this) {
- tmp->updateBibfilesCache();
+ // If this is a child document, use the parent's cache instead.
+ if (d->parent_buffer && scope != UpdateChildOnly) {
+ d->parent_buffer->updateBibfilesCache();
return;
}
if (it->lyxCode() == BIBTEX_CODE) {
InsetBibtex const & inset =
static_cast<InsetBibtex const &>(*it);
- FileNameList const bibfiles = inset.getFiles(*this);
+ support::FileNameList const bibfiles = inset.getBibFiles();
d->bibfilesCache_.insert(d->bibfilesCache_.end(),
bibfiles.begin(),
bibfiles.end());
} else if (it->lyxCode() == INCLUDE_CODE) {
InsetInclude & inset =
static_cast<InsetInclude &>(*it);
- inset.updateBibfilesCache(*this);
- FileNameList const & bibfiles =
- inset.getBibfilesCache(*this);
+ inset.updateBibfilesCache();
+ support::FileNameList const & bibfiles =
+ inset.getBibfilesCache();
d->bibfilesCache_.insert(d->bibfilesCache_.end(),
bibfiles.begin(),
bibfiles.end());
}
}
+ // the bibinfo cache is now invalid
+ d->bibinfoCacheValid_ = false;
}
-FileNameList const & Buffer::getBibfilesCache() const
+// Marks the cached bibliography info stale; localBibInfo() will rescan
+// the insets on its next call.
+void Buffer::invalidateBibinfoCache()
{
-	// if this is a child document and the parent is already loaded
-	// use the parent's cache instead
-	Buffer const * tmp = masterBuffer();
-	BOOST_ASSERT(tmp);
-	if (tmp != this)
-		return tmp->getBibfilesCache();
+	d->bibinfoCacheValid_ = false;
+}
+
+
+support::FileNameList const & Buffer::getBibfilesCache(UpdateScope scope) const
+{
+	// If this is a child document, use the parent's cache instead.
+	// NOTE(review): the parent is queried with the default scope —
+	// confirm the default value of UpdateScope in Buffer.h.
+	if (d->parent_buffer && scope != UpdateChildOnly)
+		return d->parent_buffer->getBibfilesCache();
	// We update the cache when first used instead of at loading time.
	if (d->bibfilesCache_.empty())
-		const_cast<Buffer *>(this)->updateBibfilesCache();
+		const_cast<Buffer *>(this)->updateBibfilesCache(scope);
	return d->bibfilesCache_;
}
+BiblioInfo const & Buffer::masterBibInfo() const
+{
+ // if this is a child document and the parent is already loaded
+ // use the parent's list instead [ale990412]
+ Buffer const * const tmp = masterBuffer();
+ LASSERT(tmp, /**/);
+ if (tmp != this)
+ return tmp->masterBibInfo();
+ return localBibInfo();
+}
+
+
+BiblioInfo const & Buffer::localBibInfo() const
+{
+ if (d->bibinfoCacheValid_) {
+ support::FileNameList const & bibfilesCache = getBibfilesCache();
+ // compare the cached timestamps with the actual ones.
+ support::FileNameList::const_iterator ei = bibfilesCache.begin();
+ support::FileNameList::const_iterator en = bibfilesCache.end();
+ for (; ei != en; ++ ei) {
+ time_t lastw = ei->lastModified();
+ if (lastw != d->bibfileStatus_[*ei]) {
+ d->bibinfoCacheValid_ = false;
+ d->bibfileStatus_[*ei] = lastw;
+ break;
+ }
+ }
+ }
+
+ if (!d->bibinfoCacheValid_) {
+ d->bibinfo_.clear();
+ for (InsetIterator it = inset_iterator_begin(inset()); it; ++it)
+ it->fillWithBibKeys(d->bibinfo_, it);
+ d->bibinfoCacheValid_ = true;
+ }
+ return d->bibinfo_;
+}
+
+
bool Buffer::isDepClean(string const & name) const
{
DepClean::const_iterator const it = d->dep_clean.find(name);
break;
}
+ case LFUN_BRANCH_ACTIVATE:
+ case LFUN_BRANCH_DEACTIVATE: {
+ BranchList & branchList = params().branchlist();
+ docstring const branchName = func.argument();
+ Branch * branch = branchList.find(branchName);
+ if (!branch)
+ LYXERR0("Branch " << branchName << " does not exist.");
+ else
+ branch->setSelected(func.action == LFUN_BRANCH_ACTIVATE);
+ if (result)
+ *result = true;
+ }
+
default:
dispatched = false;
}
void Buffer::changeLanguage(Language const * from, Language const * to)
{
- BOOST_ASSERT(from);
- BOOST_ASSERT(to);
+ LASSERT(from, /**/);
+ LASSERT(to, /**/);
for_each(par_iterator_begin(),
par_iterator_end(),
}
-ParIterator Buffer::getParFromID(int const id) const
+DocIterator Buffer::getParFromID(int const id) const
{
- ParConstIterator it = par_iterator_begin();
- ParConstIterator const end = par_iterator_end();
-
+ Buffer * buf = const_cast<Buffer *>(this);
if (id < 0) {
// John says this is called with id == -1 from undo
lyxerr << "getParFromID(), id: " << id << endl;
- return end;
+ return doc_iterator_end(buf);
}
- for (; it != end; ++it)
- if (it->id() == id)
+ for (DocIterator it = doc_iterator_begin(buf); !it.atEnd(); it.forwardPar())
+ if (it.paragraph().id() == id)
return it;
- return end;
+ return doc_iterator_end(buf);
}
bool Buffer::hasParWithID(int const id) const
{
- ParConstIterator const it = getParFromID(id);
- return it != par_iterator_end();
+ return !getParFromID(id).atEnd();
}
ParIterator Buffer::par_iterator_begin()
{
- return lyx::par_iterator_begin(inset());
+ return ParIterator(doc_iterator_begin(this));
}
ParIterator Buffer::par_iterator_end()
{
- return lyx::par_iterator_end(inset());
+ return ParIterator(doc_iterator_end(this));
}
ParConstIterator Buffer::par_iterator_begin() const
{
- return lyx::par_const_iterator_begin(inset());
+ return ParConstIterator(doc_iterator_begin(this));
}
ParConstIterator Buffer::par_iterator_end() const
{
- return lyx::par_const_iterator_end(inset());
+ return ParConstIterator(doc_iterator_end(this));
}
bool Buffer::isExternallyModified(CheckMethod method) const
{
- BOOST_ASSERT(d->filename.exists());
+ LASSERT(d->filename.exists(), /**/);
// if method == timestamp, check timestamp before checksum
- return (method == checksum_method
+ return (method == checksum_method
|| d->timestamp_ != d->filename.lastModified())
&& d->checksum_ != d->filename.checksum();
}
string Buffer::filePath() const
{
- return d->filename.onlyPath().absFilename();
+ return d->filename.onlyPath().absFilename() + "/";
}
{
// Avoids recursive include.
d->parent_buffer = buffer == this ? 0 : buffer;
+ updateMacros();
}
-Buffer const * Buffer::parent()
+Buffer const * Buffer::parent() const
{
return d->parent_buffer;
}
+void Buffer::collectRelatives(BufferSet & bufs) const
+{
+ bufs.insert(this);
+ if (parent())
+ parent()->collectRelatives(bufs);
+
+ // loop over children
+ Impl::BufferPositionMap::iterator it = d->children_positions.begin();
+ Impl::BufferPositionMap::iterator end = d->children_positions.end();
+ for (; it != end; ++it)
+ bufs.insert(const_cast<Buffer *>(it->first));
+}
+
+
+std::vector<Buffer const *> Buffer::allRelatives() const
+{
+ BufferSet bufs;
+ collectRelatives(bufs);
+ BufferSet::iterator it = bufs.begin();
+ std::vector<Buffer const *> ret;
+ for (; it != bufs.end(); ++it)
+ ret.push_back(*it);
+ return ret;
+}
+
+
Buffer const * Buffer::masterBuffer() const
{
if (!d->parent_buffer)
return this;
-
+
return d->parent_buffer->masterBuffer();
}
-bool Buffer::hasMacro(docstring const & name, Paragraph const & par) const
+bool Buffer::isChild(Buffer * child) const
{
- Impl::PositionToMacroMap::iterator it;
- it = d->macros[name].upper_bound(par.macrocontextPosition());
- if (it != d->macros[name].end())
- return true;
+ return d->children_positions.find(child) != d->children_positions.end();
+}
- // If there is a master buffer, query that
- Buffer const * master = masterBuffer();
- if (master && master != this)
- return master->hasMacro(name);
- return MacroTable::globalMacros().has(name);
+DocIterator Buffer::firstChildPosition(Buffer const * child)
+{
+ Impl::BufferPositionMap::iterator it;
+ it = d->children_positions.find(child);
+ if (it == d->children_positions.end())
+ return DocIterator(this);
+ return it->second;
}
-bool Buffer::hasMacro(docstring const & name) const
+std::vector<Buffer *> Buffer::getChildren() const
{
- if( !d->macros[name].empty() )
- return true;
+ std::vector<Buffer *> clist;
+ // loop over children
+ Impl::BufferPositionMap::iterator it = d->children_positions.begin();
+ Impl::BufferPositionMap::iterator end = d->children_positions.end();
+ for (; it != end; ++it) {
+ Buffer * child = const_cast<Buffer *>(it->first);
+ clist.push_back(child);
+ // there might be grandchildren
+ std::vector<Buffer *> glist = child->getChildren();
+ for (vector<Buffer *>::const_iterator git = glist.begin();
+ git != glist.end(); ++git)
+ clist.push_back(*git);
+ }
+ return clist;
+}
- // If there is a master buffer, query that
- Buffer const * master = masterBuffer();
- if (master && master != this)
- return master->hasMacro(name);
- return MacroTable::globalMacros().has(name);
+template<typename M>
+typename M::iterator greatest_below(M & m, typename M::key_type const & x)
+{
+ if (m.empty())
+ return m.end();
+
+ typename M::iterator it = m.lower_bound(x);
+ if (it == m.begin())
+ return m.end();
+
+ it--;
+ return it;
}
-MacroData const & Buffer::getMacro(docstring const & name,
- Paragraph const & par) const
+MacroData const * Buffer::getBufferMacro(docstring const & name,
+ DocIterator const & pos) const
{
- Impl::PositionToMacroMap::iterator it;
- it = d->macros[name].upper_bound(par.macrocontextPosition());
- if( it != d->macros[name].end() )
- return it->second;
+ LYXERR(Debug::MACROS, "Searching for " << to_ascii(name) << " at " << pos);
- // If there is a master buffer, query that
- Buffer const * master = masterBuffer();
- if (master && master != this)
- return master->getMacro(name);
+ // if paragraphs have no macro context set, pos will be empty
+ if (pos.empty())
+ return 0;
+
+ // we haven't found anything yet
+ DocIterator bestPos = par_iterator_begin();
+ MacroData const * bestData = 0;
+
+ // find macro definitions for name
+ Impl::NamePositionScopeMacroMap::iterator nameIt
+ = d->macros.find(name);
+ if (nameIt != d->macros.end()) {
+ // find last definition in front of pos or at pos itself
+ Impl::PositionScopeMacroMap::const_iterator it
+ = greatest_below(nameIt->second, pos);
+ if (it != nameIt->second.end()) {
+ while (true) {
+ // scope ends behind pos?
+ if (pos < it->second.first) {
+ // Looks good, remember this. If there
+ // is no external macro behind this,
+ // we found the right one already.
+ bestPos = it->first;
+ bestData = &it->second.second;
+ break;
+ }
+
+ // try previous macro if there is one
+ if (it == nameIt->second.begin())
+ break;
+ it--;
+ }
+ }
+ }
+
+ // find macros in included files
+ Impl::PositionScopeBufferMap::const_iterator it
+ = greatest_below(d->position_to_children, pos);
+ if (it == d->position_to_children.end())
+ // no children before
+ return bestData;
+
+ while (true) {
+ // do we know something better (i.e. later) already?
+ if (it->first < bestPos )
+ break;
+
+ // scope ends behind pos?
+ if (pos < it->second.first) {
+ // look for macro in external file
+ d->macro_lock = true;
+ MacroData const * data
+ = it->second.second->getMacro(name, false);
+ d->macro_lock = false;
+ if (data) {
+ bestPos = it->first;
+ bestData = data;
+ break;
+ }
+ }
- return MacroTable::globalMacros().get(name);
+ // try previous file if there is one
+ if (it == d->position_to_children.begin())
+ break;
+ --it;
+ }
+
+ // return the best macro we have found
+ return bestData;
}
-MacroData const & Buffer::getMacro(docstring const & name) const
+MacroData const * Buffer::getMacro(docstring const & name,
+ DocIterator const & pos, bool global) const
{
- Impl::PositionToMacroMap::iterator it;
- it = d->macros[name].begin();
- if( it != d->macros[name].end() )
- return it->second;
+ if (d->macro_lock)
+ return 0;
+
+ // query buffer macros
+ MacroData const * data = getBufferMacro(name, pos);
+ if (data != 0)
+ return data;
// If there is a master buffer, query that
- Buffer const * master = masterBuffer();
- if (master && master != this)
- return master->getMacro(name);
+ if (d->parent_buffer) {
+ d->macro_lock = true;
+ MacroData const * macro = d->parent_buffer->getMacro(
+ name, *this, false);
+ d->macro_lock = false;
+ if (macro)
+ return macro;
+ }
+
+ if (global) {
+ data = MacroTable::globalMacros().get(name);
+ if (data != 0)
+ return data;
+ }
- return MacroTable::globalMacros().get(name);
+ return 0;
}
-void Buffer::updateMacros()
+MacroData const * Buffer::getMacro(docstring const & name, bool global) const
{
- // start with empty table
- d->macros = Impl::NameToPositionMacroMap();
-
- // Iterate over buffer
- ParagraphList & pars = text().paragraphs();
- for (size_t i = 0, n = pars.size(); i != n; ++i) {
- // set position again
- pars[i].setMacrocontextPosition(i);
-
- //lyxerr << "searching main par " << i
- // << " for macro definitions" << std::endl;
- InsetList const & insets = pars[i].insetList();
- InsetList::const_iterator it = insets.begin();
+ // set scope end behind the last paragraph
+ DocIterator scope = par_iterator_begin();
+ scope.pit() = scope.lastpit() + 1;
+
+ return getMacro(name, scope, global);
+}
+
+
+MacroData const * Buffer::getMacro(docstring const & name,
+ Buffer const & child, bool global) const
+{
+ // look where the child buffer is included first
+ Impl::BufferPositionMap::iterator it = d->children_positions.find(&child);
+ if (it == d->children_positions.end())
+ return 0;
+
+ // check for macros at the inclusion position
+ return getMacro(name, it->second, global);
+}
+
+
+void Buffer::updateMacros(DocIterator & it, DocIterator & scope) const
+{
+ pit_type lastpit = it.lastpit();
+
+ // look for macros in each paragraph
+ while (it.pit() <= lastpit) {
+ Paragraph & par = it.paragraph();
+
+ // iterate over the insets of the current paragraph
+ InsetList const & insets = par.insetList();
+ InsetList::const_iterator iit = insets.begin();
InsetList::const_iterator end = insets.end();
- for ( ; it != end; ++it) {
- if (it->inset->lyxCode() != MATHMACRO_CODE)
+ for (; iit != end; ++iit) {
+ it.pos() = iit->pos;
+
+ // is it a nested text inset?
+ if (iit->inset->asInsetText()) {
+ // Inset needs its own scope?
+ InsetText const * itext = iit->inset->asInsetText();
+ bool newScope = itext->isMacroScope();
+
+ // scope which ends just behind the inset
+ DocIterator insetScope = it;
+ ++insetScope.pos();
+
+ // collect macros in inset
+ it.push_back(CursorSlice(*iit->inset));
+ updateMacros(it, newScope ? insetScope : scope);
+ it.pop_back();
+ continue;
+ }
+
+ // is it an external file?
+ if (iit->inset->lyxCode() == INCLUDE_CODE) {
+ // get buffer of external file
+ InsetInclude const & inset =
+ static_cast<InsetInclude const &>(*iit->inset);
+ d->macro_lock = true;
+ Buffer * child = inset.getChildBuffer();
+ d->macro_lock = false;
+ if (!child)
+ continue;
+
+ // register its position, but only when it is
+ // included first in the buffer
+ if (d->children_positions.find(child) ==
+ d->children_positions.end())
+ d->children_positions[child] = it;
+
+ // register child with its scope
+ d->position_to_children[it] = Impl::ScopeBuffer(scope, child);
+ continue;
+ }
+
+ if (iit->inset->lyxCode() != MATHMACRO_CODE)
continue;
-
+
// get macro data
- MathMacroTemplate const & macroTemplate
- = static_cast<MathMacroTemplate const &>(*it->inset);
+ MathMacroTemplate & macroTemplate =
+ static_cast<MathMacroTemplate &>(*iit->inset);
+ MacroContext mc(*this, it);
+ macroTemplate.updateToContext(mc);
// valid?
- if (macroTemplate.validMacro()) {
- MacroData macro = macroTemplate.asMacroData();
+ bool valid = macroTemplate.validMacro();
+ // FIXME: Should be fixNameAndCheckIfValid() in fact,
+ // then the BufferView's cursor will be invalid in
+ // some cases which leads to crashes.
+ if (!valid)
+ continue;
- // redefinition?
- // call hasMacro here instead of directly querying mc to
- // also take the master document into consideration
- macro.setRedefinition(hasMacro(macroTemplate.name()));
+ // register macro
+ d->macros[macroTemplate.name()][it] =
+ Impl::ScopeMacro(scope, MacroData(*this, it));
+ }
- // register macro (possibly overwrite the previous one of this paragraph)
- d->macros[macroTemplate.name()][i] = macro;
- }
+ // next paragraph
+ it.pit()++;
+ it.pos() = 0;
+ }
+}
+
+
+void Buffer::updateMacros() const
+{
+ if (d->macro_lock)
+ return;
+
+ LYXERR(Debug::MACROS, "updateMacro of " << d->filename.onlyFileName());
+
+ // start with empty table
+ d->macros.clear();
+ d->children_positions.clear();
+ d->position_to_children.clear();
+
+ // Iterate over buffer, starting with first paragraph
+ // The scope must be bigger than any lookup DocIterator
+ // later. For the global lookup, lastpit+1 is used, hence
+ // we use lastpit+2 here.
+ DocIterator it = par_iterator_begin();
+ DocIterator outerScope = it;
+ outerScope.pit() = outerScope.lastpit() + 2;
+ updateMacros(it, outerScope);
+}
+
+
+void Buffer::updateMacroInstances() const
+{
+ LYXERR(Debug::MACROS, "updateMacroInstances for "
+ << d->filename.onlyFileName());
+ DocIterator it = doc_iterator_begin(this);
+ DocIterator end = doc_iterator_end(this);
+ for (; it != end; it.forwardPos()) {
+ // look for MathData cells in InsetMathNest insets
+ Inset * inset = it.nextInset();
+ if (!inset)
+ continue;
+
+ InsetMath * minset = inset->asInsetMath();
+ if (!minset)
+ continue;
+
+ // update macro in all cells of the InsetMathNest
+ DocIterator::idx_type n = minset->nargs();
+ MacroContext mc = MacroContext(*this, it);
+ for (DocIterator::idx_type i = 0; i < n; ++i) {
+ MathData & data = minset->cell(i);
+ data.updateMacros(0, mc);
}
}
}
+void Buffer::listMacroNames(MacroNameSet & macros) const
+{
+ if (d->macro_lock)
+ return;
+
+ d->macro_lock = true;
+
+ // loop over macro names
+ Impl::NamePositionScopeMacroMap::iterator nameIt = d->macros.begin();
+ Impl::NamePositionScopeMacroMap::iterator nameEnd = d->macros.end();
+ for (; nameIt != nameEnd; ++nameIt)
+ macros.insert(nameIt->first);
+
+ // loop over children
+ Impl::BufferPositionMap::iterator it = d->children_positions.begin();
+ Impl::BufferPositionMap::iterator end = d->children_positions.end();
+ for (; it != end; ++it)
+ it->first->listMacroNames(macros);
+
+ // call parent
+ if (d->parent_buffer)
+ d->parent_buffer->listMacroNames(macros);
+
+ d->macro_lock = false;
+}
+
+
+void Buffer::listParentMacros(MacroSet & macros, LaTeXFeatures & features) const
+{
+ if (!d->parent_buffer)
+ return;
+
+ MacroNameSet names;
+ d->parent_buffer->listMacroNames(names);
+
+ // resolve macros
+ MacroNameSet::iterator it = names.begin();
+ MacroNameSet::iterator end = names.end();
+ for (; it != end; ++it) {
+ // defined?
+ MacroData const * data =
+ d->parent_buffer->getMacro(*it, *this, false);
+ if (data) {
+ macros.insert(data);
+
+			// we cannot access the original MathMacroTemplate anymore
+			// here to call its validate() method. So we do its work here manually.
+ // FIXME: somehow make the template accessible here.
+ if (data->optionals() > 0)
+ features.require("xargs");
+ }
+ }
+}
+
+
+Buffer::References & Buffer::references(docstring const & label)
+{
+ if (d->parent_buffer)
+ return const_cast<Buffer *>(masterBuffer())->references(label);
+
+ RefCache::iterator it = d->ref_cache_.find(label);
+ if (it != d->ref_cache_.end())
+ return it->second.second;
+
+ static InsetLabel const * dummy_il = 0;
+ static References const dummy_refs;
+ it = d->ref_cache_.insert(
+ make_pair(label, make_pair(dummy_il, dummy_refs))).first;
+ return it->second.second;
+}
+
+
+Buffer::References const & Buffer::references(docstring const & label) const
+{
+ return const_cast<Buffer *>(this)->references(label);
+}
+
+
+void Buffer::setInsetLabel(docstring const & label, InsetLabel const * il)
+{
+ masterBuffer()->d->ref_cache_[label].first = il;
+}
+
+
+InsetLabel const * Buffer::insetLabel(docstring const & label) const
+{
+ return masterBuffer()->d->ref_cache_[label].first;
+}
+
+
+void Buffer::clearReferenceCache() const
+{
+ if (!d->parent_buffer)
+ d->ref_cache_.clear();
+}
+
+
void Buffer::changeRefsIfUnique(docstring const & from, docstring const & to,
InsetCode code)
{
//FIXME: This does not work for child documents yet.
- BOOST_ASSERT(code == CITE_CODE || code == REF_CODE);
+ LASSERT(code == CITE_CODE, /**/);
// Check if the label 'from' appears more than once
vector<docstring> labels;
-
string paramName;
- if (code == CITE_CODE) {
- BiblioInfo keys;
- keys.fillWithBibKeys(this);
- BiblioInfo::const_iterator bit = keys.begin();
- BiblioInfo::const_iterator bend = keys.end();
+ BiblioInfo const & keys = masterBibInfo();
+ BiblioInfo::const_iterator bit = keys.begin();
+ BiblioInfo::const_iterator bend = keys.end();
- for (; bit != bend; ++bit)
- // FIXME UNICODE
- labels.push_back(bit->first);
- paramName = "key";
- } else {
- getLabelList(labels);
- paramName = "reference";
- }
+ for (; bit != bend; ++bit)
+ // FIXME UNICODE
+ labels.push_back(bit->first);
+ paramName = "key";
- if (std::count(labels.begin(), labels.end(), from) > 1)
+ if (count(labels.begin(), labels.end(), from) > 1)
return;
for (InsetIterator it = inset_iterator_begin(inset()); it; ++it) {
void Buffer::getSourceCode(odocstream & os, pit_type par_begin,
- pit_type par_end, bool full_source)
+ pit_type par_end, bool full_source) const
{
OutputParams runparams(¶ms().encoding());
runparams.nice = true;
os << "% " << _("Preview source code") << "\n\n";
d->texrow.newline();
d->texrow.newline();
- if (isLatex())
- writeLaTeXSource(os, filePath(), runparams, true, true);
- else {
+ if (isDocBook())
writeDocBookSource(os, absFileName(), runparams, false);
- }
+ else
+ // latex or literate
+ writeLaTeXSource(os, string(), runparams, true, true);
} else {
runparams.par_begin = par_begin;
runparams.par_end = par_end;
- if (par_begin + 1 == par_end)
+ if (par_begin + 1 == par_end) {
os << "% "
<< bformat(_("Preview source code for paragraph %1$d"), par_begin)
<< "\n\n";
- else
+ } else {
os << "% "
<< bformat(_("Preview source code from paragraph %1$s to %2$s"),
convert<docstring>(par_begin),
convert<docstring>(par_end - 1))
<< "\n\n";
+ }
d->texrow.newline();
d->texrow.newline();
// output paragraphs
- if (isLatex()) {
- latexParagraphs(*this, paragraphs(), os, d->texrow, runparams);
- } else {
- // DocBook
+ if (isDocBook())
docbookParagraphs(paragraphs(), *this, os, runparams);
- }
+ else
+ // latex or literate
+ latexParagraphs(*this, text(), os, d->texrow, runparams);
}
}
ErrorList & Buffer::errorList(string const & type) const
{
static ErrorList emptyErrorList;
- std::map<string, ErrorList>::iterator I = d->errorLists.find(type);
+ map<string, ErrorList>::iterator I = d->errorLists.find(type);
if (I == d->errorLists.end())
return emptyErrorList;
}
+void Buffer::updateTocItem(std::string const & type,
+ DocIterator const & dit) const
+{
+ if (gui_)
+ gui_->updateTocItem(type, dit);
+}
+
+
void Buffer::structureChanged() const
{
if (gui_)
}
-void Buffer::errors(std::string const & err) const
+void Buffer::errors(string const & err) const
{
if (gui_)
gui_->errors(err);
}
+bool Buffer::hasGuiDelegate() const
+{
+ return gui_;
+}
+
+
void Buffer::setGuiDelegate(frontend::GuiBufferDelegate * gui)
{
gui_ = gui;
namespace {
-class AutoSaveBuffer : public support::ForkedProcess {
+class AutoSaveBuffer : public ForkedProcess {
public:
///
AutoSaveBuffer(Buffer const & buffer, FileName const & fname)
///
int start()
{
- command_ = to_utf8(bformat(_("Auto-saving %1$s"),
+ command_ = to_utf8(bformat(_("Auto-saving %1$s"),
from_utf8(fname_.absFilename())));
return run(DontWait);
}
};
-#if !defined (HAVE_FORK)
-# define fork() -1
-#endif
-
int AutoSaveBuffer::generateChild()
{
// tmp_ret will be located (usually) in /tmp
// will that be a problem?
+ // Note that this calls ForkedCalls::fork(), so it's
+ // ok cross-platform.
pid_t const pid = fork();
// If you want to debug the autosave
// you should set pid to -1, and comment out the fork.
- if (pid == 0 || pid == -1) {
- // pid = -1 signifies that lyx was unable
- // to fork. But we will do the save
- // anyway.
- bool failed = false;
-
- FileName const tmp_ret(tempName(FileName(), "lyxauto"));
- if (!tmp_ret.empty()) {
- buffer_.writeFile(tmp_ret);
- // assume successful write of tmp_ret
- if (!rename(tmp_ret, fname_)) {
- failed = true;
- // most likely couldn't move between
- // filesystems unless write of tmp_ret
- // failed so remove tmp file (if it
- // exists)
- tmp_ret.removeFile();
- }
- } else {
+ if (pid != 0 && pid != -1)
+ return pid;
+
+ // pid = -1 signifies that lyx was unable
+ // to fork. But we will do the save
+ // anyway.
+ bool failed = false;
+ FileName const tmp_ret = FileName::tempName("lyxauto");
+ if (!tmp_ret.empty()) {
+ buffer_.writeFile(tmp_ret);
+ // assume successful write of tmp_ret
+ if (!tmp_ret.moveTo(fname_))
failed = true;
- }
-
- if (failed) {
- // failed to write/rename tmp_ret so try writing direct
- if (!buffer_.writeFile(fname_)) {
- // It is dangerous to do this in the child,
- // but safe in the parent, so...
- if (pid == -1) // emit message signal.
- buffer_.message(_("Autosave failed!"));
- }
- }
- if (pid == 0) { // we are the child so...
- _exit(0);
+ } else
+ failed = true;
+
+ if (failed) {
+ // failed to write/rename tmp_ret so try writing direct
+ if (!buffer_.writeFile(fname_)) {
+ // It is dangerous to do this in the child,
+ // but safe in the parent, so...
+ if (pid == -1) // emit message signal.
+ buffer_.message(_("Autosave failed!"));
}
}
+
+ if (pid == 0) // we are the child so...
+ _exit(0);
+
return pid;
}
}
-void Buffer::resetChildDocuments(bool close_them) const
-{
- for (InsetIterator it = inset_iterator_begin(inset()); it; ++it) {
- if (it->lyxCode() != INCLUDE_CODE)
- continue;
- InsetCommand const & inset = static_cast<InsetCommand const &>(*it);
- InsetCommandParams const & ip = inset.params();
-
- resetParentBuffer(this, ip, close_them);
- }
-
- if (use_gui && masterBuffer() == this)
- updateLabels(*this);
-}
-
-
-void Buffer::loadChildDocuments() const
-{
- bool parse_error = false;
-
- for (InsetIterator it = inset_iterator_begin(inset()); it; ++it) {
- if (it->lyxCode() != INCLUDE_CODE)
- continue;
- InsetCommand const & inset = static_cast<InsetCommand const &>(*it);
- InsetCommandParams const & ip = inset.params();
- Buffer * child = loadIfNeeded(*this, ip);
- if (!child)
- continue;
- parse_error |= !child->errorList("Parse").empty();
- child->loadChildDocuments();
- }
-
- if (use_gui && masterBuffer() == this)
- updateLabels(*this);
-}
-
-
string Buffer::bufferFormat() const
{
if (isDocBook())
return "docbook";
if (isLiterate())
return "literate";
+ if (params().encoding().package() == Encoding::japanese)
+ return "platex";
return "latex";
}
filename = changeExtension(filename,
formats.extension(backend_format));
+ // fix macros
+ updateMacroInstances();
+
// Plain text backend
if (backend_format == "text")
writePlaintextFile(*this, FileName(filename), runparams);
if (!makeLaTeXFile(FileName(filename), string(), runparams))
return false;
} else if (!lyxrc.tex_allows_spaces
- && support::contains(filePath(), ' ')) {
+ && contains(filePath(), ' ')) {
Alert::error(_("File name error"),
_("The directory path to the document cannot contain spaces."));
return false;
string const error_type = (format == "program")
? "Build" : bufferFormat();
+ ErrorList & error_list = d->errorLists[error_type];
string const ext = formats.extension(format);
FileName const tmp_result_file(changeExtension(filename, ext));
bool const success = theConverters().convert(this, FileName(filename),
tmp_result_file, FileName(absFileName()), backend_format, format,
- errorList(error_type));
+ error_list);
// Emit the signal to show the error list.
if (format != backend_format)
errors(error_type);
if (!success)
return false;
- if (put_in_tempdir)
+ if (put_in_tempdir) {
result_file = tmp_result_file.absFilename();
- else {
- result_file = changeExtension(absFileName(), ext);
- // We need to copy referenced files (e. g. included graphics
- // if format == "dvi") to the result dir.
- vector<ExportedFile> const files =
- runparams.exportdata->externalFiles(format);
- string const dest = onlyPath(result_file);
- CopyStatus status = SUCCESS;
- for (vector<ExportedFile>::const_iterator it = files.begin();
- it != files.end() && status != CANCEL; ++it) {
- string const fmt =
- formats.getFormatFromFile(it->sourceName);
- status = copyFile(fmt, it->sourceName,
- makeAbsPath(it->exportName, dest),
- it->exportName, status == FORCE);
- }
- if (status == CANCEL) {
- message(_("Document export cancelled."));
- } else if (tmp_result_file.exists()) {
- // Finally copy the main file
- status = copyFile(format, tmp_result_file,
- FileName(result_file), result_file,
- status == FORCE);
- message(bformat(_("Document exported as %1$s "
- "to file `%2$s'"),
- formats.prettyName(format),
- makeDisplayPath(result_file)));
- } else {
- // This must be a dummy converter like fax (bug 1888)
- message(bformat(_("Document exported as %1$s"),
- formats.prettyName(format)));
- }
+ return true;
+ }
+
+ result_file = changeExtension(absFileName(), ext);
+ // We need to copy referenced files (e. g. included graphics
+ // if format == "dvi") to the result dir.
+ vector<ExportedFile> const files =
+ runparams.exportdata->externalFiles(format);
+ string const dest = onlyPath(result_file);
+ CopyStatus status = SUCCESS;
+ for (vector<ExportedFile>::const_iterator it = files.begin();
+ it != files.end() && status != CANCEL; ++it) {
+ string const fmt = formats.getFormatFromFile(it->sourceName);
+ status = copyFile(fmt, it->sourceName,
+ makeAbsPath(it->exportName, dest),
+ it->exportName, status == FORCE);
+ }
+ if (status == CANCEL) {
+ message(_("Document export cancelled."));
+ } else if (tmp_result_file.exists()) {
+ // Finally copy the main file
+ status = copyFile(format, tmp_result_file,
+ FileName(result_file), result_file,
+ status == FORCE);
+ message(bformat(_("Document exported as %1$s "
+ "to file `%2$s'"),
+ formats.prettyName(format),
+ makeDisplayPath(result_file)));
+ } else {
+ // This must be a dummy converter like fax (bug 1888)
+ message(bformat(_("Document exported as %1$s"),
+ formats.prettyName(format)));
}
return true;
vector<string> Buffer::backends() const
{
vector<string> v;
- if (params().getTextClass().isTeXClassAvailable()) {
+ if (params().baseClass()->isTeXClassAvailable()) {
v.push_back(bufferFormat());
// FIXME: Don't hardcode format names here, but use a flag
if (v.back() == "latex")
}
}
+
+// Set this Buffer as the owning buffer of the top-level inset (and,
+// presumably, of everything nested inside it via Inset::setBuffer --
+// TODO confirm that setBuffer descends recursively).
+// Logically const: only back-pointers are updated, not document content.
+void Buffer::setBuffersForInsets() const
+{
+	// const_cast needed because setBuffer() takes a non-const Buffer &.
+	inset().setBuffer(const_cast<Buffer &>(*this));
+}
+
+
+// Recompute labels/counters for this buffer. With scope == UpdateMaster
+// the update is delegated to the master document first, so that counters
+// run continuously across included child documents.
+// NOTE(review): bufToUpdate is function-local static shared state -- not
+// thread-safe; assumes label updates happen on a single (GUI) thread.
+void Buffer::updateLabels(UpdateScope scope) const
+{
+	// Use the master text class also for child documents
+	Buffer const * const master = masterBuffer();
+	DocumentClass const & textclass = master->params().documentClass();
+
+	// keep the buffers to be children in this set. If the call from the
+	// master comes back we can see which of them were actually seen (i.e.
+	// via an InsetInclude). The remaining ones in the set need still be updated.
+	static std::set<Buffer const *> bufToUpdate;
+	if (scope == UpdateMaster) {
+		// If this is a child document start with the master
+		if (master != this) {
+			bufToUpdate.insert(this);
+			master->updateLabels();
+			// Do this here in case the master has no gui associated with it. Then,
+			// the TocModel is not updated and TocModel::toc_ is invalid (bug 5699).
+			if (!master->gui_)
+				structureChanged();
+
+			// was buf referenced from the master (i.e. not in bufToUpdate anymore)?
+			if (bufToUpdate.find(this) == bufToUpdate.end())
+				return;
+		}
+
+		// start over the counters in the master
+		textclass.counters().reset();
+	}
+
+	// update will be done below for this buffer
+	bufToUpdate.erase(this);
+
+	// update all caches
+	clearReferenceCache();
+	updateMacros();
+
+	Buffer & cbuf = const_cast<Buffer &>(*this);
+
+	LASSERT(!text().paragraphs().empty(), /**/);
+
+	// do the real work: walk every paragraph of this buffer
+	ParIterator parit = cbuf.par_iterator_begin();
+	updateLabels(parit);
+
+	if (master != this)
+		// TocBackend update will be done later.
+		return;
+
+	cbuf.tocBackend().update();
+	if (scope == UpdateMaster)
+		cbuf.structureChanged();
+}
+
+
+// Accumulated paragraph nesting depth at the iterator's position,
+// counting only text (non-math) slices of the iterator stack.
+// NOTE(review): depth_type is unsigned, so "depth - 1" would wrap if
+// depth were still 0 here; presumably the outermost text slice always
+// contributes at least 1 -- confirm at call sites.
+static depth_type getDepth(DocIterator const & it)
+{
+	depth_type depth = 0;
+	for (size_t i = 0 ; i < it.depth() ; ++i)
+		if (!it[i].inset().inMathed())
+			depth += it[i].paragraph().getDepth() + 1;
+	// remove 1 since the outer inset does not count
+	return depth - 1;
+}
+
+// Determine the itemize/enumerate nesting level (itemdepth) of the
+// paragraph at 'it' by scanning backwards for the nearest previous
+// paragraph with the same label type that is not nested more deeply.
+// Returns 0 for paragraphs that are not LABEL_ENUMERATE/LABEL_ITEMIZE,
+// or when the scan reaches the start of the document.
+static depth_type getItemDepth(ParIterator const & it)
+{
+	Paragraph const & par = *it;
+	LabelType const labeltype = par.layout().labeltype;
+
+	if (labeltype != LABEL_ENUMERATE && labeltype != LABEL_ITEMIZE)
+		return 0;
+
+	// this will hold the lowest depth encountered up to now.
+	depth_type min_depth = getDepth(it);
+	ParIterator prev_it = it;
+	while (true) {
+		// step one paragraph backwards, leaving the inset if we are
+		// already at its first paragraph
+		if (prev_it.pit())
+			--prev_it.top().pit();
+		else {
+			// start of nested inset: go to outer par
+			prev_it.pop_back();
+			if (prev_it.empty()) {
+				// start of document: nothing to do
+				return 0;
+			}
+		}
+
+		// We search for the first paragraph with same label
+		// that is not more deeply nested.
+		Paragraph & prev_par = *prev_it;
+		depth_type const prev_depth = getDepth(prev_it);
+		if (labeltype == prev_par.layout().labeltype) {
+			if (prev_depth < min_depth)
+				// shallower same-type item: we are one level deeper
+				return prev_par.itemdepth + 1;
+			if (prev_depth == min_depth)
+				// sibling item: inherit its depth
+				return prev_par.itemdepth;
+		}
+		min_depth = min(min_depth, prev_depth);
+		// small optimization: if we are at depth 0, we won't
+		// find anything else
+		if (prev_depth == 0)
+			return 0;
+	}
+}
+
+
+// Decide whether the enumeration counter must be reset for the
+// LABEL_ENUMERATE paragraph at 'it': true if the nearest previous
+// paragraph at the same or shallower depth (within the same inset)
+// is not itself an enumeration, or if there is no such paragraph.
+static bool needEnumCounterReset(ParIterator const & it)
+{
+	Paragraph const & par = *it;
+	LASSERT(par.layout().labeltype == LABEL_ENUMERATE, /**/);
+	depth_type const cur_depth = par.getDepth();
+	ParIterator prev_it = it;
+	while (prev_it.pit()) {
+		--prev_it.top().pit();
+		Paragraph const & prev_par = *prev_it;
+		if (prev_par.getDepth() <= cur_depth)
+			return prev_par.layout().labeltype != LABEL_ENUMERATE;
+	}
+	// start of nested inset: reset
+	return true;
+}
+
+
+// set the label of a paragraph. This includes the counters.
+// Steps the relevant counter in the (master) document class and stores
+// the resulting label string and appendix state in the paragraph's
+// parameters. Must be called in document order, since counters and
+// itemdepth depend on the paragraphs already visited.
+static void setLabel(Buffer const & buf, ParIterator & it)
+{
+	BufferParams const & bp = buf.masterBuffer()->params();
+	DocumentClass const & textclass = bp.documentClass();
+	Paragraph & par = it.paragraph();
+	Layout const & layout = par.layout();
+	Counters & counters = textclass.counters();
+
+	if (par.params().startOfAppendix()) {
+		// FIXME: only the counter corresponding to toplevel
+		// sectionning should be reset
+		counters.reset();
+		counters.appendix(true);
+	}
+	par.params().appendix(counters.appendix());
+
+	// Compute the item depth of the paragraph
+	par.itemdepth = getItemDepth(it);
+
+	if (layout.margintype == MARGIN_MANUAL) {
+		if (par.params().labelWidthString().empty())
+			par.params().labelWidthString(par.translateIfPossible(layout.labelstring(), bp));
+	} else {
+		par.params().labelWidthString(docstring());
+	}
+
+	switch(layout.labeltype) {
+	case LABEL_COUNTER:
+		// number only up to secnumdepth, and only the first paragraph
+		// of a numbered environment
+		if (layout.toclevel <= bp.secnumdepth
+		    && (layout.latextype != LATEX_ENVIRONMENT
+			|| isFirstInSequence(it.pit(), it.plist()))) {
+			counters.step(layout.counter);
+			par.params().labelString(
+				par.expandLabel(layout, bp));
+		} else
+			par.params().labelString(docstring());
+		break;
+
+	case LABEL_ITEMIZE: {
+		// At some point of time we should do something more
+		// clever here, like:
+		// par.params().labelString(
+		// bp.user_defined_bullet(par.itemdepth).getText());
+		// for now, use a simple hardcoded label
+		docstring itemlabel;
+		switch (par.itemdepth) {
+		case 0:
+			itemlabel = char_type(0x2022); // bullet
+			break;
+		case 1:
+			itemlabel = char_type(0x2013); // en dash
+			break;
+		case 2:
+			itemlabel = char_type(0x2217); // asterisk operator
+			break;
+		case 3:
+			itemlabel = char_type(0x2219); // or 0x00b7
+			break;
+		}
+		par.params().labelString(itemlabel);
+		break;
+	}
+
+	case LABEL_ENUMERATE: {
+		// FIXME: Yes I know this is a really, really! bad solution
+		// (Lgb)
+		docstring enumcounter = from_ascii("enum");
+
+		// intentional fall-through: depth 2 -> "enumiii",
+		// depth 1 -> "enumii", depth 0 -> "enumi"
+		switch (par.itemdepth) {
+		case 2:
+			enumcounter += 'i';
+			// fall through
+		case 1:
+			enumcounter += 'i';
+			// fall through
+		case 0:
+			enumcounter += 'i';
+			break;
+		case 3:
+			enumcounter += "iv";
+			break;
+		default:
+			// not a valid enumdepth...
+			break;
+		}
+
+		// Maybe we have to reset the enumeration counter.
+		if (needEnumCounterReset(it))
+			counters.reset(enumcounter);
+
+		counters.step(enumcounter);
+
+		string format;
+
+		switch (par.itemdepth) {
+		case 0:
+			format = N_("\\arabic{enumi}.");
+			break;
+		case 1:
+			format = N_("(\\alph{enumii})");
+			break;
+		case 2:
+			format = N_("\\roman{enumiii}.");
+			break;
+		case 3:
+			format = N_("\\Alph{enumiv}.");
+			break;
+		default:
+			// not a valid enumdepth...
+			break;
+		}
+
+		par.params().labelString(counters.counterLabel(
+			par.translateIfPossible(from_ascii(format), bp)));
+
+		break;
+	}
+
+	case LABEL_SENSITIVE: {
+		// label depends on the float we are inside of (figure, table...)
+		string const & type = counters.current_float();
+		docstring full_label;
+		if (type.empty())
+			full_label = buf.B_("Senseless!!! ");
+		else {
+			docstring name = buf.B_(textclass.floats().getType(type).name());
+			if (counters.hasCounter(from_utf8(type))) {
+				counters.step(from_utf8(type));
+				full_label = bformat(from_ascii("%1$s %2$s:"),
+						     name,
+						     counters.theCounter(from_utf8(type)));
+			} else
+				full_label = bformat(from_ascii("%1$s #:"), name);
+		}
+		par.params().labelString(full_label);
+		break;
+	}
+
+	case LABEL_NO_LABEL:
+		par.params().labelString(docstring());
+		break;
+
+	case LABEL_MANUAL:
+	case LABEL_TOP_ENVIRONMENT:
+	case LABEL_CENTERED_TOP_ENVIRONMENT:
+	case LABEL_STATIC:
+	case LABEL_BIBLIO:
+		// fixed label taken from the layout definition
+		par.params().labelString(
+			par.translateIfPossible(layout.labelstring(), bp));
+		break;
+	}
+}
+
+
+// Per-paragraph worker for updateLabels(): iterates over all paragraphs
+// of the text that 'parit' points into (must start at pit 0), clamps
+// nesting depth, sets each paragraph's label via setLabel(), and then
+// recurses into every inset of the paragraph.
+void Buffer::updateLabels(ParIterator & parit) const
+{
+	LASSERT(parit.pit() == 0, /**/);
+
+	// set the position of the text in the buffer to be able
+	// to resolve macros in it. This has nothing to do with
+	// labels, but by putting it here we avoid implementing
+	// a whole bunch of traversal routines just for this call.
+	parit.text()->setMacrocontextPosition(parit);
+
+	depth_type maxdepth = 0;
+	pit_type const lastpit = parit.lastpit();
+	for ( ; parit.pit() <= lastpit ; ++parit.pit()) {
+		// reduce depth if necessary
+		parit->params().depth(min(parit->params().depth(), maxdepth));
+		maxdepth = parit->getMaxDepthAfter();
+
+		// set the counter for this paragraph
+		setLabel(*this, parit);
+
+		// Now the insets
+		InsetList::const_iterator iit = parit->insetList().begin();
+		InsetList::const_iterator end = parit->insetList().end();
+		for (; iit != end; ++iit) {
+			parit.pos() = iit->pos;
+			iit->inset->updateLabels(parit);
+		}
+	}
+}
+
} // namespace lyx