git.lyx.org Git - lyx.git/blobdiff - src/Buffer.cpp
Remove LyXToolBox
index e04c06b2ccc8d20007e83020c4647c7816ebdcd5..7641741076553a6b9a3131f0bddf1acdeb12a44d 100644 (file)
@@ -79,6 +79,7 @@
 #include "mathed/MathMacroTemplate.h"
 #include "mathed/MathSupport.h"
 
+#include "graphics/GraphicsCache.h"
 #include "graphics/PreviewLoader.h"
 
 #include "frontends/alert.h"
@@ -277,7 +278,7 @@ public:
 
        /// A cache for the bibfiles (including bibfiles of loaded child
        /// documents), needed for appropriate update of natbib labels.
-       mutable support::FileNameList bibfiles_cache_;
+       mutable support::FileNamePairList bibfiles_cache_;
 
        // FIXME The caching mechanism could be improved. At present, we have a
        // cache for each Buffer, that caches all the bibliography info for that
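The bibliography cache now stores name/path pairs instead of bare file names. A minimal sketch of what support::FileNamePairList is assumed to look like, inferred from how the pair members are used later in this diff (it->first is converted with to_utf8() and written into the LaTeX output, ei->second is a FileName queried with lastModified()); the real typedef lives in src/support and may differ:

// Sketch (assumption, not the actual header): the first member keeps the
// database name as the user entered it, the second the resolved file on disk.
typedef std::vector<std::pair<docstring, support::FileName>> FileNamePairList;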
@@ -420,9 +421,9 @@ Buffer::Impl::Impl(Buffer * owner, FileName const & file, bool readonly_,
          file_fully_loaded(false), file_format(LYX_FORMAT), need_format_backup(false),
          ignore_parent(false),  toc_backend(owner), macro_lock(false), timestamp_(0),
          checksum_(0), wa_(0),  gui_(0), undo_(*owner), bibinfo_cache_valid_(false),
-         bibfile_cache_valid_(false), cite_labels_valid_(false), inset(0),
-         preview_loader_(0), cloned_buffer_(cloned_buffer), clone_list_(0),
-         doing_export(false), parent_buffer(0),
+         bibfile_cache_valid_(false), cite_labels_valid_(false), preview_error_(false),
+         inset(0), preview_loader_(0), cloned_buffer_(cloned_buffer),
+         clone_list_(0), doing_export(false), parent_buffer(0),
          word_count_(0), char_count_(0), blank_count_(0)
 {
        if (!cloned_buffer_) {
@@ -909,8 +910,12 @@ int Buffer::readHeader(Lexer & lex)
        params().html_latex_end.clear();
        params().html_math_img_scale = 1.0;
        params().output_sync_macro.erase();
-       params().setLocalLayout(string(), false);
-       params().setLocalLayout(string(), true);
+       params().setLocalLayout(docstring(), false);
+       params().setLocalLayout(docstring(), true);
+       params().biblio_opts.erase();
+       params().biblatex_bibstyle.erase();
+       params().biblatex_citestyle.erase();
+       params().multibib.erase();
 
        for (int i = 0; i < 4; ++i) {
                params().user_defined_bullet(i) = ITEMIZE_DEFAULTS[i];
@@ -949,15 +954,13 @@ int Buffer::readHeader(Lexer & lex)
                                                                        "%1$s %2$s\n"),
                                                         from_utf8(token),
                                                         lex.getDocString());
-                               errorList.push_back(ErrorItem(_("Document header error"),
-                                       s, -1, 0, 0));
+                               errorList.push_back(ErrorItem(_("Document header error"), s));
                        }
                }
        }
        if (begin_header_line) {
                docstring const s = _("\\begin_header is missing");
-               errorList.push_back(ErrorItem(_("Document header error"),
-                       s, -1, 0, 0));
+               errorList.push_back(ErrorItem(_("Document header error"), s));
        }
 
        params().makeDocumentClass();
@@ -979,8 +982,7 @@ bool Buffer::readDocument(Lexer & lex)
 
        if (!lex.checkFor("\\begin_document")) {
                docstring const s = _("\\begin_document is missing");
-               errorList.push_back(ErrorItem(_("Document header error"),
-                       s, -1, 0, 0));
+               errorList.push_back(ErrorItem(_("Document header error"), s));
        }
 
        readHeader(lex);
@@ -1707,17 +1709,17 @@ bool Buffer::makeLaTeXFile(FileName const & fname,
                errorList.push_back(ErrorItem(msg, _("Some characters of your document are probably not "
                                "representable in the chosen encoding.\n"
                                "Changing the document encoding to utf8 could help."),
-                               e.par_id, e.pos, e.pos + 1));
+                                             {e.par_id, e.pos}, {e.par_id, e.pos + 1}));
                failed_export = true;
        }
        catch (iconv_codecvt_facet_exception const & e) {
                errorList.push_back(ErrorItem(_("iconv conversion failed"),
-                       _(e.what()), -1, 0, 0));
+                                             _(e.what())));
                failed_export = true;
        }
        catch (exception const & e) {
                errorList.push_back(ErrorItem(_("conversion failed"),
-                       _(e.what()), -1, 0, 0));
+                                             _(e.what())));
                failed_export = true;
        }
        catch (...) {
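The ErrorItem constructors used in the hunks above no longer take raw id/start/end integers. A hedged sketch of the two overloads this diff appears to rely on, with TexRow::TextEntry standing for a {paragraph id, position} pair; the exact parameter names and the default argument are assumptions, not copied from ErrorList.h:

// Sketch of the assumed ErrorItem interface after this change.
class ErrorItem {
public:
        // error with no document position (header errors, iconv failures, ...)
        ErrorItem(docstring const & error, docstring const & description);
        // error tied to a start/end position in the document
        ErrorItem(docstring const & error, docstring const & description,
                  TexRow::TextEntry start, TexRow::TextEntry end,
                  Buffer const * buffer = 0);
};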
@@ -1878,6 +1880,55 @@ void Buffer::writeLaTeXSource(otexstream & os,
                runparams.use_babel = params().writeLaTeX(os, features,
                                                          d->filename.onlyPath());
 
+               // Biblatex bibliographies are loaded here
+               if (params().useBiblatex()) {
+                       vector<docstring> const bibfiles =
+                               prepareBibFilePaths(runparams, getBibfilesCache(), true);
+                       for (docstring const & file: bibfiles)
+                               os << "\\addbibresource{" << file << "}\n";
+               }
+
+               if (!runparams.dryrun && features.hasPolyglossiaExclusiveLanguages()
+                   && !features.hasOnlyPolyglossiaLanguages()) {
+                       docstring blangs;
+                       docstring plangs;
+                       vector<string> bll = features.getBabelExclusiveLanguages();
+                       vector<string> pll = features.getPolyglossiaExclusiveLanguages();
+                       if (!bll.empty()) {
+                               docstring langs;
+                               for (vector<string>::const_iterator it = bll.begin(); it != bll.end(); ++it) {
+                                       if (!langs.empty())
+                                               langs += ", ";
+                                       langs += _(*it);
+                               }
+                               blangs = bll.size() > 1 ?
+                                           support::bformat(_("The languages %1$s are only supported by Babel."), langs)
+                                         : support::bformat(_("The language %1$s is only supported by Babel."), langs);
+                       }
+                       if (!pll.empty()) {
+                               docstring langs;
+                               for (vector<string>::const_iterator it = pll.begin(); it != pll.end(); ++it) {
+                                       if (!langs.empty())
+                                               langs += ", ";
+                                       langs += _(*it);
+                               }
+                               plangs = pll.size() > 1 ?
+                                           support::bformat(_("The languages %1$s are only supported by Polyglossia."), langs)
+                                         : support::bformat(_("The language %1$s is only supported by Polyglossia."), langs);
+                               if (!blangs.empty())
+                                       plangs += "\n"; 
+                       }
+
+                       frontend::Alert::warning(
+                               _("Incompatible Languages!"),
+                               support::bformat(
+                                 _("You cannot use the following languages "
+                                   "together in one LaTeX document because "
+                                   "they require conflicting language packages:\n"
+                                   "%1$s%2$s"),
+                                 plangs, blangs));
+               }
+
                // Japanese might be required only in some children of a document,
                // but once required, we must keep use_japanese true.
                runparams.use_japanese |= features.isRequired("japanese");
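For illustration only: with Biblatex enabled and two databases named refs.bib and subdir/local.bib (hypothetical names), the loop above would emit preamble lines of the form

\addbibresource{refs.bib}
\addbibresource{subdir/local.bib}

where the exact path form (a mangled copy in the temp directory versus a path relative to the master document) is decided by prepareBibFilePaths(), added further down in this diff.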
@@ -1890,8 +1941,14 @@ void Buffer::writeLaTeXSource(otexstream & os,
                }
 
                // make the body.
+               // mark the beginning of the body to separate it from InPreamble insets
+               os.texrow().start(TexRow::beginDocument());
                os << "\\begin{document}\n";
 
+               // mark the start of a new paragraph by simulating a newline,
+               // so that os.afterParbreak() returns true at document start
+               os.lastChar('\n');
+
                // output the parent macros
                MacroSet::iterator it = parentMacros.begin();
                MacroSet::iterator end = parentMacros.end();
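Both additions replace the old paragraph-id bookkeeping at the start of the body: TexRow::beginDocument() is taken to produce a synthetic row entry marking where the body begins, and lastChar('\n') primes the stream so that afterParbreak() reports a paragraph break at the very start of the body, exactly as the comment above states. A rough sketch of the assumed otexstream behaviour (simplified, not the real implementation; the member name is invented):

// Assumed effect of the two otexstream calls (sketch):
void otexstream::lastChar(char_type c)
{
        parbreak_ = (c == '\n');   // pretend c was the last character written
}

bool otexstream::afterParbreak() const
{
        return parbreak_;          // now true right after \begin{document}
}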
@@ -1902,8 +1959,6 @@ void Buffer::writeLaTeXSource(otexstream & os,
 
        } // output_preamble
 
-       os.texrow().start(paragraphs().begin()->id(), 0);
-
        LYXERR(Debug::INFO, "preamble finished, now the body.");
 
        // the real stuff
@@ -1924,8 +1979,6 @@ void Buffer::writeLaTeXSource(otexstream & os,
        }
        runparams_in.encoding = runparams.encoding;
 
-       os.texrow().finalize();
-
        LYXERR(Debug::INFO, "Finished making LaTeX file.");
        LYXERR(Debug::INFO, "Row count was " << os.texrow().rows() - 1 << '.');
 }
@@ -1961,7 +2014,7 @@ void Buffer::writeDocBookSource(odocstream & os, string const & fname,
        LaTeXFeatures features(*this, params(), runparams);
        validate(features);
 
-       d->texrow.reset(false);
+       d->texrow.reset();
 
        DocumentClass const & tclass = params().documentClass();
        string const & top_element = tclass.latexname();
@@ -1982,12 +2035,12 @@ void Buffer::writeDocBookSource(odocstream & os, string const & fname,
                if (! tclass.class_header().empty())
                        os << from_ascii(tclass.class_header());
                else if (runparams.flavor == OutputParams::XML)
-                       os << "PUBLIC \"-//OASIS//DTD DocBook XML//EN\" "
+                       os << "PUBLIC \"-//OASIS//DTD DocBook XML V4.2//EN\" "
                            << "\"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd\"";
                else
                        os << " PUBLIC \"-//OASIS//DTD DocBook V4.2//EN\"";
 
-               docstring preamble = from_utf8(params().preamble);
+               docstring preamble = params().preamble;
                if (runparams.flavor != OutputParams::XML ) {
                        preamble += "<!ENTITY % output.print.png \"IGNORE\">\n";
                        preamble += "<!ENTITY % output.print.pdf \"IGNORE\">\n";
@@ -2090,14 +2143,14 @@ void Buffer::writeLyXHTMLSource(odocstream & os,
                if (!styles.empty())
                        os << "\n<!-- Text Class Preamble -->\n" << styles << '\n';
 
-               styles = from_utf8(features.getPreambleSnippets());
+               styles = features.getPreambleSnippets().str;
                if (!styles.empty())
                        os << "\n<!-- Preamble Snippets -->\n" << styles << '\n';
 
                // we will collect CSS information in a stream, and then output it
                // either here, as part of the header, or else in a separate file.
                odocstringstream css;
-               styles = from_utf8(features.getCSSSnippets());
+               styles = features.getCSSSnippets();
                if (!styles.empty())
                        css << "/* LyX Provided Styles */\n" << styles << '\n';
 
@@ -2261,7 +2314,7 @@ void Buffer::updateBibfilesCache(UpdateScope scope) const
        for (InsetIterator it = inset_iterator_begin(inset()); it; ++it) {
                if (it->lyxCode() == BIBTEX_CODE) {
                        InsetBibtex const & inset = static_cast<InsetBibtex const &>(*it);
-                       support::FileNameList const bibfiles = inset.getBibFiles();
+                       support::FileNamePairList const bibfiles = inset.getBibFiles();
                        d->bibfiles_cache_.insert(d->bibfiles_cache_.end(),
                                bibfiles.begin(),
                                bibfiles.end());
@@ -2270,7 +2323,7 @@ void Buffer::updateBibfilesCache(UpdateScope scope) const
                        Buffer const * const incbuf = inset.getChildBuffer();
                        if (!incbuf)
                                continue;
-                       support::FileNameList const & bibfiles =
+                       support::FileNamePairList const & bibfiles =
                                        incbuf->getBibfilesCache(UpdateChildOnly);
                        if (!bibfiles.empty()) {
                                d->bibfiles_cache_.insert(d->bibfiles_cache_.end(),
@@ -2308,7 +2361,7 @@ void Buffer::invalidateBibfileCache() const
 }
 
 
-support::FileNameList const & Buffer::getBibfilesCache(UpdateScope scope) const
+support::FileNamePairList const & Buffer::getBibfilesCache(UpdateScope scope) const
 {
        // FIXME This is probably unnecessary, given where we call this.
        // If this is a child document, use the master's cache instead.
@@ -2342,16 +2395,17 @@ void Buffer::checkIfBibInfoCacheIsValid() const
        }
 
        // compare the cached timestamps with the actual ones.
-       FileNameList const & bibfiles_cache = getBibfilesCache();
-       FileNameList::const_iterator ei = bibfiles_cache.begin();
-       FileNameList::const_iterator en = bibfiles_cache.end();
+       FileNamePairList const & bibfiles_cache = getBibfilesCache();
+       FileNamePairList::const_iterator ei = bibfiles_cache.begin();
+       FileNamePairList::const_iterator en = bibfiles_cache.end();
        for (; ei != en; ++ ei) {
-               time_t lastw = ei->lastModified();
-               time_t prevw = d->bibfile_status_[*ei];
+               FileName const fn = ei->second;
+               time_t lastw = fn.lastModified();
+               time_t prevw = d->bibfile_status_[fn];
                if (lastw != prevw) {
                        d->bibinfo_cache_valid_ = false;
                        d->cite_labels_valid_ = false;
-                       d->bibfile_status_[*ei] = lastw;
+                       d->bibfile_status_[fn] = lastw;
                }
        }
 }
@@ -2483,7 +2537,7 @@ bool Buffer::getStatus(FuncRequest const & cmd, FuncStatus & flag)
                size_t pos = format.find(' ');
                if (pos != string::npos)
                        format = format.substr(0, pos);
-               enable = params().isExportable(format);
+               enable = params().isExportable(format, false);
                if (!enable)
                        flag.message(bformat(
                                             _("Don't know how to export to format: %1$s"), arg));
@@ -2495,7 +2549,7 @@ bool Buffer::getStatus(FuncRequest const & cmd, FuncStatus & flag)
                break;
 
        case LFUN_BUILD_PROGRAM:
-               enable = params().isExportable("program");
+               enable = params().isExportable("program", false);
                break;
 
        case LFUN_BRANCH_ACTIVATE:
@@ -2877,11 +2931,9 @@ void Buffer::getLanguages(std::set<Language const *> & languages) const
 DocIterator Buffer::getParFromID(int const id) const
 {
        Buffer * buf = const_cast<Buffer *>(this);
-       if (id < 0) {
-               // John says this is called with id == -1 from undo
-               lyxerr << "getParFromID(), id: " << id << endl;
+       if (id < 0)
+               // This means non-existent
                return doc_iterator_end(buf);
-       }
 
        for (DocIterator it = doc_iterator_begin(buf); !it.atEnd(); it.forwardPar())
                if (it.paragraph().id() == id)
@@ -3064,6 +3116,114 @@ DocFileName Buffer::getReferencedFileName(string const & fn) const
 }
 
 
+string const Buffer::prepareFileNameForLaTeX(string const & name,
+                                            string const & ext, bool nice) const
+{
+       string const fname = makeAbsPath(name, filePath()).absFileName();
+       if (FileName::isAbsolute(name) || !FileName(fname + ext).isReadableFile())
+               return name;
+       if (!nice)
+               return fname;
+
+       // FIXME UNICODE
+       return to_utf8(makeRelPath(from_utf8(fname),
+               from_utf8(masterBuffer()->filePath())));
+}
+
+
+vector<docstring> const Buffer::prepareBibFilePaths(OutputParams const & runparams,
+                                               FileNamePairList const bibfilelist,
+                                               bool const add_extension) const
+{
+       // If we are processing the LaTeX file in a temp directory then
+       // copy the .bib databases to this temp directory, mangling their
+       // names in the process. Store this mangled name in the list of
+       // all databases.
+       // (We need to do all this because BibTeX *really*, *really*
+       // can't handle "files with spaces" and Windows users tend to
+       // use such filenames.)
+       // Otherwise, store the (maybe absolute) path to the original,
+       // unmangled database name.
+
+       vector<docstring> res;
+
+       // determine the export format
+       string const tex_format = flavor2format(runparams.flavor);
+
+       // check for spaces in paths
+       bool found_space = false;
+
+       FileNamePairList::const_iterator it = bibfilelist.begin();
+       FileNamePairList::const_iterator en = bibfilelist.end();
+       for (; it != en; ++it) {
+               string utf8input = to_utf8(it->first);
+               string database =
+                       prepareFileNameForLaTeX(utf8input, ".bib", runparams.nice);
+               FileName const try_in_file =
+                       makeAbsPath(database + ".bib", filePath());
+               bool const not_from_texmf = try_in_file.isReadableFile();
+
+               if (!runparams.inComment && !runparams.dryrun && !runparams.nice &&
+                   not_from_texmf) {
+                       // mangledFileName() needs the extension
+                       DocFileName const in_file = DocFileName(try_in_file);
+                       database = removeExtension(in_file.mangledFileName());
+                       FileName const out_file = makeAbsPath(database + ".bib",
+                                       masterBuffer()->temppath());
+                       bool const success = in_file.copyTo(out_file);
+                       if (!success) {
+                               LYXERR0("Failed to copy '" << in_file
+                                      << "' to '" << out_file << "'");
+                       }
+               } else if (!runparams.inComment && runparams.nice && not_from_texmf) {
+                       runparams.exportdata->addExternalFile(tex_format, try_in_file, database + ".bib");
+                       if (!isValidLaTeXFileName(database)) {
+                               frontend::Alert::warning(_("Invalid filename"),
+                                        _("The following filename will cause troubles "
+                                              "when running the exported file through LaTeX: ") +
+                                            from_utf8(database));
+                       }
+                       if (!isValidDVIFileName(database)) {
+                               frontend::Alert::warning(_("Problematic filename for DVI"),
+                                        _("The following filename can cause troubles "
+                                              "when running the exported file through LaTeX "
+                                                  "and opening the resulting DVI: ") +
+                                            from_utf8(database), true);
+                       }
+               }
+
+               if (add_extension)
+                       database += ".bib";
+
+               // FIXME UNICODE
+               docstring const path = from_utf8(latex_path(database));
+
+               if (contains(path, ' '))
+                       found_space = true;
+
+               if (find(res.begin(), res.end(), path) == res.end())
+                       res.push_back(path);
+       }
+
+       // Check if there are spaces in the path and warn BibTeX users, if so.
+       // (biber can cope with such paths)
+       if (!prefixIs(runparams.bibtex_command, "biber")) {
+               // Post this warning only once.
+               static bool warned_about_spaces = false;
+               if (!warned_about_spaces &&
+                   runparams.nice && found_space) {
+                       warned_about_spaces = true;
+                       Alert::warning(_("Export Warning!"),
+                                      _("There are spaces in the paths to your BibTeX databases.\n"
+                                                     "BibTeX will be unable to find them."));
+               }
+       }
+
+       return res;
+}
+
+
+
 string Buffer::layoutPos() const
 {
        return d->layout_position;
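Taken together, prepareFileNameForLaTeX() and prepareBibFilePaths() centralise the path handling needed both by bibliography insets and by the Biblatex branch of writeLaTeXSource() above. A usage sketch mirroring that caller: with runparams.nice == false the databases are copied into the master's temp directory under mangled names, with nice == true the returned paths stay relative to the master document.

// Sketch of the expected call pattern (same as the Biblatex code above):
vector<docstring> const dbs =
        prepareBibFilePaths(runparams, getBibfilesCache(), true);
for (docstring const & db : dbs)
        os << "\\addbibresource{" << db << "}\n";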
@@ -3517,7 +3677,7 @@ void Buffer::updateMacroInstances(UpdateType utype) const
                MacroContext mc = MacroContext(this, it);
                for (DocIterator::idx_type i = 0; i < n; ++i) {
                        MathData & data = minset->cell(i);
-                       data.updateMacros(0, mc, utype);
+                       data.updateMacros(0, mc, utype, 0);
                }
        }
 }
@@ -3715,7 +3875,7 @@ unique_ptr<TexRow> Buffer::getSourceCode(odocstream & os, string const & format,
                        // in order to know if we should output polyglossia
                        // macros (instead of babel macros)
                        LaTeXFeatures features(*this, params(), runparams);
-                       params().validate(features);
+                       validate(features);
                        runparams.use_polyglossia = features.usePolyglossia();
                        // latex or literate
                        otexstream ots(os);
@@ -3724,7 +3884,6 @@ unique_ptr<TexRow> Buffer::getSourceCode(odocstream & os, string const & format,
                        // the real stuff
                        latexParagraphs(*this, text(), ots, runparams);
                        texrow = ots.releaseTexRow();
-                       texrow->finalize();
 
                        // Restore the parenthood
                        if (!master)
@@ -3766,7 +3925,6 @@ unique_ptr<TexRow> Buffer::getSourceCode(odocstream & os, string const & format,
                                runparams.is_child = true;
                        writeLaTeXSource(ots, string(), runparams, output);
                        texrow = ots.releaseTexRow();
-                       texrow->finalize();
                }
        }
        return texrow;
@@ -4482,56 +4640,33 @@ Buffer::ReadStatus Buffer::loadThisLyXFile(FileName const & fn)
 
 void Buffer::bufferErrors(TeXErrors const & terr, ErrorList & errorList) const
 {
-       TeXErrors::Errors::const_iterator it = terr.begin();
-       TeXErrors::Errors::const_iterator end = terr.end();
-       ListOfBuffers clist = getDescendents();
-       ListOfBuffers::const_iterator cen = clist.end();
-
-       for (; it != end; ++it) {
-               int id_start = -1;
-               int pos_start = -1;
-               int errorRow = it->error_in_line;
+       for (auto const & err : terr) {
+               TexRow::TextEntry start, end = TexRow::text_none;
+               int errorRow = err.error_in_line;
                Buffer const * buf = 0;
                Impl const * p = d;
-               if (it->child_name.empty())
-                   p->texrow.getIdFromRow(errorRow, id_start, pos_start);
+               if (err.child_name.empty())
+                       tie(start, end) = p->texrow.getEntriesFromRow(errorRow);
                else {
                        // The error occurred in a child
-                       ListOfBuffers::const_iterator cit = clist.begin();
-                       for (; cit != cen; ++cit) {
+                       for (Buffer const * child : getDescendents()) {
                                string const child_name =
-                                       DocFileName(changeExtension(
-                                               (*cit)->absFileName(), "tex")).
-                                                       mangledFileName();
-                               if (it->child_name != child_name)
+                                       DocFileName(changeExtension(child->absFileName(), "tex")).
+                                       mangledFileName();
+                               if (err.child_name != child_name)
                                        continue;
-                               (*cit)->d->texrow.getIdFromRow(errorRow,
-                                                       id_start, pos_start);
-                               if (id_start != -1) {
+                               tie(start, end) = child->d->texrow.getEntriesFromRow(errorRow);
+                               if (!TexRow::isNone(start)) {
                                        buf = d->cloned_buffer_
-                                               ? (*cit)->d->cloned_buffer_->d->owner_
-                                               : (*cit)->d->owner_;
-                                       p = (*cit)->d;
+                                               ? child->d->cloned_buffer_->d->owner_
+                                               : child->d->owner_;
+                                       p = child->d;
                                        break;
                                }
                        }
                }
-               int id_end = -1;
-               int pos_end = -1;
-               bool found;
-               do {
-                       ++errorRow;
-                       found = p->texrow.getIdFromRow(errorRow, id_end, pos_end);
-               } while (found && id_start == id_end && pos_start == pos_end);
-
-               if (id_start != id_end) {
-                       // Next registered position is outside the inset where
-                       // the error occurred, so signal end-of-paragraph
-                       pos_end = 0;
-               }
-
-               errorList.push_back(ErrorItem(it->error_desc,
-                       it->error_text, id_start, pos_start, pos_end, buf));
+               errorList.push_back(ErrorItem(err.error_desc, err.error_text,
+                                             start, end, buf));
        }
 }
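The rewritten loop relies on TexRow's entry-based lookup instead of getIdFromRow(). A sketch of the interface it appears to assume, with member names inferred from the brace initialisers used earlier in this diff ({e.par_id, e.pos}); the real declarations are in TexRow.h:

// Assumed TexRow lookup interface (sketch):
struct TextEntry { int id; int pos; };            // paragraph id and position
static TextEntry const text_none;                 // "no position" sentinel
static bool isNone(TextEntry entry);              // true for text_none
// returns the entries bracketing an output row, allowing
//   tie(start, end) = texrow.getEntriesFromRow(row);
std::pair<TextEntry, TextEntry> getEntriesFromRow(int row) const;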
 
@@ -4599,14 +4734,16 @@ void Buffer::updateBuffer(UpdateScope scope, UpdateType utype) const
        ParIterator parit = cbuf.par_iterator_begin();
        updateBuffer(parit, utype);
 
+       /// FIXME: Perf
+       /// Update the tocBackend for any buffer. The outliner uses the master's,
+       /// and the navigation menu uses the child's.
+       cbuf.tocBackend().update(true, utype);
+
        if (master != this)
-               // TocBackend update will be done later.
                return;
 
        d->bibinfo_cache_valid_ = true;
        d->cite_labels_valid_ = true;
-       /// FIXME: Perf
-       cbuf.tocBackend().update(true, utype);
        if (scope == UpdateMaster)
                cbuf.structureChanged();
 }
@@ -4679,7 +4816,7 @@ static bool needEnumCounterReset(ParIterator const & it)
                --prev_it.top().pit();
                Paragraph const & prev_par = *prev_it;
                if (prev_par.getDepth() <= cur_depth)
-                       return  prev_par.layout().labeltype != LABEL_ENUMERATE;
+                       return prev_par.layout().name() != par.layout().name();
        }
        // start of nested inset: reset
        return true;
@@ -4763,8 +4900,12 @@ void Buffer::Impl::setLabel(ParIterator & it, UpdateType utype) const
                        break;
                }
 
+               // Increase the master counter?
+               if (layout.stepmastercounter && needEnumCounterReset(it))
+                       counters.stepMaster(enumcounter, utype);
+
                // Maybe we have to reset the enumeration counter.
-               if (needEnumCounterReset(it))
+               if (!layout.resumecounter && needEnumCounterReset(it))
                        counters.reset(enumcounter);
                counters.step(enumcounter, utype);