#include "derivations.hh"
#include "store-api.hh"
#include "globals.hh"
#include "util.hh"
#include "misc.hh"


namespace nix {


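/* Parse the `hashAlgo' and `hash' fields of a fixed output.  The
   algorithm name may carry an "r:" prefix requesting recursive
   hashing (for instance "r:sha256"; that concrete value is only an
   illustration), which is stripped before the hash type is looked
   up. */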
void DerivationOutput::parseHashInfo(bool & recursive, HashType & hashType, Hash & hash) const
{
    recursive = false;
    string algo = hashAlgo;

    if (string(algo, 0, 2) == "r:") {
        recursive = true;
        algo = string(algo, 2);
    }

    hashType = parseHashType(algo);
    if (hashType == htUnknown)
        throw Error(format("unknown hash algorithm `%1%'") % algo);

    hash = parseHash(hashType, this->hash);
}


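/* Serialise `drv' and write it to the store as a text file whose
   name is `name' with the derivation extension appended; in
   read-only mode, only compute the store path it would receive. */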
Path writeDerivation(StoreAPI & store,
    const Derivation & drv, const string & name, bool repair)
{
    PathSet references;
    references.insert(drv.inputSrcs.begin(), drv.inputSrcs.end());
    foreach (DerivationInputs::const_iterator, i, drv.inputDrvs)
        references.insert(i->first);
    /* Note that the outputs of a derivation are *not* references:
       they may legitimately be missing and need not be kept alive
       during garbage collection. */
    string suffix = name + drvExtension;
    string contents = unparseDerivation(drv);
    return settings.readOnlyMode
        ? computeStorePathForText(suffix, contents, references)
        : store.addTextToStore(suffix, contents, references, repair);
}


static Path parsePath(std::istream & str)
{
    string s = parseString(str);
    if (s.size() == 0 || s[0] != '/')
        throw FormatError(format("bad path `%1%' in derivation") % s);
    return s;
}


static StringSet parseStrings(std::istream & str, bool arePaths)
{
    StringSet res;
    while (!endOfList(str))
        res.insert(arePaths ? parsePath(str) : parseString(str));
    return res;
}


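/* Sketch of the textual ATerm format accepted below, with
   placeholder store paths (the field order is: outputs, input
   derivations, input sources, platform, builder, arguments,
   environment):

     Derive([("out","/nix/store/<hash>-example","","")],
            [("/nix/store/<hash>-dep.drv",["out"])],
            ["/nix/store/<hash>-builder.sh"],
            "x86_64-linux",
            "/bin/sh",
            ["-e","builder.sh"],
            [("name","example")])
*/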
static Derivation parseDerivation(const string & s)
{
    Derivation drv;
    std::istringstream str(s);
    expect(str, "Derive([");

    /* Parse the list of outputs. */
    while (!endOfList(str)) {
        DerivationOutput out;
        expect(str, "("); string id = parseString(str);
        expect(str, ","); out.path = parsePath(str);
        expect(str, ","); out.hashAlgo = parseString(str);
        expect(str, ","); out.hash = parseString(str);
        expect(str, ")");
        drv.outputs[id] = out;
    }

    /* Parse the list of input derivations. */
    expect(str, ",[");
    while (!endOfList(str)) {
        expect(str, "(");
        Path drvPath = parsePath(str);
        expect(str, ",[");
        drv.inputDrvs[drvPath] = parseStrings(str, false);
        expect(str, ")");
    }

    expect(str, ",["); drv.inputSrcs = parseStrings(str, true);
    expect(str, ","); drv.platform = parseString(str);
    expect(str, ","); drv.builder = parseString(str);

    /* Parse the builder arguments. */
    expect(str, ",[");
    while (!endOfList(str))
        drv.args.push_back(parseString(str));

    /* Parse the environment variables. */
    expect(str, ",[");
    while (!endOfList(str)) {
        expect(str, "("); string name = parseString(str);
        expect(str, ","); string value = parseString(str);
        expect(str, ")");
        drv.env[name] = value;
    }

    expect(str, ")");
    return drv;
}


Derivation readDerivation(const Path & drvPath)
{
    try {
        return parseDerivation(readFile(drvPath));
    } catch (FormatError & e) {
        throw Error(format("error parsing derivation `%1%': %2%") % drvPath % e.msg());
    }
}


static void printString(string & res, const string & s)
{
    res += '"';
    for (const char * i = s.c_str(); *i; i++)
        if (*i == '\"' || *i == '\\') { res += "\\"; res += *i; }
        else if (*i == '\n') res += "\\n";
        else if (*i == '\r') res += "\\r";
        else if (*i == '\t') res += "\\t";
        else res += *i;
    res += '"';
}


template<class ForwardIterator>
static void printStrings(string & res, ForwardIterator i, ForwardIterator j)
{
    res += '[';
    bool first = true;
    for ( ; i != j; ++i) {
        if (first) first = false; else res += ',';
        printString(res, *i);
    }
    res += ']';
}


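/* Convert a derivation back to the textual format that
   parseDerivation() above accepts. */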
string unparseDerivation(const Derivation & drv)
{
    string s;
    s.reserve(65536);
    s += "Derive([";

    bool first = true;
    foreach (DerivationOutputs::const_iterator, i, drv.outputs) {
        if (first) first = false; else s += ',';
        s += '('; printString(s, i->first);
        s += ','; printString(s, i->second.path);
        s += ','; printString(s, i->second.hashAlgo);
        s += ','; printString(s, i->second.hash);
        s += ')';
    }

    s += "],[";
    first = true;
    foreach (DerivationInputs::const_iterator, i, drv.inputDrvs) {
        if (first) first = false; else s += ',';
        s += '('; printString(s, i->first);
        s += ','; printStrings(s, i->second.begin(), i->second.end());
        s += ')';
    }

    s += "],";
    printStrings(s, drv.inputSrcs.begin(), drv.inputSrcs.end());

    s += ','; printString(s, drv.platform);
    s += ','; printString(s, drv.builder);
    s += ','; printStrings(s, drv.args.begin(), drv.args.end());

    s += ",[";
    first = true;
    foreach (StringPairs::const_iterator, i, drv.env) {
        if (first) first = false; else s += ',';
        s += '('; printString(s, i->first);
        s += ','; printString(s, i->second);
        s += ')';
    }

    s += "])";

    return s;
}


bool isDerivation(const string & fileName)
{
    return hasSuffix(fileName, drvExtension);
}


bool isFixedOutputDrv(const Derivation & drv)
{
    return drv.outputs.size() == 1 &&
        drv.outputs.begin()->first == "out" &&
        drv.outputs.begin()->second.hash != "";
}


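/* Memoisation table used by hashDerivationModulo() below, mapping
   derivation paths to their previously computed hashes. */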
DrvHashes drvHashes;


/* Returns the hash of a derivation modulo fixed-output
   subderivations.  A fixed-output derivation is a derivation with one
   output (`out') for which an expected hash and hash algorithm are
   specified (using the `outputHash' and `outputHashAlgo'
   attributes).  We don't want changes to such derivations to
   propagate upwards through the dependency graph, changing output
   paths everywhere.

   For instance, if we change the url in a call to the `fetchurl'
   function, we do not want to rebuild everything depending on it
   (after all, (the hash of) the file being downloaded is unchanged).
   So the *output paths* should not change.  On the other hand, the
   *derivation paths* should change to reflect the new dependency
   graph.

   That's what this function does: it returns a hash which is just the
   hash of the derivation ATerm, except that any input derivation
   paths have been replaced by the result of a recursive call to this
   function, and that for a fixed-output derivation we return a hash
   of its output path. */
Hash hashDerivationModulo(StoreAPI & store, Derivation drv)
{
    /* Return a fixed hash for fixed-output derivations. */
    if (isFixedOutputDrv(drv)) {
        DerivationOutputs::const_iterator i = drv.outputs.begin();
        return hashString(htSHA256, "fixed:out:"
            + i->second.hashAlgo + ":"
            + i->second.hash + ":"
            + i->second.path);
    }

    /* For other derivations, replace the input derivation paths with
       recursive calls to this function. */
    DerivationInputs inputs2;
    foreach (DerivationInputs::const_iterator, i, drv.inputDrvs) {
        Hash h = drvHashes[i->first];
        if (h.type == htUnknown) {
            assert(store.isValidPath(i->first));
            Derivation drv2 = readDerivation(i->first);
            h = hashDerivationModulo(store, drv2);
            drvHashes[i->first] = h;
        }
        inputs2[printHash(h)] = i->second;
    }
    drv.inputDrvs = inputs2;

    return hashString(htSHA256, unparseDerivation(drv));
}


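/* Split a string of the form "<drvPath>!<output1>,<output2>,..."
   into the derivation path and the set of requested outputs; a
   string without "!" denotes the derivation path alone, with an
   empty (i.e. unrestricted) output set.  The syntax shown here is
   only an illustration of what the code below accepts. */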
DrvPathWithOutputs parseDrvPathWithOutputs(const string & s)
{
    size_t n = s.find("!");
    return n == s.npos
        ? DrvPathWithOutputs(s, std::set<string>())
        : DrvPathWithOutputs(string(s, 0, n), tokenizeString<std::set<string> >(string(s, n + 1), ","));
}


Path makeDrvPathWithOutputs(const Path & drvPath, const std::set<string> & outputs)
{
    return outputs.empty()
        ? drvPath
        : drvPath + "!" + concatStringsSep(",", outputs);
}


bool wantOutput(const string & output, const std::set<string> & wanted)
{
    return wanted.empty() || wanted.find(output) != wanted.end();
}


PathSet outputPaths(const Derivation & drv)
{
    PathSet paths;
    for (auto & i : drv.outputs)
        paths.insert(i.second.path);
    return paths;
}


}