diff --git a/Makefile.config.in b/Makefile.config.in
index b632444e8abe6143a8c7e368b8bf36984b9cafca..5c245b8e99b3bbfd3c9b742f60feb3d6b6502fd4 100644
--- a/Makefile.config.in
+++ b/Makefile.config.in
@@ -19,6 +19,7 @@ LIBLZMA_LIBS = @LIBLZMA_LIBS@
 OPENSSL_LIBS = @OPENSSL_LIBS@
 PACKAGE_NAME = @PACKAGE_NAME@
 PACKAGE_VERSION = @PACKAGE_VERSION@
+SHELL = @bash@
 SODIUM_LIBS = @SODIUM_LIBS@
 SQLITE3_LIBS = @SQLITE3_LIBS@
 bash = @bash@
diff --git a/README.md b/README.md
index a1588284dcfb388ac937e59541fad8702f90bbad..e5f7a694f49c8618a13678b89ca0f0e1b44d2a29 100644
--- a/README.md
+++ b/README.md
@@ -12,7 +12,7 @@ for more details.
 On Linux and macOS the easiest way to Install Nix is to run the following shell command
 (as a user other than root):
 
-```
+```console
 $ curl -L https://nixos.org/nix/install | sh
 ```
 
@@ -20,27 +20,8 @@ Information on additional installation methods is available on the [Nix download
 
 ## Building And Developing
 
-### Building Nix
-
-You can build Nix using one of the targets provided by [release.nix](./release.nix):
-
-```
-$ nix-build ./release.nix -A build.aarch64-linux
-$ nix-build ./release.nix -A build.x86_64-darwin
-$ nix-build ./release.nix -A build.i686-linux
-$ nix-build ./release.nix -A build.x86_64-linux
-```
-
-### Development Environment
-
-You can use the provided `shell.nix` to get a working development environment:
-
-```
-$ nix-shell
-$ ./bootstrap.sh
-$ ./configure
-$ make
-```
+See our [Hacking guide](https://hydra.nixos.org/job/nix/master/build.x86_64-linux/latest/download-by-type/doc/manual#chap-hacking) in our manual for instructions on how to
+build Nix from source with nix-build or how to get a development environment.
 
 ## Additional Resources
 
diff --git a/doc/manual/hacking.xml b/doc/manual/hacking.xml
index b671811d3a302b30775e1d135e9ff7475c7b836c..d25d4b84abce60ec335cf280b975392fc163e59b 100644
--- a/doc/manual/hacking.xml
+++ b/doc/manual/hacking.xml
@@ -4,18 +4,37 @@
 
 <title>Hacking</title>
 
-<para>This section provides some notes on how to hack on Nix.  To get
+<para>This section provides some notes on how to hack on Nix. To get
 the latest version of Nix from GitHub:
 <screen>
-$ git clone git://github.com/NixOS/nix.git
+$ git clone https://github.com/NixOS/nix.git
 $ cd nix
 </screen>
 </para>
 
-<para>To build it and its dependencies:
+<para>To build Nix for the current operating system/architecture use
+
+<screen>
+$ nix-build
+</screen>
+
+or if you have a flakes-enabled nix:
+
+<screen>
+$ nix build
+</screen>
+
+This will build the <literal>defaultPackage</literal> attribute defined in the <literal>flake.nix</literal> file.
+
+To build for other platforms, add one of the following suffixes to it: aarch64-linux,
+i686-linux, x86_64-darwin, x86_64-linux.
+
+e.g.
+
 <screen>
-$ nix-build release.nix -A build.x86_64-linux
+nix-build -A defaultPackage.x86_64-linux
 </screen>
+
 </para>
 
 <para>To build all dependencies and start a shell in which all
@@ -27,13 +46,27 @@ $ nix-shell
 To build Nix itself in this shell:
 <screen>
 [nix-shell]$ ./bootstrap.sh
-[nix-shell]$ configurePhase
-[nix-shell]$ make
+[nix-shell]$ ./configure $configureFlags
+[nix-shell]$ make -j $NIX_BUILD_CORES
 </screen>
 To install it in <literal>$(pwd)/inst</literal> and test it:
 <screen>
 [nix-shell]$ make install
 [nix-shell]$ make installcheck
+[nix-shell]$ ./inst/bin/nix --version
+nix (Nix) 2.4
+</screen>
+
+If you have a flakes-enabled nix you can replace:
+
+<screen>
+$ nix-shell
+</screen>
+
+by:
+
+<screen>
+$ nix develop
 </screen>
 
 </para>
diff --git a/scripts/install-nix-from-closure.sh b/scripts/install-nix-from-closure.sh
index 5824c22178871d0a62b111f8e4c2092fdebf6f50..6fb0beb2b7bf93a27014551f571f1076749a6d28 100644
--- a/scripts/install-nix-from-closure.sh
+++ b/scripts/install-nix-from-closure.sh
@@ -207,7 +207,7 @@ if [ -z "$NIX_INSTALLER_NO_MODIFY_PROFILE" ]; then
         if [ -w "$fn" ]; then
             if ! grep -q "$p" "$fn"; then
                 echo "modifying $fn..." >&2
-                echo "if [ -e $p ]; then . $p; fi # added by Nix installer" >> "$fn"
+                echo -e "\nif [ -e $p ]; then . $p; fi # added by Nix installer" >> "$fn"
             fi
             added=1
             break
@@ -218,7 +218,7 @@ if [ -z "$NIX_INSTALLER_NO_MODIFY_PROFILE" ]; then
         if [ -w "$fn" ]; then
             if ! grep -q "$p" "$fn"; then
                 echo "modifying $fn..." >&2
-                echo "if [ -e $p ]; then . $p; fi # added by Nix installer" >> "$fn"
+                echo -e "\nif [ -e $p ]; then . $p; fi # added by Nix installer" >> "$fn"
             fi
             added=1
             break
diff --git a/shell.nix b/shell.nix
new file mode 100644
index 0000000000000000000000000000000000000000..330df0ab6f7f84abc7f16915d48646f21e9860e5
--- /dev/null
+++ b/shell.nix
@@ -0,0 +1,3 @@
+(import (fetchTarball https://github.com/edolstra/flake-compat/archive/master.tar.gz) {
+  src = ./.;
+}).shellNix
diff --git a/src/libexpr/flake/flakeref.cc b/src/libexpr/flake/flakeref.cc
index 701546671f6883b2426a8874680487ca961e1b4b..6363446f6caad535aba82f2f95f53fea3a8cdf80 100644
--- a/src/libexpr/flake/flakeref.cc
+++ b/src/libexpr/flake/flakeref.cc
@@ -102,56 +102,61 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
             percentDecode(std::string(match[6])));
     }
 
-    /* Check if 'url' is a path (either absolute or relative to
-       'baseDir'). If so, search upward to the root of the repo
-       (i.e. the directory containing .git). */
-
     else if (std::regex_match(url, match, pathUrlRegex)) {
         std::string path = match[1];
-        if (!baseDir && !hasPrefix(path, "/"))
-            throw BadURL("flake reference '%s' is not an absolute path", url);
-        path = absPath(path, baseDir, true);
-
-        if (!S_ISDIR(lstat(path).st_mode))
-            throw BadURL("path '%s' is not a flake (because it's not a directory)", path);
-
-        if (!allowMissing && !pathExists(path + "/flake.nix"))
-            throw BadURL("path '%s' is not a flake (because it doesn't contain a 'flake.nix' file)", path);
-
-        auto fragment = percentDecode(std::string(match[3]));
-
-        auto flakeRoot = path;
-        std::string subdir;
-
-        while (flakeRoot != "/") {
-            if (pathExists(flakeRoot + "/.git")) {
-                auto base = std::string("git+file://") + flakeRoot;
-
-                auto parsedURL = ParsedURL{
-                    .url = base, // FIXME
-                    .base = base,
-                    .scheme = "git+file",
-                    .authority = "",
-                    .path = flakeRoot,
-                    .query = decodeQuery(match[2]),
-                };
-
-                if (subdir != "") {
-                    if (parsedURL.query.count("dir"))
-                        throw Error("flake URL '%s' has an inconsistent 'dir' parameter", url);
-                    parsedURL.query.insert_or_assign("dir", subdir);
-                }
+        std::string fragment = percentDecode(std::string(match[3]));
+
+        if (baseDir) {
+            /* Check if 'url' is a path (either absolute or relative
+               to 'baseDir'). If so, search upward to the root of the
+               repo (i.e. the directory containing .git). */
+
+            path = absPath(path, baseDir, true);
+
+            if (!S_ISDIR(lstat(path).st_mode))
+                throw BadURL("path '%s' is not a flake (because it's not a directory)", path);
+
+            if (!allowMissing && !pathExists(path + "/flake.nix"))
+                throw BadURL("path '%s' is not a flake (because it doesn't contain a 'flake.nix' file)", path);
 
-                if (pathExists(flakeRoot + "/.git/shallow"))
-                    parsedURL.query.insert_or_assign("shallow", "1");
+            auto flakeRoot = path;
+            std::string subdir;
+
+            while (flakeRoot != "/") {
+                if (pathExists(flakeRoot + "/.git")) {
+                    auto base = std::string("git+file://") + flakeRoot;
+
+                    auto parsedURL = ParsedURL{
+                        .url = base, // FIXME
+                        .base = base,
+                        .scheme = "git+file",
+                        .authority = "",
+                        .path = flakeRoot,
+                        .query = decodeQuery(match[2]),
+                    };
+
+                    if (subdir != "") {
+                        if (parsedURL.query.count("dir"))
+                            throw Error("flake URL '%s' has an inconsistent 'dir' parameter", url);
+                        parsedURL.query.insert_or_assign("dir", subdir);
+                    }
+
+                    if (pathExists(flakeRoot + "/.git/shallow"))
+                        parsedURL.query.insert_or_assign("shallow", "1");
+
+                    return std::make_pair(
+                        FlakeRef(Input::fromURL(parsedURL), get(parsedURL.query, "dir").value_or("")),
+                        fragment);
+                }
 
-                return std::make_pair(
-                    FlakeRef(Input::fromURL(parsedURL), get(parsedURL.query, "dir").value_or("")),
-                    fragment);
+                subdir = std::string(baseNameOf(flakeRoot)) + (subdir.empty() ? "" : "/" + subdir);
+                flakeRoot = dirOf(flakeRoot);
             }
 
-            subdir = std::string(baseNameOf(flakeRoot)) + (subdir.empty() ? "" : "/" + subdir);
-            flakeRoot = dirOf(flakeRoot);
+        } else {
+            if (!hasPrefix(path, "/"))
+                throw BadURL("flake reference '%s' is not an absolute path", url);
+            path = canonPath(path);
         }
 
         fetchers::Attrs attrs;
diff --git a/src/libstore/build.cc b/src/libstore/build.cc
index ac2e675740d3a4cc32a4f30645ca629ea0ca6c4f..62294a08cdd0ae61f36ba9f06f190939832e6687 100644
--- a/src/libstore/build.cc
+++ b/src/libstore/build.cc
@@ -2774,7 +2774,7 @@ struct RestrictedStore : public LocalFSStore
         goal.addDependency(info.path);
     }
 
-    StorePath addToStoreFromDump(const string & dump, const string & name,
+    StorePath addToStoreFromDump(Source & dump, const string & name,
         FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) override
     {
         auto path = next->addToStoreFromDump(dump, name, method, hashAlgo, repair);
diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc
index db7139374c1f8c75e896085b6eec47e078a95d27..7e16529a5c26625c07b573c9099a5713b89fa56d 100644
--- a/src/libstore/daemon.cc
+++ b/src/libstore/daemon.cc
@@ -173,31 +173,6 @@ struct TunnelSource : BufferedSource
     }
 };
 
-/* If the NAR archive contains a single file at top-level, then save
-   the contents of the file to `s'.  Otherwise barf. */
-struct RetrieveRegularNARSink : ParseSink
-{
-    bool regular;
-    string s;
-
-    RetrieveRegularNARSink() : regular(true) { }
-
-    void createDirectory(const Path & path)
-    {
-        regular = false;
-    }
-
-    void receiveContents(unsigned char * data, unsigned int len)
-    {
-        s.append((const char *) data, len);
-    }
-
-    void createSymlink(const Path & path, const string & target)
-    {
-        regular = false;
-    }
-};
-
 struct ClientSettings
 {
     bool keepFailed;
@@ -375,25 +350,28 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
     }
 
     case wopAddToStore: {
-        std::string s, baseName;
+        HashType hashAlgo;
+        std::string baseName;
         FileIngestionMethod method;
         {
-            bool fixed; uint8_t recursive;
-            from >> baseName >> fixed /* obsolete */ >> recursive >> s;
+            bool fixed;
+            uint8_t recursive;
+            std::string hashAlgoRaw;
+            from >> baseName >> fixed /* obsolete */ >> recursive >> hashAlgoRaw;
             if (recursive > (uint8_t) FileIngestionMethod::Recursive)
                 throw Error("unsupported FileIngestionMethod with value of %i; you may need to upgrade nix-daemon", recursive);
             method = FileIngestionMethod { recursive };
             /* Compatibility hack. */
             if (!fixed) {
-                s = "sha256";
+                hashAlgoRaw = "sha256";
                 method = FileIngestionMethod::Recursive;
             }
+            hashAlgo = parseHashType(hashAlgoRaw);
         }
-        HashType hashAlgo = parseHashType(s);
 
-        StringSink savedNAR;
-        TeeSource savedNARSource(from, savedNAR);
-        RetrieveRegularNARSink savedRegular;
+        StringSink saved;
+        TeeSource savedNARSource(from, saved);
+        RetrieveRegularNARSink savedRegular { saved };
 
         if (method == FileIngestionMethod::Recursive) {
             /* Get the entire NAR dump from the client and save it to
@@ -407,11 +385,9 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
         logger->startWork();
         if (!savedRegular.regular) throw Error("regular file expected");
 
-        auto path = store->addToStoreFromDump(
-            method == FileIngestionMethod::Recursive ? *savedNAR.s : savedRegular.s,
-            baseName,
-            method,
-            hashAlgo);
+        // FIXME: try to stream directly from `from`.
+        StringSource dumpSource { *saved.s };
+        auto path = store->addToStoreFromDump(dumpSource, baseName, method, hashAlgo);
         logger->stopWork();
 
         to << store->printStorePath(path);
@@ -727,15 +703,15 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
         if (!trusted)
             info.ultimate = false;
 
-        std::string saved;
         std::unique_ptr<Source> source;
         if (GET_PROTOCOL_MINOR(clientVersion) >= 21)
             source = std::make_unique<TunnelSource>(from, to);
         else {
-            TeeParseSink tee(from);
-            parseDump(tee, tee.source);
-            saved = std::move(*tee.saved.s);
-            source = std::make_unique<StringSource>(saved);
+            StringSink saved;
+            TeeSource tee { from, saved };
+            ParseSink ether;
+            parseDump(ether, tee);
+            source = std::make_unique<StringSource>(std::move(*saved.s));
         }
 
         logger->startWork();
diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc
index 082d0f1d15882e7a27db45c31fc0ee26a6afdcc6..b963d64d7aa921ed7ff2619b3ef0c8589b964005 100644
--- a/src/libstore/export-import.cc
+++ b/src/libstore/export-import.cc
@@ -60,8 +60,10 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
         if (n != 1) throw Error("input doesn't look like something created by 'nix-store --export'");
 
         /* Extract the NAR from the source. */
-        TeeParseSink tee(source);
-        parseDump(tee, tee.source);
+        StringSink saved;
+        TeeSource tee { source, saved };
+        ParseSink ether;
+        parseDump(ether, tee);
 
         uint32_t magic = readInt(source);
         if (magic != exportMagic)
@@ -77,15 +79,15 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
         if (deriver != "")
             info.deriver = parseStorePath(deriver);
 
-        info.narHash = hashString(htSHA256, *tee.saved.s);
-        info.narSize = tee.saved.s->size();
+        info.narHash = hashString(htSHA256, *saved.s);
+        info.narSize = saved.s->size();
 
         // Ignore optional legacy signature.
         if (readInt(source) == 1)
             readString(source);
 
         // Can't use underlying source, which would have been exhausted
-        auto source = StringSource { *tee.saved.s };
+        auto source = StringSource { *saved.s };
         addToStore(info, source, NoRepair, checkSigs);
 
         res.push_back(info.path);
diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc
index d49d00d6d07297bad2984c7f37ad3a17bda06bef..340fb53066a14b40d7091749688e6de4d1cb3a51 100644
--- a/src/libstore/local-store.cc
+++ b/src/libstore/local-store.cc
@@ -1033,82 +1033,26 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
 }
 
 
-StorePath LocalStore::addToStoreFromDump(const string & dump, const string & name,
-    FileIngestionMethod method, HashType hashAlgo, RepairFlag repair)
-{
-    Hash h = hashString(hashAlgo, dump);
-
-    auto dstPath = makeFixedOutputPath(method, h, name);
-
-    addTempRoot(dstPath);
-
-    if (repair || !isValidPath(dstPath)) {
-
-        /* The first check above is an optimisation to prevent
-           unnecessary lock acquisition. */
-
-        auto realPath = Store::toRealPath(dstPath);
-
-        PathLocks outputLock({realPath});
-
-        if (repair || !isValidPath(dstPath)) {
-
-            deletePath(realPath);
-
-            autoGC();
-
-            if (method == FileIngestionMethod::Recursive) {
-                StringSource source(dump);
-                restorePath(realPath, source);
-            } else
-                writeFile(realPath, dump);
-
-            canonicalisePathMetaData(realPath, -1);
-
-            /* Register the SHA-256 hash of the NAR serialisation of
-               the path in the database.  We may just have computed it
-               above (if called with recursive == true and hashAlgo ==
-               sha256); otherwise, compute it here. */
-            HashResult hash;
-            if (method == FileIngestionMethod::Recursive) {
-                hash.first = hashAlgo == htSHA256 ? h : hashString(htSHA256, dump);
-                hash.second = dump.size();
-            } else
-                hash = hashPath(htSHA256, realPath);
-
-            optimisePath(realPath); // FIXME: combine with hashPath()
-
-            ValidPathInfo info(dstPath);
-            info.narHash = hash.first;
-            info.narSize = hash.second;
-            info.ca = FixedOutputHash { .method = method, .hash = h };
-            registerValidPath(info);
-        }
-
-        outputLock.setDeletion(true);
-    }
-
-    return dstPath;
-}
-
-
 StorePath LocalStore::addToStore(const string & name, const Path & _srcPath,
     FileIngestionMethod method, HashType hashAlgo, PathFilter & filter, RepairFlag repair)
 {
     Path srcPath(absPath(_srcPath));
+    auto source = sinkToSource([&](Sink & sink) {
+        if (method == FileIngestionMethod::Recursive)
+            dumpPath(srcPath, sink, filter);
+        else
+            readFile(srcPath, sink);
+    });
+    return addToStoreFromDump(*source, name, method, hashAlgo, repair);
+}
 
-    if (method != FileIngestionMethod::Recursive)
-        return addToStoreFromDump(readFile(srcPath), name, method, hashAlgo, repair);
-
-    /* For computing the NAR hash. */
-    auto sha256Sink = std::make_unique<HashSink>(htSHA256);
 
-    /* For computing the store path. In recursive SHA-256 mode, this
-       is the same as the NAR hash, so no need to do it again. */
-    std::unique_ptr<HashSink> hashSink =
-        hashAlgo == htSHA256
-        ? nullptr
-        : std::make_unique<HashSink>(hashAlgo);
+StorePath LocalStore::addToStoreFromDump(Source & source0, const string & name,
+    FileIngestionMethod method, HashType hashAlgo, RepairFlag repair)
+{
+    /* For computing the store path. */
+    auto hashSink = std::make_unique<HashSink>(hashAlgo);
+    TeeSource source { source0, *hashSink };
 
     /* Read the source path into memory, but only if it's up to
        narBufferSize bytes. If it's larger, write it to a temporary
@@ -1116,55 +1060,49 @@ StorePath LocalStore::addToStore(const string & name, const Path & _srcPath,
        destination store path is already valid, we just delete the
        temporary path. Otherwise, we move it to the destination store
        path. */
-    bool inMemory = true;
-    std::string nar;
-
-    auto source = sinkToSource([&](Sink & sink) {
-
-        LambdaSink sink2([&](const unsigned char * buf, size_t len) {
-            (*sha256Sink)(buf, len);
-            if (hashSink) (*hashSink)(buf, len);
-
-            if (inMemory) {
-                if (nar.size() + len > settings.narBufferSize) {
-                    inMemory = false;
-                    sink << 1;
-                    sink((const unsigned char *) nar.data(), nar.size());
-                    nar.clear();
-                } else {
-                    nar.append((const char *) buf, len);
-                }
-            }
-
-            if (!inMemory) sink(buf, len);
-        });
-
-        dumpPath(srcPath, sink2, filter);
-    });
+    bool inMemory = false;
+
+    std::string dump;
+
+    /* Fill out the buffer, and decide whether we are working strictly in
+       memory, based on whether we break out because the buffer is full
+       or because the original source is exhausted */
+    while (dump.size() < settings.narBufferSize) {
+        auto oldSize = dump.size();
+        constexpr size_t chunkSize = 1024;
+        auto want = std::min(chunkSize, settings.narBufferSize - oldSize);
+        dump.resize(oldSize + want);
+        auto got = 0;
+        try {
+            got = source.read((uint8_t *) dump.data() + oldSize, want);
+        } catch (EndOfFile &) {
+            inMemory = true;
+            break;
+        }
+        dump.resize(oldSize + got);
+    }
 
     std::unique_ptr<AutoDelete> delTempDir;
     Path tempPath;
 
-    try {
-        /* Wait for the source coroutine to give us some dummy
-           data. This is so that we don't create the temporary
-           directory if the NAR fits in memory. */
-        readInt(*source);
+    if (!inMemory) {
+        /* Drain what we pulled so far, and then keep on pulling */
+        StringSource dumpSource { dump };
+        ChainSource bothSource { dumpSource, source };
 
         auto tempDir = createTempDir(realStoreDir, "add");
         delTempDir = std::make_unique<AutoDelete>(tempDir);
         tempPath = tempDir + "/x";
 
-        restorePath(tempPath, *source);
+        if (method == FileIngestionMethod::Recursive)
+            restorePath(tempPath, bothSource);
+        else
+            writeFile(tempPath, bothSource);
 
-    } catch (EndOfFile &) {
-        if (!inMemory) throw;
-        /* The NAR fits in memory, so we didn't do restorePath(). */
+        dump.clear();
     }
 
-    auto sha256 = sha256Sink->finish();
-
-    Hash hash = hashSink ? hashSink->finish().first : sha256.first;
+    auto [hash, size] = hashSink->finish();
 
     auto dstPath = makeFixedOutputPath(method, hash, name);
 
@@ -1186,22 +1124,34 @@ StorePath LocalStore::addToStore(const string & name, const Path & _srcPath,
             autoGC();
 
             if (inMemory) {
+                 StringSource dumpSource { dump };
                 /* Restore from the NAR in memory. */
-                StringSource source(nar);
-                restorePath(realPath, source);
+                if (method == FileIngestionMethod::Recursive)
+                    restorePath(realPath, dumpSource);
+                else
+                    writeFile(realPath, dumpSource);
             } else {
                 /* Move the temporary path we restored above. */
                 if (rename(tempPath.c_str(), realPath.c_str()))
                     throw Error("renaming '%s' to '%s'", tempPath, realPath);
             }
 
+            /* For computing the nar hash. In recursive SHA-256 mode, this
+               is the same as the store hash, so no need to do it again. */
+            auto narHash = std::pair { hash, size };
+            if (method != FileIngestionMethod::Recursive || hashAlgo != htSHA256) {
+                HashSink narSink { htSHA256 };
+                dumpPath(realPath, narSink);
+                narHash = narSink.finish();
+            }
+
             canonicalisePathMetaData(realPath, -1); // FIXME: merge into restorePath
 
             optimisePath(realPath);
 
             ValidPathInfo info(dstPath);
-            info.narHash = sha256.first;
-            info.narSize = sha256.second;
+            info.narHash = narHash.first;
+            info.narSize = narHash.second;
             info.ca = FixedOutputHash { .method = method, .hash = hash };
             registerValidPath(info);
         }
diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh
index c0e5d0286825503d475b2bf7e61d708b711e67a8..355c2814f2cc980d57ff86f849c9b7b4139b3047 100644
--- a/src/libstore/local-store.hh
+++ b/src/libstore/local-store.hh
@@ -153,7 +153,7 @@ public:
        in `dump', which is either a NAR serialisation (if recursive ==
        true) or simply the contents of a regular file (if recursive ==
        false). */
-    StorePath addToStoreFromDump(const string & dump, const string & name,
+    StorePath addToStoreFromDump(Source & dump, const string & name,
         FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) override;
 
     StorePath addTextToStore(const string & name, const string & s,
diff --git a/src/libstore/profiles.cc b/src/libstore/profiles.cc
index 6cfe393a45e39b85faf8d913f512202285942a16..6862b42f069664cd74edd245439858eb961f512b 100644
--- a/src/libstore/profiles.cc
+++ b/src/libstore/profiles.cc
@@ -12,30 +12,24 @@
 namespace nix {
 
 
-static bool cmpGensByNumber(const Generation & a, const Generation & b)
-{
-    return a.number < b.number;
-}
-
-
 /* Parse a generation name of the format
    `<profilename>-<number>-link'. */
-static int parseName(const string & profileName, const string & name)
+static std::optional<GenerationNumber> parseName(const string & profileName, const string & name)
 {
-    if (string(name, 0, profileName.size() + 1) != profileName + "-") return -1;
+    if (string(name, 0, profileName.size() + 1) != profileName + "-") return {};
     string s = string(name, profileName.size() + 1);
     string::size_type p = s.find("-link");
-    if (p == string::npos) return -1;
-    int n;
+    if (p == string::npos) return {};
+    unsigned int n;
     if (string2Int(string(s, 0, p), n) && n >= 0)
         return n;
     else
-        return -1;
+        return {};
 }
 
 
 
-Generations findGenerations(Path profile, int & curGen)
+std::pair<Generations, std::optional<GenerationNumber>> findGenerations(Path profile)
 {
     Generations gens;
 
@@ -43,30 +37,34 @@ Generations findGenerations(Path profile, int & curGen)
     auto profileName = std::string(baseNameOf(profile));
 
     for (auto & i : readDirectory(profileDir)) {
-        int n;
-        if ((n = parseName(profileName, i.name)) != -1) {
-            Generation gen;
-            gen.path = profileDir + "/" + i.name;
-            gen.number = n;
+        if (auto n = parseName(profileName, i.name)) {
+            auto path = profileDir + "/" + i.name;
             struct stat st;
-            if (lstat(gen.path.c_str(), &st) != 0)
-                throw SysError("statting '%1%'", gen.path);
-            gen.creationTime = st.st_mtime;
-            gens.push_back(gen);
+            if (lstat(path.c_str(), &st) != 0)
+                throw SysError("statting '%1%'", path);
+            gens.push_back({
+                .number = *n,
+                .path = path,
+                .creationTime = st.st_mtime
+            });
         }
     }
 
-    gens.sort(cmpGensByNumber);
+    gens.sort([](const Generation & a, const Generation & b)
+    {
+        return a.number < b.number;
+    });
 
-    curGen = pathExists(profile)
+    return {
+        gens,
+        pathExists(profile)
         ? parseName(profileName, readLink(profile))
-        : -1;
-
-    return gens;
+        : std::nullopt
+    };
 }
 
 
-static void makeName(const Path & profile, unsigned int num,
+static void makeName(const Path & profile, GenerationNumber num,
     Path & outLink)
 {
     Path prefix = (format("%1%-%2%") % profile % num).str();
@@ -78,10 +76,9 @@ Path createGeneration(ref<LocalFSStore> store, Path profile, Path outPath)
 {
     /* The new generation number should be higher than old the
        previous ones. */
-    int dummy;
-    Generations gens = findGenerations(profile, dummy);
+    auto [gens, dummy] = findGenerations(profile);
 
-    unsigned int num;
+    GenerationNumber num;
     if (gens.size() > 0) {
         Generation last = gens.back();
 
@@ -121,7 +118,7 @@ static void removeFile(const Path & path)
 }
 
 
-void deleteGeneration(const Path & profile, unsigned int gen)
+void deleteGeneration(const Path & profile, GenerationNumber gen)
 {
     Path generation;
     makeName(profile, gen, generation);
@@ -129,7 +126,7 @@ void deleteGeneration(const Path & profile, unsigned int gen)
 }
 
 
-static void deleteGeneration2(const Path & profile, unsigned int gen, bool dryRun)
+static void deleteGeneration2(const Path & profile, GenerationNumber gen, bool dryRun)
 {
     if (dryRun)
         printInfo(format("would remove generation %1%") % gen);
@@ -140,31 +137,29 @@ static void deleteGeneration2(const Path & profile, unsigned int gen, bool dryRu
 }
 
 
-void deleteGenerations(const Path & profile, const std::set<unsigned int> & gensToDelete, bool dryRun)
+void deleteGenerations(const Path & profile, const std::set<GenerationNumber> & gensToDelete, bool dryRun)
 {
     PathLocks lock;
     lockProfile(lock, profile);
 
-    int curGen;
-    Generations gens = findGenerations(profile, curGen);
+    auto [gens, curGen] = findGenerations(profile);
 
-    if (gensToDelete.find(curGen) != gensToDelete.end())
+    if (gensToDelete.count(*curGen))
         throw Error("cannot delete current generation of profile %1%'", profile);
 
     for (auto & i : gens) {
-        if (gensToDelete.find(i.number) == gensToDelete.end()) continue;
+        if (!gensToDelete.count(i.number)) continue;
         deleteGeneration2(profile, i.number, dryRun);
     }
 }
 
-void deleteGenerationsGreaterThan(const Path & profile, int max, bool dryRun)
+void deleteGenerationsGreaterThan(const Path & profile, GenerationNumber max, bool dryRun)
 {
     PathLocks lock;
     lockProfile(lock, profile);
 
-    int curGen;
     bool fromCurGen = false;
-    Generations gens = findGenerations(profile, curGen);
+    auto [gens, curGen] = findGenerations(profile);
     for (auto i = gens.rbegin(); i != gens.rend(); ++i) {
         if (i->number == curGen) {
             fromCurGen = true;
@@ -186,8 +181,7 @@ void deleteOldGenerations(const Path & profile, bool dryRun)
     PathLocks lock;
     lockProfile(lock, profile);
 
-    int curGen;
-    Generations gens = findGenerations(profile, curGen);
+    auto [gens, curGen] = findGenerations(profile);
 
     for (auto & i : gens)
         if (i.number != curGen)
@@ -200,8 +194,7 @@ void deleteGenerationsOlderThan(const Path & profile, time_t t, bool dryRun)
     PathLocks lock;
     lockProfile(lock, profile);
 
-    int curGen;
-    Generations gens = findGenerations(profile, curGen);
+    auto [gens, curGen] = findGenerations(profile);
 
     bool canDelete = false;
     for (auto i = gens.rbegin(); i != gens.rend(); ++i)
diff --git a/src/libstore/profiles.hh b/src/libstore/profiles.hh
index 78645d8b6a429f7a5ebcd8a0b8b1ae465150d209..abe507f0ed9aed2f9f0fca1baa7421940bfb27aa 100644
--- a/src/libstore/profiles.hh
+++ b/src/libstore/profiles.hh
@@ -9,37 +9,32 @@
 namespace nix {
 
 
+typedef unsigned int GenerationNumber;
+
 struct Generation
 {
-    int number;
+    GenerationNumber number;
     Path path;
     time_t creationTime;
-    Generation()
-    {
-        number = -1;
-    }
-    operator bool() const
-    {
-        return number != -1;
-    }
 };
 
-typedef list<Generation> Generations;
+typedef std::list<Generation> Generations;
 
 
 /* Returns the list of currently present generations for the specified
-   profile, sorted by generation number. */
-Generations findGenerations(Path profile, int & curGen);
+   profile, sorted by generation number. Also returns the number of
+   the current generation. */
+std::pair<Generations, std::optional<GenerationNumber>> findGenerations(Path profile);
 
 class LocalFSStore;
 
 Path createGeneration(ref<LocalFSStore> store, Path profile, Path outPath);
 
-void deleteGeneration(const Path & profile, unsigned int gen);
+void deleteGeneration(const Path & profile, GenerationNumber gen);
 
-void deleteGenerations(const Path & profile, const std::set<unsigned int> & gensToDelete, bool dryRun);
+void deleteGenerations(const Path & profile, const std::set<GenerationNumber> & gensToDelete, bool dryRun);
 
-void deleteGenerationsGreaterThan(const Path & profile, const int max, bool dryRun);
+void deleteGenerationsGreaterThan(const Path & profile, GenerationNumber max, bool dryRun);
 
 void deleteOldGenerations(const Path & profile, bool dryRun);
 
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index 5b9f7904903174816093562b541a079eba5cf0e1..14661722d9023cfc40e265945edfde21fe09e0b3 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -222,20 +222,73 @@ StorePath Store::computeStorePathForText(const string & name, const string & s,
 }
 
 
+/*
+The aim of this function is to compute in one pass the correct ValidPathInfo for
+the files that we are trying to add to the store. To accomplish that in one
+pass, given the different kind of inputs that we can take (normal nar archives,
+nar archives with non SHA-256 hashes, and flat files), we set up a net of sinks
+and aliases. Also, since the dataflow is obfuscated by this, we include here a
+graphviz diagram:
+
+digraph graphname {
+    node [shape=box]
+    fileSource -> narSink
+    narSink [style=dashed]
+    narSink -> unusualHashTee [style = dashed, label = "Recursive && !SHA-256"]
+    narSink -> narHashSink [style = dashed, label = "else"]
+    unusualHashTee -> narHashSink
+    unusualHashTee -> caHashSink
+    fileSource -> parseSink
+    parseSink [style=dashed]
+    parseSink-> fileSink [style = dashed, label = "Flat"]
+    parseSink -> blank [style = dashed, label = "Recursive"]
+    fileSink -> caHashSink
+}
+*/
 ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
     FileIngestionMethod method, HashType hashAlgo,
     std::optional<Hash> expectedCAHash)
 {
-    /* FIXME: inefficient: we're reading/hashing 'tmpFile' three
-       times. */
+    HashSink narHashSink { htSHA256 };
+    HashSink caHashSink { hashAlgo };
+
+    /* Note that fileSink and unusualHashTee must be mutually exclusive, since
+       they both write to caHashSink. This requirement currently holds because
+       fileSink is only used in the flat case. */
+    RetrieveRegularNARSink fileSink { caHashSink };
+    TeeSink unusualHashTee { narHashSink, caHashSink };
+
+    auto & narSink = method == FileIngestionMethod::Recursive && hashAlgo != htSHA256
+        ? static_cast<Sink &>(unusualHashTee)
+        : narHashSink;
+
+    /* Functionally, this means that fileSource will yield the content of
+       srcPath. The fact that we use scratchpadSink as a temporary buffer here
+       is an implementation detail. */
+    auto fileSource = sinkToSource([&](Sink & scratchpadSink) {
+        dumpPath(srcPath, scratchpadSink);
+    });
 
-    auto [narHash, narSize] = hashPath(htSHA256, srcPath);
+    /* tapped provides the same data as fileSource, but we also write all the
+       information to narSink. */
+    TeeSource tapped { *fileSource, narSink };
 
-    auto hash = method == FileIngestionMethod::Recursive
-        ? hashAlgo == htSHA256
-          ? narHash
-          : hashPath(hashAlgo, srcPath).first
-        : hashFile(hashAlgo, srcPath);
+    ParseSink blank;
+    auto & parseSink = method == FileIngestionMethod::Flat
+        ? fileSink
+        : blank;
+
+    /* The information that flows from tapped (besides being replicated in
+       narSink), is now put in parseSink. */
+    parseDump(parseSink, tapped);
+
+    /* We extract the result of the computation from the sink by calling
+       finish. */
+    auto [narHash, narSize] = narHashSink.finish();
+
+    auto hash = method == FileIngestionMethod::Recursive && hashAlgo == htSHA256
+        ? narHash
+        : caHashSink.finish().first;
 
     if (expectedCAHash && expectedCAHash != hash)
         throw Error("hash mismatch for '%s'", srcPath);
@@ -246,8 +299,8 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
     info.ca = FixedOutputHash { .method = method, .hash = hash };
 
     if (!isValidPath(info.path)) {
-        auto source = sinkToSource([&](Sink & sink) {
-            dumpPath(srcPath, sink);
+        auto source = sinkToSource([&](Sink & scratchpadSink) {
+            dumpPath(srcPath, scratchpadSink);
         });
         addToStore(info, *source);
     }
diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh
index a4be0411ee704f4acc28b4113ea6f904e0e114c0..d1cb2035fe1b9ee99c8104e6ef326b6e0897ee5a 100644
--- a/src/libstore/store-api.hh
+++ b/src/libstore/store-api.hh
@@ -460,7 +460,7 @@ public:
         std::optional<Hash> expectedCAHash = {});
 
     // FIXME: remove?
-    virtual StorePath addToStoreFromDump(const string & dump, const string & name,
+    virtual StorePath addToStoreFromDump(Source & dump, const string & name,
         FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair)
     {
         throw Error("addToStoreFromDump() is not supported by this store");
diff --git a/src/libutil/archive.hh b/src/libutil/archive.hh
index 302b1bb18a070bc1e63741f057d033c3c22d9153..57780d16a3242324b460a17b5dc408e1dc35a3a9 100644
--- a/src/libutil/archive.hh
+++ b/src/libutil/archive.hh
@@ -63,12 +63,29 @@ struct ParseSink
     virtual void createSymlink(const Path & path, const string & target) { };
 };
 
-struct TeeParseSink : ParseSink
+/* If the NAR archive contains a single file at top-level, then write
+   the contents of the file to `sink'.  Otherwise barf. */
+struct RetrieveRegularNARSink : ParseSink
 {
-    StringSink saved;
-    TeeSource source;
+    bool regular = true;
+    Sink & sink;
 
-    TeeParseSink(Source & source) : source(source, saved) { }
+    RetrieveRegularNARSink(Sink & sink) : sink(sink) { }
+
+    void createDirectory(const Path & path)
+    {
+        regular = false;
+    }
+
+    void receiveContents(unsigned char * data, unsigned int len)
+    {
+        sink(data, len);
+    }
+
+    void createSymlink(const Path & path, const string & target)
+    {
+        regular = false;
+    }
 };
 
 void parseDump(ParseSink & sink, Source & source);
diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc
index c8b71188fe0f82e3fe83efa44c02032aaa790218..00c94511375e47b3edadc2077d1be6fc2f8b72c3 100644
--- a/src/libutil/serialise.cc
+++ b/src/libutil/serialise.cc
@@ -322,5 +322,18 @@ void StringSink::operator () (const unsigned char * data, size_t len)
     s->append((const char *) data, len);
 }
 
+size_t ChainSource::read(unsigned char * data, size_t len)
+{
+    if (useSecond) {
+        return source2.read(data, len);
+    } else {
+        try {
+            return source1.read(data, len);
+        } catch (EndOfFile &) {
+            useSecond = true;
+            return this->read(data, len);
+        }
+    }
+}
 
 }
diff --git a/src/libutil/serialise.hh b/src/libutil/serialise.hh
index 8386a499124f27966b7e5369018a3ef0970b77dd..5d9acf8878c1ecc3b1d22101dfc06796126909fd 100644
--- a/src/libutil/serialise.hh
+++ b/src/libutil/serialise.hh
@@ -189,7 +189,7 @@ struct TeeSource : Source
     size_t read(unsigned char * data, size_t len)
     {
         size_t n = orig.read(data, len);
-        sink(data, len);
+        sink(data, n);
         return n;
     }
 };
@@ -256,6 +256,19 @@ struct LambdaSource : Source
     }
 };
 
+/* Chain two sources together so after the first is exhausted, the second is
+   used */
+struct ChainSource : Source
+{
+    Source & source1, & source2;
+    bool useSecond = false;
+    ChainSource(Source & s1, Source & s2)
+        : source1(s1), source2(s2)
+    { }
+
+    size_t read(unsigned char * data, size_t len) override;
+};
+
 
 /* Convert a function that feeds data into a Sink into a Source. The
    Source executes the function as a coroutine. */
diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc
index c992b7d740d25d57d25ac08df73b83fff857919a..5795c2c0970e1af87c3c5553b4b0f8a2210d0f60 100644
--- a/src/nix-env/nix-env.cc
+++ b/src/nix-env/nix-env.cc
@@ -1208,18 +1208,17 @@ static void opSwitchProfile(Globals & globals, Strings opFlags, Strings opArgs)
 }
 
 
-static const int prevGen = -2;
+static constexpr GenerationNumber prevGen = std::numeric_limits<GenerationNumber>::max();
 
 
-static void switchGeneration(Globals & globals, int dstGen)
+static void switchGeneration(Globals & globals, GenerationNumber dstGen)
 {
     PathLocks lock;
     lockProfile(lock, globals.profile);
 
-    int curGen;
-    Generations gens = findGenerations(globals.profile, curGen);
+    auto [gens, curGen] = findGenerations(globals.profile);
 
-    Generation dst;
+    std::optional<Generation> dst;
     for (auto & i : gens)
         if ((dstGen == prevGen && i.number < curGen) ||
             (dstGen >= 0 && i.number == dstGen))
@@ -1227,18 +1226,16 @@ static void switchGeneration(Globals & globals, int dstGen)
 
     if (!dst) {
         if (dstGen == prevGen)
-            throw Error("no generation older than the current (%1%) exists",
-                curGen);
+            throw Error("no generation older than the current (%1%) exists", curGen.value_or(0));
         else
             throw Error("generation %1% does not exist", dstGen);
     }
 
-    printInfo(format("switching from generation %1% to %2%")
-        % curGen % dst.number);
+    printInfo("switching from generation %1% to %2%", curGen.value_or(0), dst->number);
 
     if (globals.dryRun) return;
 
-    switchLink(globals.profile, dst.path);
+    switchLink(globals.profile, dst->path);
 }
 
 
@@ -1249,7 +1246,7 @@ static void opSwitchGeneration(Globals & globals, Strings opFlags, Strings opArg
     if (opArgs.size() != 1)
         throw UsageError("exactly one argument expected");
 
-    int dstGen;
+    GenerationNumber dstGen;
     if (!string2Int(opArgs.front(), dstGen))
         throw UsageError("expected a generation number");
 
@@ -1278,8 +1275,7 @@ static void opListGenerations(Globals & globals, Strings opFlags, Strings opArgs
     PathLocks lock;
     lockProfile(lock, globals.profile);
 
-    int curGen;
-    Generations gens = findGenerations(globals.profile, curGen);
+    auto [gens, curGen] = findGenerations(globals.profile);
 
     RunPager pager;
 
@@ -1308,14 +1304,14 @@ static void opDeleteGenerations(Globals & globals, Strings opFlags, Strings opAr
         if(opArgs.front().size() < 2)
             throw Error("invalid number of generations ‘%1%’", opArgs.front());
         string str_max = string(opArgs.front(), 1, opArgs.front().size());
-        int max;
+        GenerationNumber max;
         if (!string2Int(str_max, max) || max == 0)
             throw Error("invalid number of generations to keep ‘%1%’", opArgs.front());
         deleteGenerationsGreaterThan(globals.profile, max, globals.dryRun);
     } else {
-        std::set<unsigned int> gens;
+        std::set<GenerationNumber> gens;
         for (auto & i : opArgs) {
-            unsigned int n;
+            GenerationNumber n;
             if (!string2Int(i, n))
                 throw UsageError("invalid generation number '%1%'", i);
             gens.insert(n);
diff --git a/src/nix/command.hh b/src/nix/command.hh
index 1c74133002bfbe88d9724bd868332bc300cb7580..856721ebfbc674a81633ff1b3573df4f83f32910 100644
--- a/src/nix/command.hh
+++ b/src/nix/command.hh
@@ -244,4 +244,10 @@ void completeFlakeRefWithFragment(
     const Strings & defaultFlakeAttrPaths,
     std::string_view prefix);
 
+void printClosureDiff(
+    ref<Store> store,
+    const StorePath & beforePath,
+    const StorePath & afterPath,
+    std::string_view indent);
+
 }
diff --git a/src/nix/diff-closures.cc b/src/nix/diff-closures.cc
index 56ddb575bfccf2ae52db7aa057fd55bec040587d..4199dae0f51a6a6ae36e77c23368e7c5f293f45d 100644
--- a/src/nix/diff-closures.cc
+++ b/src/nix/diff-closures.cc
@@ -6,7 +6,7 @@
 
 #include <regex>
 
-using namespace nix;
+namespace nix {
 
 struct Info
 {
@@ -52,6 +52,60 @@ std::string showVersions(const std::set<std::string> & versions)
     return concatStringsSep(", ", versions2);
 }
 
+void printClosureDiff(
+    ref<Store> store,
+    const StorePath & beforePath,
+    const StorePath & afterPath,
+    std::string_view indent)
+{
+    auto beforeClosure = getClosureInfo(store, beforePath);
+    auto afterClosure = getClosureInfo(store, afterPath);
+
+    std::set<std::string> allNames;
+    for (auto & [name, _] : beforeClosure) allNames.insert(name);
+    for (auto & [name, _] : afterClosure) allNames.insert(name);
+
+    for (auto & name : allNames) {
+        auto & beforeVersions = beforeClosure[name];
+        auto & afterVersions = afterClosure[name];
+
+        auto totalSize = [&](const std::map<std::string, std::map<StorePath, Info>> & versions)
+        {
+            uint64_t sum = 0;
+            for (auto & [_, paths] : versions)
+                for (auto & [path, _] : paths)
+                    sum += store->queryPathInfo(path)->narSize;
+            return sum;
+        };
+
+        auto beforeSize = totalSize(beforeVersions);
+        auto afterSize = totalSize(afterVersions);
+        auto sizeDelta = (int64_t) afterSize - (int64_t) beforeSize;
+        auto showDelta = abs(sizeDelta) >= 8 * 1024;
+
+        std::set<std::string> removed, unchanged;
+        for (auto & [version, _] : beforeVersions)
+            if (!afterVersions.count(version)) removed.insert(version); else unchanged.insert(version);
+
+        std::set<std::string> added;
+        for (auto & [version, _] : afterVersions)
+            if (!beforeVersions.count(version)) added.insert(version);
+
+        if (showDelta || !removed.empty() || !added.empty()) {
+            std::vector<std::string> items;
+            if (!removed.empty() || !added.empty())
+                items.push_back(fmt("%s → %s", showVersions(removed), showVersions(added)));
+            if (showDelta)
+                items.push_back(fmt("%s%+.1f KiB" ANSI_NORMAL, sizeDelta > 0 ? ANSI_RED : ANSI_GREEN, sizeDelta / 1024.0));
+            std::cout << fmt("%s%s: %s\n", indent, name, concatStringsSep(", ", items));
+        }
+    }
+}
+
+}
+
+using namespace nix;
+
 struct CmdDiffClosures : SourceExprCommand
 {
     std::string _before, _after;
@@ -85,49 +139,7 @@ struct CmdDiffClosures : SourceExprCommand
         auto beforePath = toStorePath(store, Realise::Outputs, operateOn, before);
         auto after = parseInstallable(store, _after);
         auto afterPath = toStorePath(store, Realise::Outputs, operateOn, after);
-
-        auto beforeClosure = getClosureInfo(store, beforePath);
-        auto afterClosure = getClosureInfo(store, afterPath);
-
-        std::set<std::string> allNames;
-        for (auto & [name, _] : beforeClosure) allNames.insert(name);
-        for (auto & [name, _] : afterClosure) allNames.insert(name);
-
-        for (auto & name : allNames) {
-            auto & beforeVersions = beforeClosure[name];
-            auto & afterVersions = afterClosure[name];
-
-            auto totalSize = [&](const std::map<std::string, std::map<StorePath, Info>> & versions)
-            {
-                uint64_t sum = 0;
-                for (auto & [_, paths] : versions)
-                    for (auto & [path, _] : paths)
-                        sum += store->queryPathInfo(path)->narSize;
-                return sum;
-            };
-
-            auto beforeSize = totalSize(beforeVersions);
-            auto afterSize = totalSize(afterVersions);
-            auto sizeDelta = (int64_t) afterSize - (int64_t) beforeSize;
-            auto showDelta = abs(sizeDelta) >= 8 * 1024;
-
-            std::set<std::string> removed, unchanged;
-            for (auto & [version, _] : beforeVersions)
-                if (!afterVersions.count(version)) removed.insert(version); else unchanged.insert(version);
-
-            std::set<std::string> added;
-            for (auto & [version, _] : afterVersions)
-                if (!beforeVersions.count(version)) added.insert(version);
-
-            if (showDelta || !removed.empty() || !added.empty()) {
-                std::vector<std::string> items;
-                if (!removed.empty() || !added.empty())
-                    items.push_back(fmt("%s → %s", showVersions(removed), showVersions(added)));
-                if (showDelta)
-                    items.push_back(fmt("%s%+.1f KiB" ANSI_NORMAL, sizeDelta > 0 ? ANSI_RED : ANSI_GREEN, sizeDelta / 1024.0));
-                std::cout << fmt("%s: %s\n", name, concatStringsSep(", ", items));
-            }
-        }
+        printClosureDiff(store, beforePath, afterPath, "");
     }
 };
 
diff --git a/src/nix/edit.cc b/src/nix/edit.cc
index dc9775635787479714b0fc3c67299675ab6450c1..378a3739c326e9220212068b2202f405d10cae89 100644
--- a/src/nix/edit.cc
+++ b/src/nix/edit.cc
@@ -45,6 +45,7 @@ struct CmdEdit : InstallableCommand
 
         auto args = editorFor(pos);
 
+        restoreSignals();
         execvp(args.front().c_str(), stringsToCharPtrs(args).data());
 
         std::string command;
diff --git a/src/nix/profile.cc b/src/nix/profile.cc
index 307e236d8231f03d266c2ab8d2c745d01a50fa3b..c6cd88c498a6b8fba1e03f665fbdeb93a5b57c24 100644
--- a/src/nix/profile.cc
+++ b/src/nix/profile.cc
@@ -7,6 +7,7 @@
 #include "builtins/buildenv.hh"
 #include "flake/flakeref.hh"
 #include "../nix-env/user-env.hh"
+#include "profiles.hh"
 
 #include <nlohmann/json.hpp>
 #include <regex>
@@ -394,6 +395,46 @@ struct CmdProfileInfo : virtual EvalCommand, virtual StoreCommand, MixDefaultPro
     }
 };
 
+struct CmdProfileDiffClosures : virtual StoreCommand, MixDefaultProfile
+{
+    std::string description() override
+    {
+        return "show the closure difference between each generation of a profile";
+    }
+
+    Examples examples() override
+    {
+        return {
+            Example{
+                "To show what changed between each generation of the NixOS system profile:",
+                "nix profile diff-closures --profile /nix/var/nix/profiles/system"
+            },
+        };
+    }
+
+    void run(ref<Store> store) override
+    {
+        auto [gens, curGen] = findGenerations(*profile);
+
+        std::optional<Generation> prevGen;
+        bool first = true;
+
+        for (auto & gen : gens) {
+            if (prevGen) {
+                if (!first) std::cout << "\n";
+                first = false;
+                std::cout << fmt("Generation %d -> %d:\n", prevGen->number, gen.number);
+                printClosureDiff(store,
+                    store->followLinksToStorePath(prevGen->path),
+                    store->followLinksToStorePath(gen.path),
+                    "  ");
+            }
+
+            prevGen = gen;
+        }
+    }
+};
+
 struct CmdProfile : virtual MultiCommand, virtual Command
 {
     CmdProfile()
@@ -402,6 +443,7 @@ struct CmdProfile : virtual MultiCommand, virtual Command
               {"remove", []() { return make_ref<CmdProfileRemove>(); }},
               {"upgrade", []() { return make_ref<CmdProfileUpgrade>(); }},
               {"info", []() { return make_ref<CmdProfileInfo>(); }},
+              {"diff-closures", []() { return make_ref<CmdProfileDiffClosures>(); }},
           })
     { }
 
@@ -425,4 +467,3 @@ struct CmdProfile : virtual MultiCommand, virtual Command
 };
 
 static auto r1 = registerCommand<CmdProfile>("profile");
-
diff --git a/tests/flakes.sh b/tests/flakes.sh
index 25e1847e162a5e901f59e01d472859f7ffba22e3..5aec563ac09592c22f01e8285e48d3860ce0e170 100644
--- a/tests/flakes.sh
+++ b/tests/flakes.sh
@@ -18,7 +18,6 @@ registry=$TEST_ROOT/registry.json
 flake1Dir=$TEST_ROOT/flake1
 flake2Dir=$TEST_ROOT/flake2
 flake3Dir=$TEST_ROOT/flake3
-flake4Dir=$TEST_ROOT/flake4
 flake5Dir=$TEST_ROOT/flake5
 flake6Dir=$TEST_ROOT/flake6
 flake7Dir=$TEST_ROOT/flake7
@@ -390,14 +389,12 @@ cat > $flake3Dir/flake.nix <<EOF
   };
 }
 EOF
-git -C $flake3Dir add flake.nix
+nix flake update $flake3Dir
+git -C $flake3Dir add flake.nix flake.lock
 git -C $flake3Dir commit -m 'Remove packages.xyzzy'
 git -C $flake3Dir checkout master
 
-# Test whether fuzzy-matching works for IsAlias
-(! nix build -o $TEST_ROOT/result flake4/removeXyzzy#xyzzy)
-
-# Test whether fuzzy-matching works for IsGit
+# Test whether fuzzy-matching works for registry entries.
 (! nix build -o $TEST_ROOT/result flake4/removeXyzzy#xyzzy)
 nix build -o $TEST_ROOT/result flake4/removeXyzzy#sth