Merge master into staging-next

github-actions[bot] 2021-12-18 00:01:29 +00:00 committed by GitHub
commit 084faede55
21 changed files with 2535 additions and 2233 deletions


@@ -4,20 +4,19 @@
# Example how to work with the `lib.maintainers` attrset.
# Can be used to check whether all user handles are still valid.
set -e
# nixpkgs='<nixpkgs>'
# if [ -n "$1" ]; then
set -o errexit -o noclobber -o nounset -o pipefail
shopt -s failglob inherit_errexit
function checkCommits {
local user="$1"
local tmp=$(mktemp)
local ret status tmp user
user="$1"
tmp=$(mktemp)
curl --silent -w "%{http_code}" \
"https://github.com/NixOS/nixpkgs/commits?author=$user" \
> "$tmp"
# the last line of tmp contains the http status
local status=$(tail -n1 "$tmp")
local ret=
status=$(tail -n1 "$tmp")
ret=
case $status in
200) if <"$tmp" grep -i "no commits found" > /dev/null; then
ret=1
@@ -31,7 +30,7 @@ function checkCommits {
checkCommits "$user"
ret=$?
;;
*) printf "BAD STATUS: $(tail -n1 $tmp) for %s\n" "$user"; ret=1
*) printf "BAD STATUS: $(tail -n1 "$tmp") for %s\n" "$user"; ret=1
ret=1
;;
esac
@@ -63,4 +62,5 @@ nix-instantiate -A lib.maintainers --eval --strict --json \
| jq -r '.[]|.github|select(.)' \
| parallel -j5 checkUser
# To check some arbitrary users:
# parallel -j100 checkUser ::: "eelco" "profpatsch" "Profpatsch" "a"
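For reference, the pipeline above turns the `lib.maintainers` attrset into a list of GitHub handles before handing them to checkUser. A rough Nix sketch of an expression that yields the same list directly (illustrative only and not part of this commit; the file name maintainer-handles.nix is made up):

  # Evaluate with: nix-instantiate --eval --strict --json maintainer-handles.nix
  let
    lib = import <nixpkgs/lib>;
  in
    # Keep only maintainers that declare a GitHub handle, mirroring the
    # `select(.)` step of the jq filter used in the script above.
    lib.filter (handle: handle != null)
      (lib.mapAttrsToList (_: m: m.github or null) lib.maintainers)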


@@ -159,6 +159,14 @@
compatibility, but will be removed at a later date.
</para>
</listitem>
<listitem>
<para>
The <literal>services.unifi.openPorts</literal> option default
value of <literal>true</literal> is now deprecated and will be
changed to <literal>false</literal> in 22.11. Configurations
using this default will print a warning when rebuilt.
</para>
</listitem>
</itemizedlist>
</section>
</section>


@@ -65,3 +65,6 @@ In addition to numerous new and upgraded packages, this release has the followin
`influxdb2-cli`, matching the split that took place upstream. A
combined `influxdb2` package is still provided in this release for
backwards compatibility, but will be removed at a later date.
- The `services.unifi.openPorts` option default value of `true` is now deprecated and will be changed to `false` in 22.11.
Configurations using this default will print a warning when rebuilt.
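In practice this means the (renamed) option should be set explicitly in affected configurations. A minimal NixOS configuration sketch, assuming the services.unifi.openFirewall rename introduced elsewhere in this commit; which value to pick is up to the administrator:

  { ... }:
  {
    services.unifi.enable = true;
    # Keep the current behaviour (ports opened) deliberately ...
    services.unifi.openFirewall = true;
    # ... or adopt the upcoming 22.11 default now:
    # services.unifi.openFirewall = false;
  }

Either assignment is stronger than the module default, so the deprecation warning no longer fires.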


@@ -763,7 +763,7 @@ in {
after = [ "network.target" ] ++ optional hasLocalPostgresDB "postgresql.service";
wantedBy = [ "multi-user.target" ];
preStart = ''
${cfg.package}/bin/homeserver \
${cfg.package}/bin/synapse_homeserver \
--config-path ${configFile} \
--keys-directory ${cfg.dataDir} \
--generate-keys
@@ -783,7 +783,7 @@ in {
chmod 0600 ${cfg.dataDir}/homeserver.signing.key
'')) ];
ExecStart = ''
${cfg.package}/bin/homeserver \
${cfg.package}/bin/synapse_homeserver \
${ concatMapStringsSep "\n " (x: "--config-path ${x} \\") ([ configFile ] ++ cfg.extraConfigFiles) }
--keys-directory ${cfg.dataDir}
'';


@@ -1,4 +1,4 @@
{ config, lib, pkgs, utils, ... }:
{ config, options, lib, pkgs, utils, ... }:
with lib;
let
cfg = config.services.unifi;
@@ -50,7 +50,7 @@ in
'';
};
services.unifi.openPorts = mkOption {
services.unifi.openFirewall = mkOption {
type = types.bool;
default = true;
description = ''
@@ -86,6 +86,10 @@ in
config = mkIf cfg.enable {
warnings = optional
(options.services.unifi.openFirewall.highestPrio >= (mkOptionDefault null).priority)
"The current services.unifi.openFirewall = true default is deprecated and will change to false in 22.11. Set it explicitly to silence this warning.";
users.users.unifi = {
isSystemUser = true;
group = "unifi";
@@ -94,7 +98,7 @@ in
};
users.groups.unifi = {};
networking.firewall = mkIf cfg.openPorts {
networking.firewall = mkIf cfg.openFirewall {
# https://help.ubnt.com/hc/en-us/articles/218506997
allowedTCPPorts = [
8080 # Port for UAP to inform controller.
@@ -192,6 +196,7 @@ in
};
imports = [
(mkRemovedOptionModule [ "services" "unifi" "dataDir" ] "You should move contents of dataDir to /var/lib/unifi/data" )
(mkRenamedOptionModule [ "services" "unifi" "openPorts" ] [ "services" "unifi" "openFirewall" ])
];
meta.maintainers = with lib.maintainers; [ erictapen pennae ];
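The warning above hinges on option priorities: anything set in user configuration or by another module gets a stronger (numerically lower) priority than mkOptionDefault, so options.<name>.highestPrio >= (mkOptionDefault null).priority holds only when the declared default is still in effect. A stripped-down sketch of the same pattern for a hypothetical services.example module (names are illustrative, not from this commit):

  { lib, options, config, ... }:
  with lib;
  {
    options.services.example = {
      enable = mkEnableOption "the example service";
      openFirewall = mkOption {
        type = types.bool;
        default = true;  # deprecated default, to be flipped in a future release
        description = "Whether to open firewall ports for the example service.";
      };
    };
    config = mkIf config.services.example.enable {
      # Warn only when nothing set the option at a stronger priority than
      # the declared default, i.e. the user relied on it implicitly.
      warnings = optional
        (options.services.example.openFirewall.highestPrio
          >= (mkOptionDefault null).priority)
        "services.example.openFirewall currently defaults to true; set it explicitly to keep the behaviour you want.";
    };
  }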


@@ -0,0 +1,355 @@
From deeb435829d73524df851f6f4c2d4be552c99230 Mon Sep 17 00:00:00 2001
From: Dmitry Vedenko <dmitry@crsib.me>
Date: Fri, 1 Oct 2021 16:21:22 +0300
Subject: [PATCH] Use a different approach to estimate the disk space usage
The new approach is a bit less precise, but it removes the requirement for the "private" SQLite3 table and allows Audacity to be built against the system SQLite3.
---
cmake-proxies/sqlite/CMakeLists.txt | 5 -
src/DBConnection.h | 4 +-
src/ProjectFileIO.cpp | 269 +++++-----------------------
3 files changed, 44 insertions(+), 234 deletions(-)
diff --git a/cmake-proxies/sqlite/CMakeLists.txt b/cmake-proxies/sqlite/CMakeLists.txt
index 63d70637c..d7b9b95ef 100644
--- a/cmake-proxies/sqlite/CMakeLists.txt
+++ b/cmake-proxies/sqlite/CMakeLists.txt
@@ -19,11 +19,6 @@ list( APPEND INCLUDES
list( APPEND DEFINES
PRIVATE
- #
- # We need the dbpage table for space calculations.
- #
- SQLITE_ENABLE_DBPAGE_VTAB=1
-
# Can't be set after a WAL mode database is initialized, so change
# the default here to ensure all project files get the same page
# size.
diff --git a/src/DBConnection.h b/src/DBConnection.h
index 16a7fc9d4..07d3af95e 100644
--- a/src/DBConnection.h
+++ b/src/DBConnection.h
@@ -75,8 +75,8 @@ public:
LoadSampleBlock,
InsertSampleBlock,
DeleteSampleBlock,
- GetRootPage,
- GetDBPage
+ GetSampleBlockSize,
+ GetAllSampleBlocksSize
};
sqlite3_stmt *Prepare(enum StatementID id, const char *sql);
diff --git a/src/ProjectFileIO.cpp b/src/ProjectFileIO.cpp
index 3b3e2e1fd..c9bc45af4 100644
--- a/src/ProjectFileIO.cpp
+++ b/src/ProjectFileIO.cpp
@@ -35,6 +35,7 @@ Paul Licameli split from AudacityProject.cpp
#include "widgets/ProgressDialog.h"
#include "wxFileNameWrapper.h"
#include "xml/XMLFileReader.h"
+#include "MemoryX.h"
#undef NO_SHM
#if !defined(__WXMSW__)
@@ -2357,255 +2358,69 @@ int64_t ProjectFileIO::GetTotalUsage()
}
//
-// Returns the amount of disk space used by the specified sample blockid or all
-// of the sample blocks if the blockid is 0. It does this by using the raw SQLite
-// pages available from the "sqlite_dbpage" virtual table to traverse the SQLite
-// table b-tree described here: https://www.sqlite.org/fileformat.html
+// Returns the estimation of disk space used by the specified sample blockid or all
+// of the sample blocks if the blockid is 0. This does not include small overhead
+// of the internal SQLite structures, only the size used by the data
//
int64_t ProjectFileIO::GetDiskUsage(DBConnection &conn, SampleBlockID blockid /* = 0 */)
{
- // Information we need to track our travels through the b-tree
- typedef struct
- {
- int64_t pgno;
- int currentCell;
- int numCells;
- unsigned char data[65536];
- } page;
- std::vector<page> stack;
-
- int64_t total = 0;
- int64_t found = 0;
- int64_t right = 0;
- int rc;
+ sqlite3_stmt* stmt = nullptr;
- // Get the rootpage for the sampleblocks table.
- sqlite3_stmt *stmt =
- conn.Prepare(DBConnection::GetRootPage,
- "SELECT rootpage FROM sqlite_master WHERE tbl_name = 'sampleblocks';");
- if (stmt == nullptr || sqlite3_step(stmt) != SQLITE_ROW)
+ if (blockid == 0)
{
- return 0;
- }
-
- // And store it in our first stack frame
- stack.push_back({sqlite3_column_int64(stmt, 0)});
+ static const char* statement =
+R"(SELECT
+ sum(length(blockid) + length(sampleformat) +
+ length(summin) + length(summax) + length(sumrms) +
+ length(summary256) + length(summary64k) +
+ length(samples))
+FROM sampleblocks;)";
- // All done with the statement
- sqlite3_clear_bindings(stmt);
- sqlite3_reset(stmt);
-
- // Prepare/retrieve statement to read raw database page
- stmt = conn.Prepare(DBConnection::GetDBPage,
- "SELECT data FROM sqlite_dbpage WHERE pgno = ?1;");
- if (stmt == nullptr)
- {
- return 0;
+ stmt = conn.Prepare(DBConnection::GetAllSampleBlocksSize, statement);
}
-
- // Traverse the b-tree until we've visited all of the leaf pages or until
- // we find the one corresponding to the passed in sample blockid. Because we
- // use an integer primary key for the sampleblocks table, the traversal will
- // be in ascending blockid sequence.
- do
+ else
{
- // Acces the top stack frame
- page &pg = stack.back();
+ static const char* statement =
+R"(SELECT
+ length(blockid) + length(sampleformat) +
+ length(summin) + length(summax) + length(sumrms) +
+ length(summary256) + length(summary64k) +
+ length(samples)
+FROM sampleblocks WHERE blockid = ?1;)";
- // Read the page from the sqlite_dbpage table if it hasn't yet been loaded
- if (pg.numCells == 0)
- {
- // Bind the page number
- sqlite3_bind_int64(stmt, 1, pg.pgno);
+ stmt = conn.Prepare(DBConnection::GetSampleBlockSize, statement);
+ }
- // And retrieve the page
- if (sqlite3_step(stmt) != SQLITE_ROW)
+ auto cleanup = finally(
+ [stmt]() {
+ // Clear statement bindings and rewind statement
+ if (stmt != nullptr)
{
- // REVIEW: Likely harmless failure - says size is zero on
- // this error.
- // LLL: Yea, but not much else we can do.
- return 0;
+ sqlite3_clear_bindings(stmt);
+ sqlite3_reset(stmt);
}
+ });
- // Copy the page content to the stack frame
- memcpy(&pg.data,
- sqlite3_column_blob(stmt, 0),
- sqlite3_column_bytes(stmt, 0));
-
- // And retrieve the total number of cells within it
- pg.numCells = get2(&pg.data[3]);
-
- // Reset statement for next usage
- sqlite3_clear_bindings(stmt);
- sqlite3_reset(stmt);
- }
-
- //wxLogDebug("%*.*spgno %lld currentCell %d numCells %d", (stack.size() - 1) * 2, (stack.size() - 1) * 2, "", pg.pgno, pg.currentCell, pg.numCells);
-
- // Process an interior table b-tree page
- if (pg.data[0] == 0x05)
- {
- // Process the next cell if we haven't examined all of them yet
- if (pg.currentCell < pg.numCells)
- {
- // Remember the right-most leaf page number.
- right = get4(&pg.data[8]);
-
- // Iterate over the cells.
- //
- // If we're not looking for a specific blockid, then we always push the
- // target page onto the stack and leave the loop after a single iteration.
- //
- // Otherwise, we match the blockid against the highest integer key contained
- // within the cell and if the blockid falls within the cell, we stack the
- // page and stop the iteration.
- //
- // In theory, we could do a binary search for a specific blockid here, but
- // because our sample blocks are always large, we will get very few cells
- // per page...usually 6 or less.
- //
- // In both cases, the stacked page can be either an internal or leaf page.
- bool stacked = false;
- while (pg.currentCell < pg.numCells)
- {
- // Get the offset to this cell using the offset in the cell pointer
- // array.
- //
- // The cell pointer array starts immediately after the page header
- // at offset 12 and the retrieved offset is from the beginning of
- // the page.
- int celloff = get2(&pg.data[12 + (pg.currentCell * 2)]);
-
- // Bump to the next cell for the next iteration.
- pg.currentCell++;
-
- // Get the page number this cell describes
- int pagenum = get4(&pg.data[celloff]);
-
- // And the highest integer key, which starts at offset 4 within the cell.
- int64_t intkey = 0;
- get_varint(&pg.data[celloff + 4], &intkey);
-
- //wxLogDebug("%*.*sinternal - right %lld celloff %d pagenum %d intkey %lld", (stack.size() - 1) * 2, (stack.size() - 1) * 2, " ", right, celloff, pagenum, intkey);
-
- // Stack the described page if we're not looking for a specific blockid
- // or if this page contains the given blockid.
- if (!blockid || blockid <= intkey)
- {
- stack.push_back({pagenum, 0, 0});
- stacked = true;
- break;
- }
- }
-
- // If we pushed a new page onto the stack, we need to jump back up
- // to read the page
- if (stacked)
- {
- continue;
- }
- }
+ if (blockid != 0)
+ {
+ int rc = sqlite3_bind_int64(stmt, 1, blockid);
- // We've exhausted all the cells with this page, so we stack the right-most
- // leaf page. Ensure we only process it once.
- if (right)
- {
- stack.push_back({right, 0, 0});
- right = 0;
- continue;
- }
- }
- // Process a leaf table b-tree page
- else if (pg.data[0] == 0x0d)
+ if (rc != SQLITE_OK)
{
- // Iterate over the cells
- //
- // If we're not looking for a specific blockid, then just accumulate the
- // payload sizes. We will be reading every leaf page in the sampleblocks
- // table.
- //
- // Otherwise we break out when we find the matching blockid. In this case,
- // we only ever look at 1 leaf page.
- bool stop = false;
- for (int i = 0; i < pg.numCells; i++)
- {
- // Get the offset to this cell using the offset in the cell pointer
- // array.
- //
- // The cell pointer array starts immediately after the page header
- // at offset 8 and the retrieved offset is from the beginning of
- // the page.
- int celloff = get2(&pg.data[8 + (i * 2)]);
-
- // Get the total payload size in bytes of the described row.
- int64_t payload = 0;
- int digits = get_varint(&pg.data[celloff], &payload);
-
- // Get the integer key for this row.
- int64_t intkey = 0;
- get_varint(&pg.data[celloff + digits], &intkey);
-
- //wxLogDebug("%*.*sleaf - celloff %4d intkey %lld payload %lld", (stack.size() - 1) * 2, (stack.size() - 1) * 2, " ", celloff, intkey, payload);
-
- // Add this payload size to the total if we're not looking for a specific
- // blockid
- if (!blockid)
- {
- total += payload;
- }
- // Otherwise, return the payload size for a matching row
- else if (blockid == intkey)
- {
- return payload;
- }
- }
+ conn.ThrowException(false);
}
+ }
- // Done with the current branch, so pop back up to the previous one (if any)
- stack.pop_back();
- } while (!stack.empty());
-
- // Return the total used for all sample blocks
- return total;
-}
-
-// Retrieves a 2-byte big-endian integer from the page data
-unsigned int ProjectFileIO::get2(const unsigned char *ptr)
-{
- return (ptr[0] << 8) | ptr[1];
-}
-
-// Retrieves a 4-byte big-endian integer from the page data
-unsigned int ProjectFileIO::get4(const unsigned char *ptr)
-{
- return ((unsigned int) ptr[0] << 24) |
- ((unsigned int) ptr[1] << 16) |
- ((unsigned int) ptr[2] << 8) |
- ((unsigned int) ptr[3]);
-}
-
-// Retrieves a variable length integer from the page data. Returns the
-// number of digits used to encode the integer and the stores the
-// value at the given location.
-int ProjectFileIO::get_varint(const unsigned char *ptr, int64_t *out)
-{
- int64_t val = 0;
- int i;
+ int rc = sqlite3_step(stmt);
- for (i = 0; i < 8; ++i)
+ if (rc != SQLITE_ROW)
{
- val = (val << 7) + (ptr[i] & 0x7f);
- if ((ptr[i] & 0x80) == 0)
- {
- *out = val;
- return i + 1;
- }
+ conn.ThrowException(false);
}
- val = (val << 8) + (ptr[i] & 0xff);
- *out = val;
+ const int64_t size = sqlite3_column_int64(stmt, 0);
- return 9;
+ return size;
}
InvisibleTemporaryProject::InvisibleTemporaryProject()
--
2.33.1


@@ -100,6 +100,9 @@ stdenv.mkDerivation rec {
sha256 = "0zp2iydd46analda9cfnbmzdkjphz5m7dynrdj5qdnmq6j3px9fw";
name = "audacity_xdg_paths.patch";
})
# This is required to make audacity work with nixpkgs sqlite
# https://github.com/audacity/audacity/pull/1802 rebased onto 3.0.2
./0001-Use-a-different-approach-to-estimate-the-disk-space-.patch
];
postPatch = ''


@@ -37,6 +37,7 @@ stdenv.mkDerivation rec {
homepage = "http://kakoune.org/";
description = "A vim inspired text editor";
license = licenses.publicDomain;
mainProgram = "kak";
maintainers = with maintainers; [ vrthra ];
platforms = platforms.unix;
};


@@ -16,45 +16,41 @@
let
libdeltachat' = libdeltachat.overrideAttrs (old: rec {
version = "1.60.0";
version = "1.70.0";
src = fetchFromGitHub {
owner = "deltachat";
repo = "deltachat-core-rust";
rev = version;
sha256 = "1agm5xyaib4ynmw4mhgmkhh4lnxs91wv0q9i1zfihv2vkckfm2s2";
hash = "sha256-702XhFWvFG+g++3X97sy6C5DMNWogv1Xbr8QPR8QyLo=";
};
cargoDeps = rustPlatform.fetchCargoTarball {
inherit src;
name = "${old.pname}-${version}";
sha256 = "09d3mw2hb1gmqg7smaqwnfm7izw40znl0h1dz7s2imms2cnkjws1";
hash = "sha256-MiSGJMXe8vouv4XEHXq274FHEvBMtd7IX6DyNJIWYeU=";
};
patches = [
# https://github.com/deltachat/deltachat-core-rust/pull/2589
(fetchpatch {
url = "https://github.com/deltachat/deltachat-core-rust/commit/408467e85d04fbbfd6bed5908d84d9e995943487.patch";
sha256 = "1j2ywaazglgl6370js34acrg0wrh0b7krqg05dfjf65n527lzn59";
})
./no-static-lib.patch
# https://github.com/deltachat/deltachat-core-rust/pull/2660
(fetchpatch {
url = "https://github.com/deltachat/deltachat-core-rust/commit/8fb5e038a97d8ae68564c885d61b93127a68366d.patch";
sha256 = "088pzfrrkgfi4646dc72404s3kykcpni7hgkppalwlzg0p4is41x";
})
];
});
electronExec = if stdenv.isDarwin then
"${electron}/Applications/Electron.app/Contents/MacOS/Electron"
else
"${electron}/bin/electron";
esbuild' = esbuild.overrideAttrs (old: rec {
version = "0.12.29";
src = fetchFromGitHub {
owner = "evanw";
repo = "esbuild";
rev = "v${version}";
hash = "sha256-oU++9E3StUoyrMVRMZz8/1ntgPI62M1NoNz9sH/N5Bg=";
};
});
in nodePackages.deltachat-desktop.override rec {
pname = "deltachat-desktop";
version = "1.22.2";
version = "1.26.0";
src = fetchFromGitHub {
owner = "deltachat";
repo = "deltachat-desktop";
rev = "v${version}";
sha256 = "0in6w2vl4ypgjb9gfhyh77vg05ni5p3z24lah7wvvhywcpv1jp2n";
hash = "sha256-IDyGV2+/+wHp5N4G10y5OHvw2yoyVxWx394xszIYoj4=";
};
nativeBuildInputs = [
@@ -72,6 +68,7 @@ in nodePackages.deltachat-desktop.override rec {
];
ELECTRON_SKIP_BINARY_DOWNLOAD = "1";
ESBUILD_BINARY_PATH = "${esbuild'}/bin/esbuild";
USE_SYSTEM_LIBDELTACHAT = "true";
VERSION_INFO_GIT_REF = src.rev;


@@ -1,24 +1,24 @@
{
"name": "deltachat-desktop",
"version": "1.22.2",
"version": "1.26.0",
"dependencies": {
"@blueprintjs/core": "^3.22.3",
"@deltachat/message_parser_wasm": "^0.1.0",
"@mapbox/geojson-extent": "^1.0.0",
"application-config": "^1.0.1",
"classnames": "^2.3.1",
"debounce": "^1.2.0",
"deltachat-node": "1.60.0",
"deltachat-node": "1.70.0",
"emoji-js-clean": "^4.0.0",
"emoji-mart": "^3.0.0",
"emoji-regex": "^9.2.2",
"error-stack-parser": "^2.0.6",
"filesize": "^6.1.0",
"filesize": "^8.0.6",
"mapbox-gl": "^1.12.0",
"mime-types": "^2.1.31",
"moment": "^2.27.0",
"node-fetch": "^2.6.1",
"path-browserify": "^1.0.1",
"punycode": "^2.1.1",
"rc": "^1.2.8",
"react": "^17.0.2",
"react-dom": "^17.0.2",
@@ -28,15 +28,14 @@
"react-virtualized-auto-sizer": "^1.0.5",
"react-window": "^1.8.6",
"react-window-infinite-loader": "^1.0.7",
"simple-markdown": "^0.7.1",
"source-map-support": "^0.5.19",
"tempy": "^0.3.0",
"url-parse": "^1.4.7",
"url-parse": "^1.5.3",
"use-debounce": "^3.3.0",
"@babel/core": "^7.7.7",
"@types/debounce": "^1.2.0",
"@babel/preset-env": "^7.7.7",
"@babel/preset-react": "^7.7.4",
"@types/debounce": "^1.2.0",
"@types/emoji-mart": "^3.0.2",
"@types/mapbox-gl": "^0.54.5",
"@types/mime-types": "^2.1.0",
@@ -50,9 +49,10 @@
"@types/react-window-infinite-loader": "^1.0.4",
"@types/url-parse": "^1.4.3",
"electron": "^13.1.6",
"esbuild": "^0.12.15",
"glob-watcher": "^5.0.5",
"sass": "^1.26.5",
"typescript": "^3.9.7",
"typescript": "^4.4.4",
"xml-js": "^1.6.8"
}
}


@@ -20,16 +20,16 @@ if [ "$ver" = "$old_ver" ]; then
fi
echo "$old_ver -> $ver"
sha256=$(nix-prefetch -f "$nixpkgs" deltachat-desktop --rev "$rev")
hash=$(nix-prefetch -f "$nixpkgs" deltachat-desktop --rev "$rev")
tac default.nix \
| sed -e "0,/version = \".*\"/s//version = \"$ver\"/" \
-e "0,/sha256 = \".*\"/s//sha256 = \"$sha256\"/" \
-e "0,/hash = \".*\"/s//hash = \"${hash//\//\\/}\"/" \
| tac \
| sponge default.nix
src=$(nix-build "$nixpkgs" -A deltachat-desktop.src --no-out-link)
jq '{ name, version, dependencies: (.dependencies + (.devDependencies | del(.["@typescript-eslint/eslint-plugin","@typescript-eslint/parser","esbuild","electron-builder","electron-devtools-installer","electron-notarize","esbuild","eslint","eslint-config-prettier","eslint-plugin-react-hooks","hallmark","prettier","tape","testcafe","testcafe-browser-provider-electron","testcafe-react-selectors","walk"]))) }' \
jq '{ name, version, dependencies: (.dependencies + (.devDependencies | del(.["@typescript-eslint/eslint-plugin","@typescript-eslint/parser","electron-builder","electron-devtools-installer","electron-notarize","eslint","eslint-config-prettier","eslint-plugin-react-hooks","hallmark","prettier","tape","testcafe","testcafe-browser-provider-electron","testcafe-react-selectors","walk"]))) }' \
"$src/package.json" > package.json.new
if cmp --quiet package.json{.new,}; then


@@ -4,7 +4,7 @@
stdenv.mkDerivation rec {
pname = "mblaze";
version = "1.1";
version = "1.2";
nativeBuildInputs = [ installShellFiles makeWrapper ];
buildInputs = [ ruby ] ++ lib.optionals stdenv.isDarwin [ libiconv ];
@@ -13,7 +13,7 @@ stdenv.mkDerivation rec {
owner = "leahneukirchen";
repo = "mblaze";
rev = "v${version}";
sha256 = "sha256-Ho2Qoxs93ig4yYUOaoqdYnLA8Y4+7CfRM0dju89JOa4=";
sha256 = "sha256-LCyw3xGsYjsbExueRHVRqoJYluji9MmZq5zGclvSSDk=";
};
makeFlags = [ "PREFIX=$(out)" ];


@@ -1,4 +1,5 @@
{ lib, stdenv
{ lib
, stdenv
, meson
, ninja
, gettext
@@ -40,21 +41,14 @@
stdenv.mkDerivation rec {
pname = "epiphany";
version = "41.0";
version = "41.2";
src = fetchurl {
url = "mirror://gnome/sources/${pname}/${lib.versions.major version}/${pname}-${version}.tar.xz";
sha256 = "s50YJUkllbC3TF1qZoaoV/lBnfpMAvgBPCl7yHDibdA=";
sha256 = "Ud5KGB+nxKEs3DDMsWQ2ElwaFt+av44/pTP8gb8Q60w=";
};
patches = [
# tab-view: Update close button position on startup
# https://gitlab.gnome.org/GNOME/epiphany/-/merge_requests/1025
(fetchpatch {
url = "https://gitlab.gnome.org/GNOME/epiphany/-/commit/6e9d6d3cf7fa7ddf21a70e9816a5cd4767a79523.patch";
sha256 = "sha256-lBVliGCIKwTvsYnWjAcmJxhTg1HS/2x4wlOh+4sx/xQ=";
})
] ++ lib.optionals withPantheon [
patches = lib.optionals withPantheon [
# Pantheon specific patches for epiphany
# https://github.com/elementary/browser
#

File diff suppressed because it is too large.


@@ -10,7 +10,7 @@
buildPythonPackage rec {
pname = "rapidfuzz";
version = "1.9.0";
version = "1.9.1";
disabled = pythonOlder "3.5";
@@ -19,7 +19,7 @@ buildPythonPackage rec {
repo = "RapidFuzz";
rev = "v${version}";
fetchSubmodules = true;
sha256 = "sha256-INtoJXPe22vB6Yi9Co3xxJ8uxZYz9dn0eMF+fo9GfIo=";
sha256 = "sha256-aZqsQHrxmPqZARkqR1hWaj7XndOlCJjmWk1Cosx4skA=";
};
propagatedBuildInputs = [


@@ -17,15 +17,15 @@
rustPlatform.buildRustPackage rec {
pname = "deno";
version = "1.16.4";
version = "1.17.0";
src = fetchFromGitHub {
owner = "denoland";
repo = pname;
rev = "v${version}";
sha256 = "sha256-lEUEA8TAUCzTViGTSPz3Iw17BIIHr+oQXA0ldWe2T3w=";
sha256 = "sha256-fkgsMPSmJVp62sVKuuHOsOUczFfBoooKNEY3w5f9zbE=";
};
cargoSha256 = "sha256-Y/eN15B3aiIrvhuGykB9sQRGRajEC7t84JQ2U0dHAzo=";
cargoSha256 = "sha256-wzQf5Wb0kxAg5sNxom9qltdkpJbNatA7IK4oVstQXME=";
# Install completions post-install
nativeBuildInputs = [ installShellFiles ];
@@ -43,6 +43,10 @@ rustPlatform.buildRustPackage rec {
# Skipping until resolved
doCheck = false;
preInstall = ''
find ./target -name libswc_common.${stdenv.hostPlatform.extensions.sharedLibrary} -delete
'';
postInstall = ''
installShellCompletion --cmd deno \
--bash <($out/bin/deno completions bash) \


@@ -1,33 +0,0 @@
From 36ffbb7ad2c535180cae473b470a43f9db4fbdcd Mon Sep 17 00:00:00 2001
From: Maximilian Bosch <maximilian@mbosch.me>
Date: Mon, 16 Aug 2021 13:27:28 +0200
Subject: [PATCH 1/2] setup: add homeserver as console script
With this change, it will be added to `$out/bin` in `nixpkgs` directly.
This became necessary since our old workaround, calling it as script,
doesn't work anymore since the shebangs were removed[1].
[1] https://github.com/matrix-org/synapse/pull/10415
---
setup.py | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/setup.py b/setup.py
index c47856351..27f1d842c 100755
--- a/setup.py
+++ b/setup.py
@@ -133,6 +133,11 @@ setup(
long_description=long_description,
long_description_content_type="text/x-rst",
python_requires="~=3.6",
+ entry_points={
+ 'console_scripts': [
+ 'homeserver = synapse.app.homeserver:main'
+ ]
+ },
classifiers=[
"Development Status :: 5 - Production/Stable",
"Topic :: Communications :: Chat",
--
2.31.1


@@ -1,43 +0,0 @@
From 3089758015c64cc1e6788793c4fe40a0e1783457 Mon Sep 17 00:00:00 2001
From: Maximilian Bosch <maximilian@mbosch.me>
Date: Tue, 5 Oct 2021 22:33:12 +0200
Subject: [PATCH 2/2] Expose generic worker as binary under NixOS
---
setup.py | 3 ++-
synapse/app/generic_worker.py | 6 +++++-
2 files changed, 7 insertions(+), 2 deletions(-)
diff --git a/setup.py b/setup.py
index 27f1d842c..6383aed6f 100755
--- a/setup.py
+++ b/setup.py
@@ -135,7 +135,8 @@ setup(
python_requires="~=3.6",
entry_points={
'console_scripts': [
- 'homeserver = synapse.app.homeserver:main'
+ 'homeserver = synapse.app.homeserver:main',
+ 'worker = synapse.app.generic_worker:main'
]
},
classifiers=[
diff --git a/synapse/app/generic_worker.py b/synapse/app/generic_worker.py
index 3b7131af8..c77a6a95c 100644
--- a/synapse/app/generic_worker.py
+++ b/synapse/app/generic_worker.py
@@ -491,6 +491,10 @@ def start(config_options):
_base.start_worker_reactor("synapse-generic-worker", config)
-if __name__ == "__main__":
+def main():
with LoggingContext("main"):
start(sys.argv[1:])
+
+
+if __name__ == "__main__":
+ main()
--
2.31.1


@@ -11,18 +11,13 @@ in
with python3.pkgs;
buildPythonApplication rec {
pname = "matrix-synapse";
version = "1.48.0";
version = "1.49.0";
src = fetchPypi {
inherit pname version;
sha256 = "sha256-G09VbfC9mZ0+shLHRNutR91URewvLW4l4lQaVrsZYaQ=";
sha256 = "sha256-vvLcRy2qt/qOgKkQpj2eHP4691OX0AW58dD3EecluZM=";
};
patches = [
./0001-setup-add-homeserver-as-console-script.patch
./0002-Expose-generic-worker-as-binary-under-NixOS.patch
];
buildInputs = [ openssl ];
propagatedBuildInputs = [


@@ -82,5 +82,9 @@ stdenv.mkDerivation rec {
maintainers = [];
platforms = lib.platforms.unix;
knownVulnerabilities = [ "CVE-2021-42785" ];
# Unfortunately, upstream doesn't maintain the 1.3 branch anymore, and the
# new 2.x branch is substantially different (requiring either Windows or Java)
};
}


@@ -2,16 +2,16 @@
buildGoModule rec {
pname = "oapi-codegen";
version = "1.8.3";
version = "1.9.0";
src = fetchFromGitHub {
owner = "deepmap";
repo = pname;
rev = "v${version}";
sha256 = "sha256-VAtfJ1PXTSPMoQ4NqX0GcZMyi15edxWj6Xsj6h1b7hc=";
sha256 = "sha256-pGkTCOQ2OR/9c5+L9UgESJjSMmz9FjfJw9NB8Nr6gRQ=";
};
vendorSha256 = "sha256-s6+Rs+G4z5fcmUiwGjeDoQYKWJz0a/PCejfKyn8WWxs=";
vendorSha256 = "sha256-hvY64cmfvEeHniscD1WDyaeFpWeBJwsDNwr76e9F6ow=";
# Tests use network
doCheck = false;